gcc/tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
72 /* Tree code classes. */
74 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
75 #define END_OF_BASE_TREE_CODES tcc_exceptional,
77 const enum tree_code_class tree_code_type[] = {
78 #include "all-tree.def"
81 #undef DEFTREECODE
82 #undef END_OF_BASE_TREE_CODES
84 /* Table indexed by tree code giving number of expression
85 operands beyond the fixed part of the node structure.
86 Not used for types or decls. */
88 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
89 #define END_OF_BASE_TREE_CODES 0,
91 const unsigned char tree_code_length[] = {
92 #include "all-tree.def"
95 #undef DEFTREECODE
96 #undef END_OF_BASE_TREE_CODES
98 /* Names of tree components.
99 Used for printing out the tree and error messages. */
100 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
101 #define END_OF_BASE_TREE_CODES "@dummy",
103 static const char *const tree_code_name[] = {
104 #include "all-tree.def"
107 #undef DEFTREECODE
108 #undef END_OF_BASE_TREE_CODES
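/* Illustrative note (editor's addition, not part of tree.c): the three
   tables above are produced by expanding the same all-tree.def entries
   under different DEFTREECODE definitions.  For an entry such as

       DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   the expansions contribute, respectively,

       tree_code_type[]    ...  tcc_binary,     (class of the code)
       tree_code_length[]  ...  2,              (number of operands)
       tree_code_name[]    ...  "plus_expr",    (printable name)

   so the three arrays stay parallel by construction.  */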
110 /* Each tree code class has an associated string representation.
111 These must correspond to the tree_code_class entries. */
113 const char *const tree_code_class_strings[] =
115 "exceptional",
116 "constant",
117 "type",
118 "declaration",
119 "reference",
120 "comparison",
121 "unary",
122 "binary",
123 "statement",
124 "vl_exp",
125 "expression"
128 /* obstack.[ch] explicitly declined to prototype this. */
129 extern int _obstack_allocated_p (struct obstack *h, void *obj);
131 /* Statistics-gathering stuff. */
133 static uint64_t tree_code_counts[MAX_TREE_CODES];
134 uint64_t tree_node_counts[(int) all_kinds];
135 uint64_t tree_node_sizes[(int) all_kinds];
137 /* Keep in sync with tree.h:enum tree_node_kind. */
138 static const char * const tree_node_kind_names[] = {
139 "decls",
140 "types",
141 "blocks",
142 "stmts",
143 "refs",
144 "exprs",
145 "constants",
146 "identifiers",
147 "vecs",
148 "binfos",
149 "ssa names",
150 "constructors",
151 "random kinds",
152 "lang_decl kinds",
153 "lang_type kinds",
154 "omp clauses",
157 /* Unique id for next decl created. */
158 static GTY(()) int next_decl_uid;
159 /* Unique id for next type created. */
160 static GTY(()) unsigned next_type_uid = 1;
161 /* Unique id for next debug decl created. Use negative numbers,
162 to catch erroneous uses. */
163 static GTY(()) int next_debug_decl_uid;
165 /* Since we cannot rehash a type after it is in the table, we have to
166 keep the hash code. */
168 struct GTY((for_user)) type_hash {
169 unsigned long hash;
170 tree type;
173 /* Initial size of the hash table (rounded to next prime). */
174 #define TYPE_HASH_INITIAL_SIZE 1000
176 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
178 static hashval_t hash (type_hash *t) { return t->hash; }
179 static bool equal (type_hash *a, type_hash *b);
181 static int
182 keep_cache_entry (type_hash *&t)
184 return ggc_marked_p (t->type);
188 /* Now here is the hash table. When recording a type, it is added to
189 the slot whose index is the hash code. Note that the hash table is
190 used for several kinds of types (function types, array types and
191 array index range types, for now). While all these live in the
192 same table, they are completely independent, and the hash code is
193 computed differently for each of these. */
195 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
197 /* Hash table and temporary node for larger integer const values. */
198 static GTY (()) tree int_cst_node;
200 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
202 static hashval_t hash (tree t);
203 static bool equal (tree x, tree y);
206 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
208 /* Class and variable for making sure that there is a single POLY_INT_CST
209 for a given value. */
210 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
212 typedef std::pair<tree, const poly_wide_int *> compare_type;
213 static hashval_t hash (tree t);
214 static bool equal (tree x, const compare_type &y);
217 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
219 /* Hash table for optimization flags and target option flags. Use the same
220 hash table for both sets of options. Nodes for building the current
221 optimization and target option nodes. The assumption is most of the time
222 the options created will already be in the hash table, so we avoid
223 allocating and freeing up a node repeatedly. */
224 static GTY (()) tree cl_optimization_node;
225 static GTY (()) tree cl_target_option_node;
227 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
229 static hashval_t hash (tree t);
230 static bool equal (tree x, tree y);
233 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
235 /* General tree->tree mapping structure for use in hash tables. */
238 static GTY ((cache))
239 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
241 static GTY ((cache))
242 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
244 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
246 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
248 static bool
249 equal (tree_vec_map *a, tree_vec_map *b)
251 return a->base.from == b->base.from;
254 static int
255 keep_cache_entry (tree_vec_map *&m)
257 return ggc_marked_p (m->base.from);
261 static GTY ((cache))
262 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
264 static void set_type_quals (tree, int);
265 static void print_type_hash_statistics (void);
266 static void print_debug_expr_statistics (void);
267 static void print_value_expr_statistics (void);
269 static tree build_array_type_1 (tree, tree, bool, bool, bool);
271 tree global_trees[TI_MAX];
272 tree integer_types[itk_none];
274 bool int_n_enabled_p[NUM_INT_N_ENTS];
275 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
277 bool tree_contains_struct[MAX_TREE_CODES][64];
279 /* Number of operands for each OpenMP clause. */
280 unsigned const char omp_clause_num_ops[] =
282 0, /* OMP_CLAUSE_ERROR */
283 1, /* OMP_CLAUSE_PRIVATE */
284 1, /* OMP_CLAUSE_SHARED */
285 1, /* OMP_CLAUSE_FIRSTPRIVATE */
286 2, /* OMP_CLAUSE_LASTPRIVATE */
287 5, /* OMP_CLAUSE_REDUCTION */
288 5, /* OMP_CLAUSE_TASK_REDUCTION */
289 5, /* OMP_CLAUSE_IN_REDUCTION */
290 1, /* OMP_CLAUSE_COPYIN */
291 1, /* OMP_CLAUSE_COPYPRIVATE */
292 3, /* OMP_CLAUSE_LINEAR */
293 2, /* OMP_CLAUSE_ALIGNED */
294 1, /* OMP_CLAUSE_DEPEND */
295 1, /* OMP_CLAUSE_NONTEMPORAL */
296 1, /* OMP_CLAUSE_UNIFORM */
297 1, /* OMP_CLAUSE_TO_DECLARE */
298 1, /* OMP_CLAUSE_LINK */
299 2, /* OMP_CLAUSE_FROM */
300 2, /* OMP_CLAUSE_TO */
301 2, /* OMP_CLAUSE_MAP */
302 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
303 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
304 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
305 1, /* OMP_CLAUSE_INCLUSIVE */
306 1, /* OMP_CLAUSE_EXCLUSIVE */
307 2, /* OMP_CLAUSE__CACHE_ */
308 2, /* OMP_CLAUSE_GANG */
309 1, /* OMP_CLAUSE_ASYNC */
310 1, /* OMP_CLAUSE_WAIT */
311 0, /* OMP_CLAUSE_AUTO */
312 0, /* OMP_CLAUSE_SEQ */
313 1, /* OMP_CLAUSE__LOOPTEMP_ */
314 1, /* OMP_CLAUSE__REDUCTEMP_ */
315 1, /* OMP_CLAUSE__CONDTEMP_ */
316 1, /* OMP_CLAUSE__SCANTEMP_ */
317 1, /* OMP_CLAUSE_IF */
318 1, /* OMP_CLAUSE_NUM_THREADS */
319 1, /* OMP_CLAUSE_SCHEDULE */
320 0, /* OMP_CLAUSE_NOWAIT */
321 1, /* OMP_CLAUSE_ORDERED */
322 0, /* OMP_CLAUSE_DEFAULT */
323 3, /* OMP_CLAUSE_COLLAPSE */
324 0, /* OMP_CLAUSE_UNTIED */
325 1, /* OMP_CLAUSE_FINAL */
326 0, /* OMP_CLAUSE_MERGEABLE */
327 1, /* OMP_CLAUSE_DEVICE */
328 1, /* OMP_CLAUSE_DIST_SCHEDULE */
329 0, /* OMP_CLAUSE_INBRANCH */
330 0, /* OMP_CLAUSE_NOTINBRANCH */
331 1, /* OMP_CLAUSE_NUM_TEAMS */
332 1, /* OMP_CLAUSE_THREAD_LIMIT */
333 0, /* OMP_CLAUSE_PROC_BIND */
334 1, /* OMP_CLAUSE_SAFELEN */
335 1, /* OMP_CLAUSE_SIMDLEN */
336 0, /* OMP_CLAUSE_DEVICE_TYPE */
337 0, /* OMP_CLAUSE_FOR */
338 0, /* OMP_CLAUSE_PARALLEL */
339 0, /* OMP_CLAUSE_SECTIONS */
340 0, /* OMP_CLAUSE_TASKGROUP */
341 1, /* OMP_CLAUSE_PRIORITY */
342 1, /* OMP_CLAUSE_GRAINSIZE */
343 1, /* OMP_CLAUSE_NUM_TASKS */
344 0, /* OMP_CLAUSE_NOGROUP */
345 0, /* OMP_CLAUSE_THREADS */
346 0, /* OMP_CLAUSE_SIMD */
347 1, /* OMP_CLAUSE_HINT */
348 0, /* OMP_CLAUSE_DEFAULTMAP */
349 0, /* OMP_CLAUSE_ORDER */
350 0, /* OMP_CLAUSE_BIND */
351 1, /* OMP_CLAUSE__SIMDUID_ */
352 0, /* OMP_CLAUSE__SIMT_ */
353 0, /* OMP_CLAUSE_INDEPENDENT */
354 1, /* OMP_CLAUSE_WORKER */
355 1, /* OMP_CLAUSE_VECTOR */
356 1, /* OMP_CLAUSE_NUM_GANGS */
357 1, /* OMP_CLAUSE_NUM_WORKERS */
358 1, /* OMP_CLAUSE_VECTOR_LENGTH */
359 3, /* OMP_CLAUSE_TILE */
360 0, /* OMP_CLAUSE_IF_PRESENT */
361 0, /* OMP_CLAUSE_FINALIZE */
364 const char * const omp_clause_code_name[] =
366 "error_clause",
367 "private",
368 "shared",
369 "firstprivate",
370 "lastprivate",
371 "reduction",
372 "task_reduction",
373 "in_reduction",
374 "copyin",
375 "copyprivate",
376 "linear",
377 "aligned",
378 "depend",
379 "nontemporal",
380 "uniform",
381 "to",
382 "link",
383 "from",
384 "to",
385 "map",
386 "use_device_ptr",
387 "use_device_addr",
388 "is_device_ptr",
389 "inclusive",
390 "exclusive",
391 "_cache_",
392 "gang",
393 "async",
394 "wait",
395 "auto",
396 "seq",
397 "_looptemp_",
398 "_reductemp_",
399 "_condtemp_",
400 "_scantemp_",
401 "if",
402 "num_threads",
403 "schedule",
404 "nowait",
405 "ordered",
406 "default",
407 "collapse",
408 "untied",
409 "final",
410 "mergeable",
411 "device",
412 "dist_schedule",
413 "inbranch",
414 "notinbranch",
415 "num_teams",
416 "thread_limit",
417 "proc_bind",
418 "safelen",
419 "simdlen",
420 "device_type",
421 "for",
422 "parallel",
423 "sections",
424 "taskgroup",
425 "priority",
426 "grainsize",
427 "num_tasks",
428 "nogroup",
429 "threads",
430 "simd",
431 "hint",
432 "defaultmap",
433 "order",
434 "bind",
435 "_simduid_",
436 "_simt_",
437 "independent",
438 "worker",
439 "vector",
440 "num_gangs",
441 "num_workers",
442 "vector_length",
443 "tile",
444 "if_present",
445 "finalize",
449 /* Return the tree node structure used by tree code CODE. */
451 static inline enum tree_node_structure_enum
452 tree_node_structure_for_code (enum tree_code code)
454 switch (TREE_CODE_CLASS (code))
456 case tcc_declaration:
457 switch (code)
459 case CONST_DECL: return TS_CONST_DECL;
460 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
461 case FIELD_DECL: return TS_FIELD_DECL;
462 case FUNCTION_DECL: return TS_FUNCTION_DECL;
463 case LABEL_DECL: return TS_LABEL_DECL;
464 case PARM_DECL: return TS_PARM_DECL;
465 case RESULT_DECL: return TS_RESULT_DECL;
466 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
467 case TYPE_DECL: return TS_TYPE_DECL;
468 case VAR_DECL: return TS_VAR_DECL;
469 default: return TS_DECL_NON_COMMON;
472 case tcc_type: return TS_TYPE_NON_COMMON;
474 case tcc_binary:
475 case tcc_comparison:
476 case tcc_expression:
477 case tcc_reference:
478 case tcc_statement:
479 case tcc_unary:
480 case tcc_vl_exp: return TS_EXP;
482 default: /* tcc_constant and tcc_exceptional */
483 break;
486 switch (code)
488 /* tcc_constant cases. */
489 case COMPLEX_CST: return TS_COMPLEX;
490 case FIXED_CST: return TS_FIXED_CST;
491 case INTEGER_CST: return TS_INT_CST;
492 case POLY_INT_CST: return TS_POLY_INT_CST;
493 case REAL_CST: return TS_REAL_CST;
494 case STRING_CST: return TS_STRING;
495 case VECTOR_CST: return TS_VECTOR;
496 case VOID_CST: return TS_TYPED;
498 /* tcc_exceptional cases. */
499 case BLOCK: return TS_BLOCK;
500 case CONSTRUCTOR: return TS_CONSTRUCTOR;
501 case ERROR_MARK: return TS_COMMON;
502 case IDENTIFIER_NODE: return TS_IDENTIFIER;
503 case OMP_CLAUSE: return TS_OMP_CLAUSE;
504 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
505 case PLACEHOLDER_EXPR: return TS_COMMON;
506 case SSA_NAME: return TS_SSA_NAME;
507 case STATEMENT_LIST: return TS_STATEMENT_LIST;
508 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
509 case TREE_BINFO: return TS_BINFO;
510 case TREE_LIST: return TS_LIST;
511 case TREE_VEC: return TS_VEC;
513 default:
514 gcc_unreachable ();
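/* Illustrative examples (editor's addition, not part of tree.c) of the
   mapping implemented above:

       VAR_DECL    (tcc_declaration) -> TS_VAR_DECL
       PLUS_EXPR   (tcc_binary)      -> TS_EXP
       INTEGER_CST (tcc_constant)    -> TS_INT_CST

   initialize_tree_contains_struct () below uses this mapping to seed
   tree_contains_struct[][], which backs the CODE_CONTAINS_STRUCT checks.  */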
519 /* Initialize tree_contains_struct to describe the hierarchy of tree
520 nodes. */
522 static void
523 initialize_tree_contains_struct (void)
525 unsigned i;
527 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
529 enum tree_code code;
530 enum tree_node_structure_enum ts_code;
532 code = (enum tree_code) i;
533 ts_code = tree_node_structure_for_code (code);
535 /* Mark the TS structure itself. */
536 tree_contains_struct[code][ts_code] = 1;
538 /* Mark all the structures that TS is derived from. */
539 switch (ts_code)
541 case TS_TYPED:
542 case TS_BLOCK:
543 case TS_OPTIMIZATION:
544 case TS_TARGET_OPTION:
545 MARK_TS_BASE (code);
546 break;
548 case TS_COMMON:
549 case TS_INT_CST:
550 case TS_POLY_INT_CST:
551 case TS_REAL_CST:
552 case TS_FIXED_CST:
553 case TS_VECTOR:
554 case TS_STRING:
555 case TS_COMPLEX:
556 case TS_SSA_NAME:
557 case TS_CONSTRUCTOR:
558 case TS_EXP:
559 case TS_STATEMENT_LIST:
560 MARK_TS_TYPED (code);
561 break;
563 case TS_IDENTIFIER:
564 case TS_DECL_MINIMAL:
565 case TS_TYPE_COMMON:
566 case TS_LIST:
567 case TS_VEC:
568 case TS_BINFO:
569 case TS_OMP_CLAUSE:
570 MARK_TS_COMMON (code);
571 break;
573 case TS_TYPE_WITH_LANG_SPECIFIC:
574 MARK_TS_TYPE_COMMON (code);
575 break;
577 case TS_TYPE_NON_COMMON:
578 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
579 break;
581 case TS_DECL_COMMON:
582 MARK_TS_DECL_MINIMAL (code);
583 break;
585 case TS_DECL_WRTL:
586 case TS_CONST_DECL:
587 MARK_TS_DECL_COMMON (code);
588 break;
590 case TS_DECL_NON_COMMON:
591 MARK_TS_DECL_WITH_VIS (code);
592 break;
594 case TS_DECL_WITH_VIS:
595 case TS_PARM_DECL:
596 case TS_LABEL_DECL:
597 case TS_RESULT_DECL:
598 MARK_TS_DECL_WRTL (code);
599 break;
601 case TS_FIELD_DECL:
602 MARK_TS_DECL_COMMON (code);
603 break;
605 case TS_VAR_DECL:
606 MARK_TS_DECL_WITH_VIS (code);
607 break;
609 case TS_TYPE_DECL:
610 case TS_FUNCTION_DECL:
611 MARK_TS_DECL_NON_COMMON (code);
612 break;
614 case TS_TRANSLATION_UNIT_DECL:
615 MARK_TS_DECL_COMMON (code);
616 break;
618 default:
619 gcc_unreachable ();
623 /* Basic consistency checks for attributes used in fold. */
624 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
625 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
626 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
627 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
628 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
629 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
630 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
631 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
632 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
633 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
634 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
635 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
636 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
637 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
638 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
639 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
640 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
641 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
642 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
643 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
644 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
645 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
646 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
647 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
648 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
649 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
650 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
651 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
652 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
653 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
654 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
655 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
656 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
657 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
658 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
659 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
660 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
661 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
662 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
663 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
667 /* Init tree.c. */
669 void
670 init_ttree (void)
672 /* Initialize the hash table of types. */
673 type_hash_table
674 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
676 debug_expr_for_decl
677 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
679 value_expr_for_decl
680 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
682 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
684 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
686 int_cst_node = make_int_cst (1, 1);
688 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
690 cl_optimization_node = make_node (OPTIMIZATION_NODE);
691 cl_target_option_node = make_node (TARGET_OPTION_NODE);
693 /* Initialize the tree_contains_struct array. */
694 initialize_tree_contains_struct ();
695 lang_hooks.init_ts ();
699 /* The name of the object as the assembler will see it (but before any
700 translations made by ASM_OUTPUT_LABELREF). Often this is the same
701 as DECL_NAME. It is an IDENTIFIER_NODE. */
702 tree
703 decl_assembler_name (tree decl)
705 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
706 lang_hooks.set_decl_assembler_name (decl);
707 return DECL_ASSEMBLER_NAME_RAW (decl);
710 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
711 (either of which may be NULL). Inform the FE, if this changes the
712 name. */
714 void
715 overwrite_decl_assembler_name (tree decl, tree name)
717 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
718 lang_hooks.overwrite_decl_assembler_name (decl, name);
721 /* When the target supports COMDAT groups, this indicates which group the
722 DECL is associated with. This can be either an IDENTIFIER_NODE or a
723 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
724 tree
725 decl_comdat_group (const_tree node)
727 struct symtab_node *snode = symtab_node::get (node);
728 if (!snode)
729 return NULL;
730 return snode->get_comdat_group ();
733 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
734 tree
735 decl_comdat_group_id (const_tree node)
737 struct symtab_node *snode = symtab_node::get (node);
738 if (!snode)
739 return NULL;
740 return snode->get_comdat_group_id ();
743 /* When the target supports named sections, return the section name of
744 NODE as a string, or NULL if it is in no section. */
745 const char *
746 decl_section_name (const_tree node)
748 struct symtab_node *snode = symtab_node::get (node);
749 if (!snode)
750 return NULL;
751 return snode->get_section ();
754 /* Set the section name of NODE to VALUE, a string, or clear the
755 section when VALUE is NULL. */
756 void
757 set_decl_section_name (tree node, const char *value)
759 struct symtab_node *snode;
761 if (value == NULL)
763 snode = symtab_node::get (node);
764 if (!snode)
765 return;
767 else if (VAR_P (node))
768 snode = varpool_node::get_create (node);
769 else
770 snode = cgraph_node::get_create (node);
771 snode->set_section (value);
774 /* Return TLS model of a variable NODE. */
775 enum tls_model
776 decl_tls_model (const_tree node)
778 struct varpool_node *snode = varpool_node::get (node);
779 if (!snode)
780 return TLS_MODEL_NONE;
781 return snode->tls_model;
784 /* Set TLS model of variable NODE to MODEL. */
785 void
786 set_decl_tls_model (tree node, enum tls_model model)
788 struct varpool_node *vnode;
790 if (model == TLS_MODEL_NONE)
792 vnode = varpool_node::get (node);
793 if (!vnode)
794 return;
796 else
797 vnode = varpool_node::get_create (node);
798 vnode->tls_model = model;
801 /* Compute the number of bytes occupied by a tree with code CODE.
802 This function cannot be used for nodes that have variable sizes,
803 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
804 size_t
805 tree_code_size (enum tree_code code)
807 switch (TREE_CODE_CLASS (code))
809 case tcc_declaration: /* A decl node */
810 switch (code)
812 case FIELD_DECL: return sizeof (tree_field_decl);
813 case PARM_DECL: return sizeof (tree_parm_decl);
814 case VAR_DECL: return sizeof (tree_var_decl);
815 case LABEL_DECL: return sizeof (tree_label_decl);
816 case RESULT_DECL: return sizeof (tree_result_decl);
817 case CONST_DECL: return sizeof (tree_const_decl);
818 case TYPE_DECL: return sizeof (tree_type_decl);
819 case FUNCTION_DECL: return sizeof (tree_function_decl);
820 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
821 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
822 case NAMESPACE_DECL:
823 case IMPORTED_DECL:
824 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
825 default:
826 gcc_checking_assert (code >= NUM_TREE_CODES);
827 return lang_hooks.tree_size (code);
830 case tcc_type: /* a type node */
831 switch (code)
833 case OFFSET_TYPE:
834 case ENUMERAL_TYPE:
835 case BOOLEAN_TYPE:
836 case INTEGER_TYPE:
837 case REAL_TYPE:
838 case POINTER_TYPE:
839 case REFERENCE_TYPE:
840 case NULLPTR_TYPE:
841 case FIXED_POINT_TYPE:
842 case COMPLEX_TYPE:
843 case VECTOR_TYPE:
844 case ARRAY_TYPE:
845 case RECORD_TYPE:
846 case UNION_TYPE:
847 case QUAL_UNION_TYPE:
848 case VOID_TYPE:
849 case FUNCTION_TYPE:
850 case METHOD_TYPE:
851 case LANG_TYPE: return sizeof (tree_type_non_common);
852 default:
853 gcc_checking_assert (code >= NUM_TREE_CODES);
854 return lang_hooks.tree_size (code);
857 case tcc_reference: /* a reference */
858 case tcc_expression: /* an expression */
859 case tcc_statement: /* an expression with side effects */
860 case tcc_comparison: /* a comparison expression */
861 case tcc_unary: /* a unary arithmetic expression */
862 case tcc_binary: /* a binary arithmetic expression */
863 return (sizeof (struct tree_exp)
864 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
866 case tcc_constant: /* a constant */
867 switch (code)
869 case VOID_CST: return sizeof (tree_typed);
870 case INTEGER_CST: gcc_unreachable ();
871 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
872 case REAL_CST: return sizeof (tree_real_cst);
873 case FIXED_CST: return sizeof (tree_fixed_cst);
874 case COMPLEX_CST: return sizeof (tree_complex);
875 case VECTOR_CST: gcc_unreachable ();
876 case STRING_CST: gcc_unreachable ();
877 default:
878 gcc_checking_assert (code >= NUM_TREE_CODES);
879 return lang_hooks.tree_size (code);
882 case tcc_exceptional: /* something random, like an identifier. */
883 switch (code)
885 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
886 case TREE_LIST: return sizeof (tree_list);
888 case ERROR_MARK:
889 case PLACEHOLDER_EXPR: return sizeof (tree_common);
891 case TREE_VEC: gcc_unreachable ();
892 case OMP_CLAUSE: gcc_unreachable ();
894 case SSA_NAME: return sizeof (tree_ssa_name);
896 case STATEMENT_LIST: return sizeof (tree_statement_list);
897 case BLOCK: return sizeof (struct tree_block);
898 case CONSTRUCTOR: return sizeof (tree_constructor);
899 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
900 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
902 default:
903 gcc_checking_assert (code >= NUM_TREE_CODES);
904 return lang_hooks.tree_size (code);
907 default:
908 gcc_unreachable ();
912 /* Compute the number of bytes occupied by NODE. This routine only
913 looks at TREE_CODE, except for those nodes that have variable sizes. */
914 size_t
915 tree_size (const_tree node)
917 const enum tree_code code = TREE_CODE (node);
918 switch (code)
920 case INTEGER_CST:
921 return (sizeof (struct tree_int_cst)
922 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
924 case TREE_BINFO:
925 return (offsetof (struct tree_binfo, base_binfos)
926 + vec<tree, va_gc>
927 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
929 case TREE_VEC:
930 return (sizeof (struct tree_vec)
931 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
933 case VECTOR_CST:
934 return (sizeof (struct tree_vector)
935 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
937 case STRING_CST:
938 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
940 case OMP_CLAUSE:
941 return (sizeof (struct tree_omp_clause)
942 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
943 * sizeof (tree));
945 default:
946 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
947 return (sizeof (struct tree_exp)
948 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
949 else
950 return tree_code_size (code);
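/* Worked example (editor's addition, not part of tree.c): for an
   INTEGER_CST with TREE_INT_CST_EXT_NUNITS == 2, tree_size () returns

       sizeof (struct tree_int_cst) + (2 - 1) * sizeof (HOST_WIDE_INT)

   i.e. the fixed part of the node plus one HOST_WIDE_INT beyond the
   element already counted in the structure itself.  */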
954 /* Return tree node kind based on tree CODE. */
956 static tree_node_kind
957 get_stats_node_kind (enum tree_code code)
959 enum tree_code_class type = TREE_CODE_CLASS (code);
961 switch (type)
963 case tcc_declaration: /* A decl node */
964 return d_kind;
965 case tcc_type: /* a type node */
966 return t_kind;
967 case tcc_statement: /* an expression with side effects */
968 return s_kind;
969 case tcc_reference: /* a reference */
970 return r_kind;
971 case tcc_expression: /* an expression */
972 case tcc_comparison: /* a comparison expression */
973 case tcc_unary: /* a unary arithmetic expression */
974 case tcc_binary: /* a binary arithmetic expression */
975 return e_kind;
976 case tcc_constant: /* a constant */
977 return c_kind;
978 case tcc_exceptional: /* something random, like an identifier. */
979 switch (code)
981 case IDENTIFIER_NODE:
982 return id_kind;
983 case TREE_VEC:
984 return vec_kind;
985 case TREE_BINFO:
986 return binfo_kind;
987 case SSA_NAME:
988 return ssa_name_kind;
989 case BLOCK:
990 return b_kind;
991 case CONSTRUCTOR:
992 return constr_kind;
993 case OMP_CLAUSE:
994 return omp_clause_kind;
995 default:
996 return x_kind;
998 break;
999 case tcc_vl_exp:
1000 return e_kind;
1001 default:
1002 gcc_unreachable ();
1006 /* Record interesting allocation statistics for a tree node with CODE
1007 and LENGTH. */
1009 static void
1010 record_node_allocation_statistics (enum tree_code code, size_t length)
1012 if (!GATHER_STATISTICS)
1013 return;
1015 tree_node_kind kind = get_stats_node_kind (code);
1017 tree_code_counts[(int) code]++;
1018 tree_node_counts[(int) kind]++;
1019 tree_node_sizes[(int) kind] += length;
1022 /* Allocate and return a new UID from the DECL_UID namespace. */
1025 allocate_decl_uid (void)
1027 return next_decl_uid++;
1030 /* Return a newly allocated node of code CODE. For decl and type
1031 nodes, some other fields are initialized. The rest of the node is
1032 initialized to zero. This function cannot be used for TREE_VEC,
1033 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1034 tree_code_size.
1036 Achoo! I got a code in the node. */
1038 tree
1039 make_node (enum tree_code code MEM_STAT_DECL)
1041 tree t;
1042 enum tree_code_class type = TREE_CODE_CLASS (code);
1043 size_t length = tree_code_size (code);
1045 record_node_allocation_statistics (code, length);
1047 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1048 TREE_SET_CODE (t, code);
1050 switch (type)
1052 case tcc_statement:
1053 if (code != DEBUG_BEGIN_STMT)
1054 TREE_SIDE_EFFECTS (t) = 1;
1055 break;
1057 case tcc_declaration:
1058 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1060 if (code == FUNCTION_DECL)
1062 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1063 SET_DECL_MODE (t, FUNCTION_MODE);
1065 else
1066 SET_DECL_ALIGN (t, 1);
1068 DECL_SOURCE_LOCATION (t) = input_location;
1069 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1070 DECL_UID (t) = --next_debug_decl_uid;
1071 else
1073 DECL_UID (t) = allocate_decl_uid ();
1074 SET_DECL_PT_UID (t, -1);
1076 if (TREE_CODE (t) == LABEL_DECL)
1077 LABEL_DECL_UID (t) = -1;
1079 break;
1081 case tcc_type:
1082 TYPE_UID (t) = next_type_uid++;
1083 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1084 TYPE_USER_ALIGN (t) = 0;
1085 TYPE_MAIN_VARIANT (t) = t;
1086 TYPE_CANONICAL (t) = t;
1088 /* Default to no attributes for type, but let target change that. */
1089 TYPE_ATTRIBUTES (t) = NULL_TREE;
1090 targetm.set_default_type_attributes (t);
1092 /* We have not yet computed the alias set for this type. */
1093 TYPE_ALIAS_SET (t) = -1;
1094 break;
1096 case tcc_constant:
1097 TREE_CONSTANT (t) = 1;
1098 break;
1100 case tcc_expression:
1101 switch (code)
1103 case INIT_EXPR:
1104 case MODIFY_EXPR:
1105 case VA_ARG_EXPR:
1106 case PREDECREMENT_EXPR:
1107 case PREINCREMENT_EXPR:
1108 case POSTDECREMENT_EXPR:
1109 case POSTINCREMENT_EXPR:
1110 /* All of these have side-effects, no matter what their
1111 operands are. */
1112 TREE_SIDE_EFFECTS (t) = 1;
1113 break;
1115 default:
1116 break;
1118 break;
1120 case tcc_exceptional:
1121 switch (code)
1123 case TARGET_OPTION_NODE:
1124 TREE_TARGET_OPTION(t)
1125 = ggc_cleared_alloc<struct cl_target_option> ();
1126 break;
1128 case OPTIMIZATION_NODE:
1129 TREE_OPTIMIZATION (t)
1130 = ggc_cleared_alloc<struct cl_optimization> ();
1131 break;
1133 default:
1134 break;
1136 break;
1138 default:
1139 /* Other classes need no special treatment. */
1140 break;
1143 return t;
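/* Usage sketch (editor's addition, not part of tree.c): make_node () is the
   low-level allocator; callers typically go through wrappers that fill in
   the remaining fields, e.g.

       tree type = make_node (RECORD_TYPE);
       tree decl = build_decl (input_location, VAR_DECL,
                               get_identifier ("x"), integer_type_node);

   Variable-sized nodes (TREE_VEC, INTEGER_CST, STRING_CST, OMP_CLAUSE)
   must instead use their dedicated constructors such as make_tree_vec ()
   and make_int_cst ().  */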
1146 /* Free tree node. */
1148 void
1149 free_node (tree node)
1151 enum tree_code code = TREE_CODE (node);
1152 if (GATHER_STATISTICS)
1154 enum tree_node_kind kind = get_stats_node_kind (code);
1156 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1157 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1158 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1160 tree_code_counts[(int) TREE_CODE (node)]--;
1161 tree_node_counts[(int) kind]--;
1162 tree_node_sizes[(int) kind] -= tree_size (node);
1164 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1165 vec_free (CONSTRUCTOR_ELTS (node));
1166 else if (code == BLOCK)
1167 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1168 else if (code == TREE_BINFO)
1169 vec_free (BINFO_BASE_ACCESSES (node));
1170 else if (code == OPTIMIZATION_NODE)
1171 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1172 else if (code == TARGET_OPTION_NODE)
1173 cl_target_option_free (TREE_TARGET_OPTION (node));
1174 ggc_free (node);
1177 /* Return a new node with the same contents as NODE except that its
1178 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1180 tree
1181 copy_node (tree node MEM_STAT_DECL)
1183 tree t;
1184 enum tree_code code = TREE_CODE (node);
1185 size_t length;
1187 gcc_assert (code != STATEMENT_LIST);
1189 length = tree_size (node);
1190 record_node_allocation_statistics (code, length);
1191 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1192 memcpy (t, node, length);
1194 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1195 TREE_CHAIN (t) = 0;
1196 TREE_ASM_WRITTEN (t) = 0;
1197 TREE_VISITED (t) = 0;
1199 if (TREE_CODE_CLASS (code) == tcc_declaration)
1201 if (code == DEBUG_EXPR_DECL)
1202 DECL_UID (t) = --next_debug_decl_uid;
1203 else
1205 DECL_UID (t) = allocate_decl_uid ();
1206 if (DECL_PT_UID_SET_P (node))
1207 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1209 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1210 && DECL_HAS_VALUE_EXPR_P (node))
1212 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1213 DECL_HAS_VALUE_EXPR_P (t) = 1;
1215 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1216 if (VAR_P (node))
1218 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1219 t->decl_with_vis.symtab_node = NULL;
1221 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1223 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1224 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1226 if (TREE_CODE (node) == FUNCTION_DECL)
1228 DECL_STRUCT_FUNCTION (t) = NULL;
1229 t->decl_with_vis.symtab_node = NULL;
1232 else if (TREE_CODE_CLASS (code) == tcc_type)
1234 TYPE_UID (t) = next_type_uid++;
1235 /* The following is so that the debug code for
1236 the copy is different from the original type.
1237 The two statements usually duplicate each other
1238 (because they clear fields of the same union),
1239 but the optimizer should catch that. */
1240 TYPE_SYMTAB_ADDRESS (t) = 0;
1241 TYPE_SYMTAB_DIE (t) = 0;
1243 /* Do not copy the values cache. */
1244 if (TYPE_CACHED_VALUES_P (t))
1246 TYPE_CACHED_VALUES_P (t) = 0;
1247 TYPE_CACHED_VALUES (t) = NULL_TREE;
1250 else if (code == TARGET_OPTION_NODE)
1252 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1253 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1254 sizeof (struct cl_target_option));
1256 else if (code == OPTIMIZATION_NODE)
1258 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1259 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1260 sizeof (struct cl_optimization));
1263 return t;
1266 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1267 For example, this can copy a list made of TREE_LIST nodes. */
1269 tree
1270 copy_list (tree list)
1272 tree head;
1273 tree prev, next;
1275 if (list == 0)
1276 return 0;
1278 head = prev = copy_node (list);
1279 next = TREE_CHAIN (list);
1280 while (next)
1282 TREE_CHAIN (prev) = copy_node (next);
1283 prev = TREE_CHAIN (prev);
1284 next = TREE_CHAIN (next);
1286 return head;
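/* Usage sketch (editor's addition, not part of tree.c): copy_list () copies
   only the spine of a TREE_CHAIN'ed list; the payload of each copied node
   still points at the original operands, e.g.

       tree orig = tree_cons (NULL_TREE, integer_zero_node, NULL_TREE);
       tree dup  = copy_list (orig);
       gcc_checking_assert (TREE_VALUE (dup) == TREE_VALUE (orig));
*/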
1290 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1291 INTEGER_CST with value CST and type TYPE. */
1293 static unsigned int
1294 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1296 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1297 /* We need extra HWIs if CST is an unsigned integer with its
1298 upper bit set. */
1299 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1300 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1301 return cst.get_len ();
1304 /* Return a new INTEGER_CST with value CST and type TYPE. */
1306 static tree
1307 build_new_int_cst (tree type, const wide_int &cst)
1309 unsigned int len = cst.get_len ();
1310 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1311 tree nt = make_int_cst (len, ext_len);
1313 if (len < ext_len)
1315 --ext_len;
1316 TREE_INT_CST_ELT (nt, ext_len)
1317 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1318 for (unsigned int i = len; i < ext_len; ++i)
1319 TREE_INT_CST_ELT (nt, i) = -1;
1321 else if (TYPE_UNSIGNED (type)
1322 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1324 len--;
1325 TREE_INT_CST_ELT (nt, len)
1326 = zext_hwi (cst.elt (len),
1327 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1330 for (unsigned int i = 0; i < len; i++)
1331 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1332 TREE_TYPE (nt) = type;
1333 return nt;
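/* Worked example (editor's addition, not part of tree.c), assuming a 64-bit
   HOST_WIDE_INT: for a 64-bit unsigned type, the value 0xffffffffffffffff
   is held in wide_int form as a single element equal to -1, so len == 1,
   but because the type is unsigned and the top bit is set,
   get_int_cst_ext_nunits () returns 64 / HOST_BITS_PER_WIDE_INT + 1 == 2;
   the extra element is zero-filled so unsigned consumers see the value as
   non-negative.  */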
1336 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1338 static tree
1339 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1340 CXX_MEM_STAT_INFO)
1342 size_t length = sizeof (struct tree_poly_int_cst);
1343 record_node_allocation_statistics (POLY_INT_CST, length);
1345 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1347 TREE_SET_CODE (t, POLY_INT_CST);
1348 TREE_CONSTANT (t) = 1;
1349 TREE_TYPE (t) = type;
1350 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1351 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1352 return t;
1355 /* Create a constant tree that contains CST sign-extended to TYPE. */
1357 tree
1358 build_int_cst (tree type, poly_int64 cst)
1360 /* Support legacy code. */
1361 if (!type)
1362 type = integer_type_node;
1364 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1367 /* Create a constant tree that contains CST zero-extended to TYPE. */
1369 tree
1370 build_int_cstu (tree type, poly_uint64 cst)
1372 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1375 /* Create a constant tree that contains CST sign-extended to TYPE. */
1377 tree
1378 build_int_cst_type (tree type, poly_int64 cst)
1380 gcc_assert (type);
1381 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1384 /* Constructs a tree of type TYPE with the value given by CST. Signedness
1385 of CST is assumed to be the same as the signedness of TYPE. */
1387 tree
1388 double_int_to_tree (tree type, double_int cst)
1390 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1393 /* We force the wide_int CST to the range of the type TYPE by sign or
1394 zero extending it. OVERFLOWABLE indicates whether we are interested in
1395 overflow of the value: when >0 we are only interested in signed
1396 overflow, for <0 we are interested in any overflow. OVERFLOWED
1397 indicates whether overflow has already occurred. We force
1398 CST's value to be within the range of TYPE (by setting to 0 or 1 all
1399 the bits outside the type's range). We set TREE_OVERFLOW if
1400 OVERFLOWED is nonzero,
1401 or OVERFLOWABLE is >0 and signed overflow occurs,
1402 or OVERFLOWABLE is <0 and any overflow occurs.
1403 We return a new tree node for the extended wide_int. The node
1404 is shared if no overflow flags are set. */
1408 tree
1409 force_fit_type (tree type, const poly_wide_int_ref &cst,
1410 int overflowable, bool overflowed)
1412 signop sign = TYPE_SIGN (type);
1414 /* If we need to set overflow flags, return a new unshared node. */
1415 if (overflowed || !wi::fits_to_tree_p (cst, type))
1417 if (overflowed
1418 || overflowable < 0
1419 || (overflowable > 0 && sign == SIGNED))
1421 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1422 sign);
1423 tree t;
1424 if (tmp.is_constant ())
1425 t = build_new_int_cst (type, tmp.coeffs[0]);
1426 else
1428 tree coeffs[NUM_POLY_INT_COEFFS];
1429 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1431 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1432 TREE_OVERFLOW (coeffs[i]) = 1;
1434 t = build_new_poly_int_cst (type, coeffs);
1436 TREE_OVERFLOW (t) = 1;
1437 return t;
1441 /* Else build a shared node. */
1442 return wide_int_to_tree (type, cst);
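/* Worked example (editor's addition, not part of tree.c): forcing the value
   300 into unsigned char (precision 8) truncates it to 44.  With
   OVERFLOWABLE < 0 (or OVERFLOWED set) a fresh node with TREE_OVERFLOW set
   is returned; with OVERFLOWABLE == 0 and an unsigned type the truncated
   value simply comes back as a shared node from wide_int_to_tree ().  */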
1445 /* These are the hash table functions for the hash table of INTEGER_CST
1446 nodes of a sizetype. */
1448 /* Return the hash code for X, an INTEGER_CST. */
1450 hashval_t
1451 int_cst_hasher::hash (tree x)
1453 const_tree const t = x;
1454 hashval_t code = TYPE_UID (TREE_TYPE (t));
1455 int i;
1457 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1458 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1460 return code;
1463 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1464 is the same as that given by Y, also an INTEGER_CST tree node. */
1466 bool
1467 int_cst_hasher::equal (tree x, tree y)
1469 const_tree const xt = x;
1470 const_tree const yt = y;
1472 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1473 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1474 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1475 return false;
1477 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1478 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1479 return false;
1481 return true;
1484 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1485 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1486 number of slots that can be cached for the type. */
1488 static inline tree
1489 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1490 int slot, int max_slots)
1492 gcc_checking_assert (slot >= 0);
1493 /* Initialize cache. */
1494 if (!TYPE_CACHED_VALUES_P (type))
1496 TYPE_CACHED_VALUES_P (type) = 1;
1497 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1499 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1500 if (!t)
1502 /* Create a new shared int. */
1503 t = build_new_int_cst (type, cst);
1504 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1506 return t;
1509 /* Create an INT_CST node of TYPE and value CST.
1510 The returned node is always shared. For small integers we use a
1511 per-type vector cache, for larger ones we use a single hash table.
1512 The value is extended from its precision according to the sign of
1513 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1514 the upper bits and ensures that hashing and value equality based
1515 upon the underlying HOST_WIDE_INTs works without masking. */
1517 static tree
1518 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1520 tree t;
1521 int ix = -1;
1522 int limit = 0;
1524 gcc_assert (type);
1525 unsigned int prec = TYPE_PRECISION (type);
1526 signop sgn = TYPE_SIGN (type);
1528 /* Verify that everything is canonical. */
1529 int l = pcst.get_len ();
1530 if (l > 1)
1532 if (pcst.elt (l - 1) == 0)
1533 gcc_checking_assert (pcst.elt (l - 2) < 0);
1534 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1535 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1538 wide_int cst = wide_int::from (pcst, prec, sgn);
1539 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1541 enum tree_code code = TREE_CODE (type);
1542 if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1544 /* Cache NULL pointer and zero bounds. */
1545 if (cst == 0)
1546 ix = 0;
1547 /* Cache upper bounds of pointers. */
1548 else if (cst == wi::max_value (prec, sgn))
1549 ix = 1;
1550 /* Cache 1 which is used for a non-zero range. */
1551 else if (cst == 1)
1552 ix = 2;
1554 if (ix >= 0)
1556 t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1557 /* Make sure no one is clobbering the shared constant. */
1558 gcc_checking_assert (TREE_TYPE (t) == type
1559 && cst == wi::to_wide (t));
1560 return t;
1563 if (ext_len == 1)
1565 /* We just need to store a single HOST_WIDE_INT. */
1566 HOST_WIDE_INT hwi;
1567 if (TYPE_UNSIGNED (type))
1568 hwi = cst.to_uhwi ();
1569 else
1570 hwi = cst.to_shwi ();
1572 switch (code)
1574 case NULLPTR_TYPE:
1575 gcc_assert (hwi == 0);
1576 /* Fallthru. */
1578 case POINTER_TYPE:
1579 case REFERENCE_TYPE:
1580 /* Ignore pointers, as they were already handled above. */
1581 break;
1583 case BOOLEAN_TYPE:
1584 /* Cache false or true. */
1585 limit = 2;
1586 if (IN_RANGE (hwi, 0, 1))
1587 ix = hwi;
1588 break;
1590 case INTEGER_TYPE:
1591 case OFFSET_TYPE:
1592 if (TYPE_SIGN (type) == UNSIGNED)
1594 /* Cache [0, N). */
1595 limit = param_integer_share_limit;
1596 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1597 ix = hwi;
1599 else
1601 /* Cache [-1, N). */
1602 limit = param_integer_share_limit + 1;
1603 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1604 ix = hwi + 1;
1606 break;
1608 case ENUMERAL_TYPE:
1609 break;
1611 default:
1612 gcc_unreachable ();
1615 if (ix >= 0)
1617 t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1618 /* Make sure no one is clobbering the shared constant. */
1619 gcc_checking_assert (TREE_TYPE (t) == type
1620 && TREE_INT_CST_NUNITS (t) == 1
1621 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1622 && TREE_INT_CST_EXT_NUNITS (t) == 1
1623 && TREE_INT_CST_ELT (t, 0) == hwi);
1624 return t;
1626 else
1628 /* Use the cache of larger shared ints, using int_cst_node as
1629 a temporary. */
1631 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1632 TREE_TYPE (int_cst_node) = type;
1634 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1635 t = *slot;
1636 if (!t)
1638 /* Insert this one into the hash table. */
1639 t = int_cst_node;
1640 *slot = t;
1641 /* Make a new node for next time round. */
1642 int_cst_node = make_int_cst (1, 1);
1646 else
1648 /* The value either hashes properly or we drop it on the floor
1649 for the gc to take care of. There will not be enough of them
1650 to worry about. */
1652 tree nt = build_new_int_cst (type, cst);
1653 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1654 t = *slot;
1655 if (!t)
1657 /* Insert this one into the hash table. */
1658 t = nt;
1659 *slot = t;
1661 else
1662 ggc_free (nt);
1665 return t;
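/* Usage sketch (editor's addition, not part of tree.c): because of the
   caching above, repeated requests for the same small constant yield the
   same node (with the default param_integer_share_limit), e.g.

       tree a = build_int_cst (integer_type_node, 5);
       tree b = build_int_cst (integer_type_node, 5);
       gcc_checking_assert (a == b);

   Larger values are shared through int_cst_hash_table instead.  */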
1668 hashval_t
1669 poly_int_cst_hasher::hash (tree t)
1671 inchash::hash hstate;
1673 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1674 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1675 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1677 return hstate.end ();
1680 bool
1681 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1683 if (TREE_TYPE (x) != y.first)
1684 return false;
1685 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1686 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1687 return false;
1688 return true;
1691 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1692 The elements must also have type TYPE. */
1694 tree
1695 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1697 unsigned int prec = TYPE_PRECISION (type);
1698 gcc_assert (prec <= values.coeffs[0].get_precision ());
1699 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1701 inchash::hash h;
1702 h.add_int (TYPE_UID (type));
1703 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1704 h.add_wide_int (c.coeffs[i]);
1705 poly_int_cst_hasher::compare_type comp (type, &c);
1706 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1707 INSERT);
1708 if (*slot == NULL_TREE)
1710 tree coeffs[NUM_POLY_INT_COEFFS];
1711 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1712 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1713 *slot = build_new_poly_int_cst (type, coeffs);
1715 return *slot;
1718 /* Create a constant tree with value VALUE in type TYPE. */
1720 tree
1721 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1723 if (value.is_constant ())
1724 return wide_int_to_tree_1 (type, value.coeffs[0]);
1725 return build_poly_int_cst (type, value);
1728 void
1729 cache_integer_cst (tree t)
1731 tree type = TREE_TYPE (t);
1732 int ix = -1;
1733 int limit = 0;
1734 int prec = TYPE_PRECISION (type);
1736 gcc_assert (!TREE_OVERFLOW (t));
1738 switch (TREE_CODE (type))
1740 case NULLPTR_TYPE:
1741 gcc_assert (integer_zerop (t));
1742 /* Fallthru. */
1744 case POINTER_TYPE:
1745 case REFERENCE_TYPE:
1746 /* Cache NULL pointer. */
1747 if (integer_zerop (t))
1749 limit = 1;
1750 ix = 0;
1752 break;
1754 case BOOLEAN_TYPE:
1755 /* Cache false or true. */
1756 limit = 2;
1757 if (wi::ltu_p (wi::to_wide (t), 2))
1758 ix = TREE_INT_CST_ELT (t, 0);
1759 break;
1761 case INTEGER_TYPE:
1762 case OFFSET_TYPE:
1763 if (TYPE_UNSIGNED (type))
1765 /* Cache 0..N */
1766 limit = param_integer_share_limit;
1768 /* This is a little hokey, but if the prec is smaller than
1769 what is necessary to hold param_integer_share_limit, then the
1770 obvious test will not get the correct answer. */
1771 if (prec < HOST_BITS_PER_WIDE_INT)
1773 if (tree_to_uhwi (t)
1774 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1775 ix = tree_to_uhwi (t);
1777 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1778 ix = tree_to_uhwi (t);
1780 else
1782 /* Cache -1..N */
1783 limit = param_integer_share_limit + 1;
1785 if (integer_minus_onep (t))
1786 ix = 0;
1787 else if (!wi::neg_p (wi::to_wide (t)))
1789 if (prec < HOST_BITS_PER_WIDE_INT)
1791 if (tree_to_shwi (t) < param_integer_share_limit)
1792 ix = tree_to_shwi (t) + 1;
1794 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1795 ix = tree_to_shwi (t) + 1;
1798 break;
1800 case ENUMERAL_TYPE:
1801 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1802 members. */
1803 break;
1805 default:
1806 gcc_unreachable ();
1809 if (ix >= 0)
1811 /* Look for it in the type's vector of small shared ints. */
1812 if (!TYPE_CACHED_VALUES_P (type))
1814 TYPE_CACHED_VALUES_P (type) = 1;
1815 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1818 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1819 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1821 else
1823 /* Use the cache of larger shared ints. */
1824 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1825 /* If there is already an entry for the number verify it's the
1826 same. */
1827 if (*slot)
1828 gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
1829 else
1830 /* Otherwise insert this one into the hash table. */
1831 *slot = t;
1836 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1837 and the rest are zeros. */
1839 tree
1840 build_low_bits_mask (tree type, unsigned bits)
1842 gcc_assert (bits <= TYPE_PRECISION (type));
1844 return wide_int_to_tree (type, wi::mask (bits, false,
1845 TYPE_PRECISION (type)));
1848 /* Checks that X is an integer constant that can be expressed in (unsigned)
1849 HOST_WIDE_INT without loss of precision. */
1851 bool
1852 cst_and_fits_in_hwi (const_tree x)
1854 return (TREE_CODE (x) == INTEGER_CST
1855 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1858 /* Build a newly constructed VECTOR_CST with the given values of
1859 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1861 tree
1862 make_vector (unsigned log2_npatterns,
1863 unsigned int nelts_per_pattern MEM_STAT_DECL)
1865 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1866 tree t;
1867 unsigned npatterns = 1 << log2_npatterns;
1868 unsigned encoded_nelts = npatterns * nelts_per_pattern;
1869 unsigned length = (sizeof (struct tree_vector)
1870 + (encoded_nelts - 1) * sizeof (tree));
1872 record_node_allocation_statistics (VECTOR_CST, length);
1874 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1876 TREE_SET_CODE (t, VECTOR_CST);
1877 TREE_CONSTANT (t) = 1;
1878 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1879 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1881 return t;
1884 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1885 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1887 tree
1888 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1890 if (vec_safe_length (v) == 0)
1891 return build_zero_cst (type);
1893 unsigned HOST_WIDE_INT idx, nelts;
1894 tree value;
1896 /* We can't construct a VECTOR_CST for a variable number of elements. */
1897 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
1898 tree_vector_builder vec (type, nelts, 1);
1899 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1901 if (TREE_CODE (value) == VECTOR_CST)
1903 /* If NELTS is constant then this must be too. */
1904 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
1905 for (unsigned i = 0; i < sub_nelts; ++i)
1906 vec.quick_push (VECTOR_CST_ELT (value, i));
1908 else
1909 vec.quick_push (value);
1911 while (vec.length () < nelts)
1912 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1914 return vec.build ();
1917 /* Build a vector of type VECTYPE where all the elements are SCs. */
1918 tree
1919 build_vector_from_val (tree vectype, tree sc)
1921 unsigned HOST_WIDE_INT i, nunits;
1923 if (sc == error_mark_node)
1924 return sc;
1926 /* Verify that the vector type is suitable for SC. Note that there
1927 is some inconsistency in the type-system with respect to restrict
1928 qualifications of pointers. Vector types always have a main-variant
1929 element type and the qualification is applied to the vector-type.
1930 So TREE_TYPE (vector-type) does not return a properly qualified
1931 vector element-type. */
1932 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1933 TREE_TYPE (vectype)));
1935 if (CONSTANT_CLASS_P (sc))
1937 tree_vector_builder v (vectype, 1, 1);
1938 v.quick_push (sc);
1939 return v.build ();
1941 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
1942 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
1943 else
1945 vec<constructor_elt, va_gc> *v;
1946 vec_alloc (v, nunits);
1947 for (i = 0; i < nunits; ++i)
1948 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1949 return build_constructor (vectype, v);
1953 /* If TYPE is not a vector type, just return SC, otherwise return
1954 build_vector_from_val (TYPE, SC). */
1956 tree
1957 build_uniform_cst (tree type, tree sc)
1959 if (!VECTOR_TYPE_P (type))
1960 return sc;
1962 return build_vector_from_val (type, sc);
1965 /* Build a vector series of type TYPE in which element I has the value
1966 BASE + I * STEP. The result is a constant if BASE and STEP are constant
1967 and a VEC_SERIES_EXPR otherwise. */
1969 tree
1970 build_vec_series (tree type, tree base, tree step)
1972 if (integer_zerop (step))
1973 return build_vector_from_val (type, base);
1974 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
1976 tree_vector_builder builder (type, 1, 3);
1977 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
1978 wi::to_wide (base) + wi::to_wide (step));
1979 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
1980 wi::to_wide (elt1) + wi::to_wide (step));
1981 builder.quick_push (base);
1982 builder.quick_push (elt1);
1983 builder.quick_push (elt2);
1984 return builder.build ();
1986 return build2 (VEC_SERIES_EXPR, type, base, step);
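/* Worked example (editor's addition, not part of tree.c): for an integer
   vector type with BASE = 0 and STEP = 1, the constant case encodes the
   series { 0, 1, 2, ... } as a single pattern of three elements (0, 1, 2);
   the VECTOR_CST encoding extrapolates the remaining elements from that
   stepped pattern.  */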
1989 /* Return a vector with the same number of units and number of bits
1990 as VEC_TYPE, but in which the elements are a linear series of unsigned
1991 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
1993 tree
1994 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
1996 tree index_vec_type = vec_type;
1997 tree index_elt_type = TREE_TYPE (vec_type);
1998 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
1999 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2001 index_elt_type = build_nonstandard_integer_type
2002 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2003 index_vec_type = build_vector_type (index_elt_type, nunits);
2006 tree_vector_builder v (index_vec_type, 1, 3);
2007 for (unsigned int i = 0; i < 3; ++i)
2008 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2009 return v.build ();
2012 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2013 elements are A and the rest are B. */
2015 tree
2016 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2018 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2019 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2020 /* Optimize the constant case. */
2021 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2022 count /= 2;
2023 tree_vector_builder builder (vec_type, count, 2);
2024 for (unsigned int i = 0; i < count * 2; ++i)
2025 builder.quick_push (i < num_a ? a : b);
2026 return builder.build ();
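/* Editorial worked example (not part of the original source): for a
   constant 8-element vector type and NUM_A == 3,
   build_vector_a_then_b (v8si_type, 3, a, b) produces the vector
   { a, a, a, b, b, b, b, b }.  V8SI_TYPE, A and B are placeholder names
   used only for this example.  */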
2029 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2030 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2032 void
2033 recompute_constructor_flags (tree c)
2035 unsigned int i;
2036 tree val;
2037 bool constant_p = true;
2038 bool side_effects_p = false;
2039 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2041 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2043 /* Mostly ctors will have elts that don't have side-effects, so
2044 the usual case is to scan all the elements. Hence a single
2045 loop for both const and side effects, rather than one loop
2046 each (with early outs). */
2047 if (!TREE_CONSTANT (val))
2048 constant_p = false;
2049 if (TREE_SIDE_EFFECTS (val))
2050 side_effects_p = true;
2053 TREE_SIDE_EFFECTS (c) = side_effects_p;
2054 TREE_CONSTANT (c) = constant_p;
2057 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2058 CONSTRUCTOR C. */
2060 void
2061 verify_constructor_flags (tree c)
2063 unsigned int i;
2064 tree val;
2065 bool constant_p = TREE_CONSTANT (c);
2066 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2067 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2069 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2071 if (constant_p && !TREE_CONSTANT (val))
2072 internal_error ("non-constant element in constant CONSTRUCTOR");
2073 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2074 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2078 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2079 are in the vec pointed to by VALS. */
2080 tree
2081 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2083 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2085 TREE_TYPE (c) = type;
2086 CONSTRUCTOR_ELTS (c) = vals;
2088 recompute_constructor_flags (c);
2090 return c;
2093 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2094 INDEX and VALUE. */
2095 tree
2096 build_constructor_single (tree type, tree index, tree value)
2098 vec<constructor_elt, va_gc> *v;
2099 constructor_elt elt = {index, value};
2101 vec_alloc (v, 1);
2102 v->quick_push (elt);
2104 return build_constructor (type, v);
2108 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2109 are in a list pointed to by VALS. */
2110 tree
2111 build_constructor_from_list (tree type, tree vals)
2113 tree t;
2114 vec<constructor_elt, va_gc> *v = NULL;
2116 if (vals)
2118 vec_alloc (v, list_length (vals));
2119 for (t = vals; t; t = TREE_CHAIN (t))
2120 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2123 return build_constructor (type, v);
2126 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2127 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2128 fields in the constructor remain null. */
2130 tree
2131 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2133 vec<constructor_elt, va_gc> *v = NULL;
2135 for (tree t : *vals)
2136 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2138 return build_constructor (type, v);
2141 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2142 of elements, provided as index/value pairs. */
2144 tree
2145 build_constructor_va (tree type, int nelts, ...)
2147 vec<constructor_elt, va_gc> *v = NULL;
2148 va_list p;
2150 va_start (p, nelts);
2151 vec_alloc (v, nelts);
2152 while (nelts--)
2154 tree index = va_arg (p, tree);
2155 tree value = va_arg (p, tree);
2156 CONSTRUCTOR_APPEND_ELT (v, index, value);
2158 va_end (p);
2159 return build_constructor (type, v);
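/* Editorial usage sketch (not part of the original source): building an
   initializer for a two-field record.  REC_TYPE, FIELD1 and FIELD2 are
   hypothetical names for a RECORD_TYPE and its FIELD_DECLs.

     tree ctor = build_constructor_va (rec_type, 2,
                                       field1, integer_one_node,
                                       field2, integer_zero_node);

   TREE_CONSTANT and TREE_SIDE_EFFECTS of the result are derived from the
   elements by build_constructor via recompute_constructor_flags.  */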
2162 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2164 tree
2165 build_clobber (tree type)
2167 tree clobber = build_constructor (type, NULL);
2168 TREE_THIS_VOLATILE (clobber) = true;
2169 return clobber;
2172 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2174 tree
2175 build_fixed (tree type, FIXED_VALUE_TYPE f)
2177 tree v;
2178 FIXED_VALUE_TYPE *fp;
2180 v = make_node (FIXED_CST);
2181 fp = ggc_alloc<fixed_value> ();
2182 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2184 TREE_TYPE (v) = type;
2185 TREE_FIXED_CST_PTR (v) = fp;
2186 return v;
2189 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2191 tree
2192 build_real (tree type, REAL_VALUE_TYPE d)
2194 tree v;
2195 REAL_VALUE_TYPE *dp;
2196 int overflow = 0;
2198 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2199 Consider doing it via real_convert now. */
2201 v = make_node (REAL_CST);
2202 dp = ggc_alloc<real_value> ();
2203 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2205 TREE_TYPE (v) = type;
2206 TREE_REAL_CST_PTR (v) = dp;
2207 TREE_OVERFLOW (v) = overflow;
2208 return v;
2211 /* Like build_real, but first truncate D to the type. */
2213 tree
2214 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2216 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2219 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
2220 node I, converted for the floating-point mode of TYPE. */
2222 REAL_VALUE_TYPE
2223 real_value_from_int_cst (const_tree type, const_tree i)
2225 REAL_VALUE_TYPE d;
2227 /* Clear all bits of the real value type so that we can later do
2228 bitwise comparisons to see if two values are the same. */
2229 memset (&d, 0, sizeof d);
2231 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2232 TYPE_SIGN (TREE_TYPE (i)));
2233 return d;
2236 /* Given a tree representing an integer constant I, return a tree
2237 representing the same value as a floating-point constant of type TYPE. */
2239 tree
2240 build_real_from_int_cst (tree type, const_tree i)
2242 tree v;
2243 int overflow = TREE_OVERFLOW (i);
2245 v = build_real (type, real_value_from_int_cst (type, i));
2247 TREE_OVERFLOW (v) |= overflow;
2248 return v;
2251 /* Return a newly constructed STRING_CST node whose value is the LEN
2252 characters at STR when STR is nonnull, or all zeros otherwise.
2253 Note that for a C string literal, LEN should include the trailing NUL.
2254 The TREE_TYPE is not initialized. */
2256 tree
2257 build_string (unsigned len, const char *str /*= NULL */)
2259 /* Do not waste bytes provided by padding of struct tree_string. */
2260 unsigned size = len + offsetof (struct tree_string, str) + 1;
2262 record_node_allocation_statistics (STRING_CST, size);
2264 tree s = (tree) ggc_internal_alloc (size);
2266 memset (s, 0, sizeof (struct tree_typed));
2267 TREE_SET_CODE (s, STRING_CST);
2268 TREE_CONSTANT (s) = 1;
2269 TREE_STRING_LENGTH (s) = len;
2270 if (str)
2271 memcpy (s->string.str, str, len);
2272 else
2273 memset (s->string.str, 0, len);
2274 s->string.str[len] = '\0';
2276 return s;
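/* Editorial usage sketch (not part of the original source):

     tree str = build_string (sizeof "abc", "abc");

   creates a STRING_CST whose TREE_STRING_LENGTH is 4, i.e. including the
   trailing NUL as required above.  The caller must still set TREE_TYPE to
   a suitable array type.  */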
2279 /* Return a newly constructed COMPLEX_CST node whose value is
2280 specified by the real and imaginary parts REAL and IMAG.
2281 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2282 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2284 tree
2285 build_complex (tree type, tree real, tree imag)
2287 gcc_assert (CONSTANT_CLASS_P (real));
2288 gcc_assert (CONSTANT_CLASS_P (imag));
2290 tree t = make_node (COMPLEX_CST);
2292 TREE_REALPART (t) = real;
2293 TREE_IMAGPART (t) = imag;
2294 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2295 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2296 return t;
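/* Editorial usage sketch (not part of the original source): the complex
   integer constant 3 + 4i, letting build_complex derive the type from the
   real part by passing NULL_TREE:

     tree z = build_complex (NULL_TREE,
                             build_int_cst (integer_type_node, 3),
                             build_int_cst (integer_type_node, 4));

   Both parts must be constants, and their overflow flags are propagated
   to the result, as the code above shows.  */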
2299 /* Build a complex (inf +- 0i), such as for the result of cproj.
2300 TYPE is the complex tree type of the result. If NEG is true, the
2301 imaginary zero is negative. */
2303 tree
2304 build_complex_inf (tree type, bool neg)
2306 REAL_VALUE_TYPE rinf, rzero = dconst0;
2308 real_inf (&rinf);
2309 rzero.sign = neg;
2310 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2311 build_real (TREE_TYPE (type), rzero));
2314 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2315 element is set to 1. In particular, this is 1 + i for complex types. */
2317 tree
2318 build_each_one_cst (tree type)
2320 if (TREE_CODE (type) == COMPLEX_TYPE)
2322 tree scalar = build_one_cst (TREE_TYPE (type));
2323 return build_complex (type, scalar, scalar);
2325 else
2326 return build_one_cst (type);
2329 /* Return a constant of arithmetic type TYPE which is the
2330 multiplicative identity of the set TYPE. */
2332 tree
2333 build_one_cst (tree type)
2335 switch (TREE_CODE (type))
2337 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2338 case POINTER_TYPE: case REFERENCE_TYPE:
2339 case OFFSET_TYPE:
2340 return build_int_cst (type, 1);
2342 case REAL_TYPE:
2343 return build_real (type, dconst1);
2345 case FIXED_POINT_TYPE:
2346 /* We can only generate 1 for accum types. */
2347 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2348 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2350 case VECTOR_TYPE:
2352 tree scalar = build_one_cst (TREE_TYPE (type));
2354 return build_vector_from_val (type, scalar);
2357 case COMPLEX_TYPE:
2358 return build_complex (type,
2359 build_one_cst (TREE_TYPE (type)),
2360 build_zero_cst (TREE_TYPE (type)));
2362 default:
2363 gcc_unreachable ();
2367 /* Return an integer of type TYPE containing all 1's in as much precision as
2368 it contains, or a complex or vector whose subparts are such integers. */
2370 tree
2371 build_all_ones_cst (tree type)
2373 if (TREE_CODE (type) == COMPLEX_TYPE)
2375 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2376 return build_complex (type, scalar, scalar);
2378 else
2379 return build_minus_one_cst (type);
2382 /* Return a constant of arithmetic type TYPE which is the
2383 opposite of the multiplicative identity of the set TYPE. */
2385 tree
2386 build_minus_one_cst (tree type)
2388 switch (TREE_CODE (type))
2390 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2391 case POINTER_TYPE: case REFERENCE_TYPE:
2392 case OFFSET_TYPE:
2393 return build_int_cst (type, -1);
2395 case REAL_TYPE:
2396 return build_real (type, dconstm1);
2398 case FIXED_POINT_TYPE:
2399 /* We can only generate -1 for accum types. */
2400 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2401 return build_fixed (type,
2402 fixed_from_double_int (double_int_minus_one,
2403 SCALAR_TYPE_MODE (type)));
2405 case VECTOR_TYPE:
2407 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2409 return build_vector_from_val (type, scalar);
2412 case COMPLEX_TYPE:
2413 return build_complex (type,
2414 build_minus_one_cst (TREE_TYPE (type)),
2415 build_zero_cst (TREE_TYPE (type)));
2417 default:
2418 gcc_unreachable ();
2422 /* Build 0 constant of type TYPE. This is used by constructor folding
2423 and thus the constant should be represented in memory by
2424 zero(es). */
2426 tree
2427 build_zero_cst (tree type)
2429 switch (TREE_CODE (type))
2431 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2432 case POINTER_TYPE: case REFERENCE_TYPE:
2433 case OFFSET_TYPE: case NULLPTR_TYPE:
2434 return build_int_cst (type, 0);
2436 case REAL_TYPE:
2437 return build_real (type, dconst0);
2439 case FIXED_POINT_TYPE:
2440 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2442 case VECTOR_TYPE:
2444 tree scalar = build_zero_cst (TREE_TYPE (type));
2446 return build_vector_from_val (type, scalar);
2449 case COMPLEX_TYPE:
2451 tree zero = build_zero_cst (TREE_TYPE (type));
2453 return build_complex (type, zero, zero);
2456 default:
2457 if (!AGGREGATE_TYPE_P (type))
2458 return fold_convert (type, integer_zero_node);
2459 return build_constructor (type, NULL);
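/* Editorial worked example (not part of the original source): the *_cst
   helpers above recurse through composite types.  For instance

     tree a = build_one_cst (complex_float_type_node);
     tree b = build_each_one_cst (complex_float_type_node);
     tree c = build_zero_cst (float_type_node);

   gives A == 1.0 + 0.0i (the multiplicative identity), B == 1.0 + 1.0i
   (each element set to one) and C == 0.0.  */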
2464 /* Build a BINFO with room for BASE_BINFOS base binfos. */
2466 tree
2467 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2469 tree t;
2470 size_t length = (offsetof (struct tree_binfo, base_binfos)
2471 + vec<tree, va_gc>::embedded_size (base_binfos));
2473 record_node_allocation_statistics (TREE_BINFO, length);
2475 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2477 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2479 TREE_SET_CODE (t, TREE_BINFO);
2481 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2483 return t;
2486 /* Create a CASE_LABEL_EXPR tree node and return it. */
2488 tree
2489 build_case_label (tree low_value, tree high_value, tree label_decl)
2491 tree t = make_node (CASE_LABEL_EXPR);
2493 TREE_TYPE (t) = void_type_node;
2494 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2496 CASE_LOW (t) = low_value;
2497 CASE_HIGH (t) = high_value;
2498 CASE_LABEL (t) = label_decl;
2499 CASE_CHAIN (t) = NULL_TREE;
2501 return t;
2504 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2505 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2506 The latter determines the length of the HOST_WIDE_INT vector. */
2508 tree
2509 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2511 tree t;
2512 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2513 + sizeof (struct tree_int_cst));
2515 gcc_assert (len);
2516 record_node_allocation_statistics (INTEGER_CST, length);
2518 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2520 TREE_SET_CODE (t, INTEGER_CST);
2521 TREE_INT_CST_NUNITS (t) = len;
2522 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2523 /* to_offset can only be applied to trees that are offset_int-sized
2524 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2525 must be exactly the precision of offset_int and so LEN is correct. */
2526 if (ext_len <= OFFSET_INT_ELTS)
2527 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2528 else
2529 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2531 TREE_CONSTANT (t) = 1;
2533 return t;
2536 /* Build a newly constructed TREE_VEC node of length LEN. */
2538 tree
2539 make_tree_vec (int len MEM_STAT_DECL)
2541 tree t;
2542 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2544 record_node_allocation_statistics (TREE_VEC, length);
2546 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2548 TREE_SET_CODE (t, TREE_VEC);
2549 TREE_VEC_LENGTH (t) = len;
2551 return t;
2554 /* Grow a TREE_VEC node to new length LEN. */
2556 tree
2557 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2559 gcc_assert (TREE_CODE (v) == TREE_VEC);
2561 int oldlen = TREE_VEC_LENGTH (v);
2562 gcc_assert (len > oldlen);
2564 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2565 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2567 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2569 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2571 TREE_VEC_LENGTH (v) = len;
2573 return v;
2576 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2577 fixed, and scalar, complex or vector. */
2579 bool
2580 zerop (const_tree expr)
2582 return (integer_zerop (expr)
2583 || real_zerop (expr)
2584 || fixed_zerop (expr));
2587 /* Return 1 if EXPR is the integer constant zero or a complex constant
2588 of zero, or a location wrapper for such a constant. */
2590 bool
2591 integer_zerop (const_tree expr)
2593 STRIP_ANY_LOCATION_WRAPPER (expr);
2595 switch (TREE_CODE (expr))
2597 case INTEGER_CST:
2598 return wi::to_wide (expr) == 0;
2599 case COMPLEX_CST:
2600 return (integer_zerop (TREE_REALPART (expr))
2601 && integer_zerop (TREE_IMAGPART (expr)));
2602 case VECTOR_CST:
2603 return (VECTOR_CST_NPATTERNS (expr) == 1
2604 && VECTOR_CST_DUPLICATE_P (expr)
2605 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2606 default:
2607 return false;
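/* Editorial usage sketch (not part of the original source): these
   predicates look through location wrappers and recurse into complex and
   vector constants.  For example, both of the following hold, where
   V4SI_TYPE is a hypothetical 4-element integer vector type:

     integer_zerop (build_zero_cst (integer_type_node))
     integer_zerop (build_zero_cst (v4si_type))

   The second case matches the VECTOR_CST branch above because the zero
   vector is encoded as a single duplicated pattern.  */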
2611 /* Return 1 if EXPR is the integer constant one or the corresponding
2612 complex constant, or a location wrapper for such a constant. */
2614 bool
2615 integer_onep (const_tree expr)
2617 STRIP_ANY_LOCATION_WRAPPER (expr);
2619 switch (TREE_CODE (expr))
2621 case INTEGER_CST:
2622 return wi::eq_p (wi::to_widest (expr), 1);
2623 case COMPLEX_CST:
2624 return (integer_onep (TREE_REALPART (expr))
2625 && integer_zerop (TREE_IMAGPART (expr)));
2626 case VECTOR_CST:
2627 return (VECTOR_CST_NPATTERNS (expr) == 1
2628 && VECTOR_CST_DUPLICATE_P (expr)
2629 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2630 default:
2631 return false;
2635 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2636 return 1 if every piece is the integer constant one.
2637 Also return 1 for location wrappers for such a constant. */
2639 bool
2640 integer_each_onep (const_tree expr)
2642 STRIP_ANY_LOCATION_WRAPPER (expr);
2644 if (TREE_CODE (expr) == COMPLEX_CST)
2645 return (integer_onep (TREE_REALPART (expr))
2646 && integer_onep (TREE_IMAGPART (expr)));
2647 else
2648 return integer_onep (expr);
2651 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2652 it contains, or a complex or vector whose subparts are such integers,
2653 or a location wrapper for such a constant. */
2655 bool
2656 integer_all_onesp (const_tree expr)
2658 STRIP_ANY_LOCATION_WRAPPER (expr);
2660 if (TREE_CODE (expr) == COMPLEX_CST
2661 && integer_all_onesp (TREE_REALPART (expr))
2662 && integer_all_onesp (TREE_IMAGPART (expr)))
2663 return true;
2665 else if (TREE_CODE (expr) == VECTOR_CST)
2666 return (VECTOR_CST_NPATTERNS (expr) == 1
2667 && VECTOR_CST_DUPLICATE_P (expr)
2668 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2670 else if (TREE_CODE (expr) != INTEGER_CST)
2671 return false;
2673 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2674 == wi::to_wide (expr));
2677 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2678 for such a constant. */
2680 bool
2681 integer_minus_onep (const_tree expr)
2683 STRIP_ANY_LOCATION_WRAPPER (expr);
2685 if (TREE_CODE (expr) == COMPLEX_CST)
2686 return (integer_all_onesp (TREE_REALPART (expr))
2687 && integer_zerop (TREE_IMAGPART (expr)));
2688 else
2689 return integer_all_onesp (expr);
2692 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2693 one bit on), or a location wrapper for such a constant. */
2695 bool
2696 integer_pow2p (const_tree expr)
2698 STRIP_ANY_LOCATION_WRAPPER (expr);
2700 if (TREE_CODE (expr) == COMPLEX_CST
2701 && integer_pow2p (TREE_REALPART (expr))
2702 && integer_zerop (TREE_IMAGPART (expr)))
2703 return true;
2705 if (TREE_CODE (expr) != INTEGER_CST)
2706 return false;
2708 return wi::popcount (wi::to_wide (expr)) == 1;
2711 /* Return 1 if EXPR is an integer constant other than zero or a
2712 complex constant other than zero, or a location wrapper for such a
2713 constant. */
2715 bool
2716 integer_nonzerop (const_tree expr)
2718 STRIP_ANY_LOCATION_WRAPPER (expr);
2720 return ((TREE_CODE (expr) == INTEGER_CST
2721 && wi::to_wide (expr) != 0)
2722 || (TREE_CODE (expr) == COMPLEX_CST
2723 && (integer_nonzerop (TREE_REALPART (expr))
2724 || integer_nonzerop (TREE_IMAGPART (expr)))));
2727 /* Return 1 if EXPR is the integer constant one. For vector,
2728 return 1 if every piece is the integer constant minus one
2729 (representing the value TRUE).
2730 Also return 1 for location wrappers for such a constant. */
2732 bool
2733 integer_truep (const_tree expr)
2735 STRIP_ANY_LOCATION_WRAPPER (expr);
2737 if (TREE_CODE (expr) == VECTOR_CST)
2738 return integer_all_onesp (expr);
2739 return integer_onep (expr);
2742 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2743 for such a constant. */
2745 bool
2746 fixed_zerop (const_tree expr)
2748 STRIP_ANY_LOCATION_WRAPPER (expr);
2750 return (TREE_CODE (expr) == FIXED_CST
2751 && TREE_FIXED_CST (expr).data.is_zero ());
2754 /* Return the power of two represented by a tree node known to be a
2755 power of two. */
2757 int
2758 tree_log2 (const_tree expr)
2760 if (TREE_CODE (expr) == COMPLEX_CST)
2761 return tree_log2 (TREE_REALPART (expr));
2763 return wi::exact_log2 (wi::to_wide (expr));
2766 /* Similar, but return the largest integer Y such that 2 ** Y is less
2767 than or equal to EXPR. */
2769 int
2770 tree_floor_log2 (const_tree expr)
2772 if (TREE_CODE (expr) == COMPLEX_CST)
2773 return tree_log2 (TREE_REALPART (expr));
2775 return wi::floor_log2 (wi::to_wide (expr));
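/* Editorial worked example (not part of the original source): for
   INTEGER_CST nodes holding 8 and 9, tree_log2 returns 3 and -1
   respectively (wi::exact_log2 reports -1 for a non-power-of-two, even
   though the function is only documented for powers of two), while
   tree_floor_log2 returns 3 for both, since 2**3 <= 9 < 2**4.  */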
2778 /* Return number of known trailing zero bits in EXPR, or, if the value of
2779 EXPR is known to be zero, the precision of its type. */
2781 unsigned int
2782 tree_ctz (const_tree expr)
2784 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2785 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2786 return 0;
2788 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2789 switch (TREE_CODE (expr))
2791 case INTEGER_CST:
2792 ret1 = wi::ctz (wi::to_wide (expr));
2793 return MIN (ret1, prec);
2794 case SSA_NAME:
2795 ret1 = wi::ctz (get_nonzero_bits (expr));
2796 return MIN (ret1, prec);
2797 case PLUS_EXPR:
2798 case MINUS_EXPR:
2799 case BIT_IOR_EXPR:
2800 case BIT_XOR_EXPR:
2801 case MIN_EXPR:
2802 case MAX_EXPR:
2803 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2804 if (ret1 == 0)
2805 return ret1;
2806 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2807 return MIN (ret1, ret2);
2808 case POINTER_PLUS_EXPR:
2809 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2810 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2811 /* Second operand is sizetype, which could in theory be
2812 wider than the pointer's precision. Make sure we never
2813 return more than prec. */
2814 ret2 = MIN (ret2, prec);
2815 return MIN (ret1, ret2);
2816 case BIT_AND_EXPR:
2817 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2818 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2819 return MAX (ret1, ret2);
2820 case MULT_EXPR:
2821 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2822 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2823 return MIN (ret1 + ret2, prec);
2824 case LSHIFT_EXPR:
2825 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2826 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2827 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2829 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2830 return MIN (ret1 + ret2, prec);
2832 return ret1;
2833 case RSHIFT_EXPR:
2834 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2835 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2837 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2838 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2839 if (ret1 > ret2)
2840 return ret1 - ret2;
2842 return 0;
2843 case TRUNC_DIV_EXPR:
2844 case CEIL_DIV_EXPR:
2845 case FLOOR_DIV_EXPR:
2846 case ROUND_DIV_EXPR:
2847 case EXACT_DIV_EXPR:
2848 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2849 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2851 int l = tree_log2 (TREE_OPERAND (expr, 1));
2852 if (l >= 0)
2854 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2855 ret2 = l;
2856 if (ret1 > ret2)
2857 return ret1 - ret2;
2860 return 0;
2861 CASE_CONVERT:
2862 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2863 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2864 ret1 = prec;
2865 return MIN (ret1, prec);
2866 case SAVE_EXPR:
2867 return tree_ctz (TREE_OPERAND (expr, 0));
2868 case COND_EXPR:
2869 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2870 if (ret1 == 0)
2871 return 0;
2872 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2873 return MIN (ret1, ret2);
2874 case COMPOUND_EXPR:
2875 return tree_ctz (TREE_OPERAND (expr, 1));
2876 case ADDR_EXPR:
2877 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2878 if (ret1 > BITS_PER_UNIT)
2880 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2881 return MIN (ret1, prec);
2883 return 0;
2884 default:
2885 return 0;
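/* Editorial worked example (not part of the original source): if X is an
   SSA_NAME of a 32-bit type whose recorded nonzero bits imply two known
   trailing zero bits, then for the expression X * 8 tree_ctz returns
   MIN (2 + 3, prec) == 5 via the MULT_EXPR case, and for X & 16 it
   returns MAX (2, 4) == 4 via the BIT_AND_EXPR case.  */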
2889 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2890 decimal float constants, so don't return 1 for them.
2891 Also return 1 for location wrappers around such a constant. */
2893 bool
2894 real_zerop (const_tree expr)
2896 STRIP_ANY_LOCATION_WRAPPER (expr);
2898 switch (TREE_CODE (expr))
2900 case REAL_CST:
2901 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2902 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2903 case COMPLEX_CST:
2904 return real_zerop (TREE_REALPART (expr))
2905 && real_zerop (TREE_IMAGPART (expr));
2906 case VECTOR_CST:
2908 /* Don't simply check for a duplicate because the predicate
2909 accepts both +0.0 and -0.0. */
2910 unsigned count = vector_cst_encoded_nelts (expr);
2911 for (unsigned int i = 0; i < count; ++i)
2912 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
2913 return false;
2914 return true;
2916 default:
2917 return false;
2921 /* Return 1 if EXPR is the real constant one in real or complex form.
2922 Trailing zeroes matter for decimal float constants, so don't return
2923 1 for them.
2924 Also return 1 for location wrappers around such a constant. */
2926 bool
2927 real_onep (const_tree expr)
2929 STRIP_ANY_LOCATION_WRAPPER (expr);
2931 switch (TREE_CODE (expr))
2933 case REAL_CST:
2934 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2935 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2936 case COMPLEX_CST:
2937 return real_onep (TREE_REALPART (expr))
2938 && real_zerop (TREE_IMAGPART (expr));
2939 case VECTOR_CST:
2940 return (VECTOR_CST_NPATTERNS (expr) == 1
2941 && VECTOR_CST_DUPLICATE_P (expr)
2942 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2943 default:
2944 return false;
2948 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2949 matter for decimal float constants, so don't return 1 for them.
2950 Also return 1 for location wrappers around such a constant. */
2952 bool
2953 real_minus_onep (const_tree expr)
2955 STRIP_ANY_LOCATION_WRAPPER (expr);
2957 switch (TREE_CODE (expr))
2959 case REAL_CST:
2960 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2961 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2962 case COMPLEX_CST:
2963 return real_minus_onep (TREE_REALPART (expr))
2964 && real_zerop (TREE_IMAGPART (expr));
2965 case VECTOR_CST:
2966 return (VECTOR_CST_NPATTERNS (expr) == 1
2967 && VECTOR_CST_DUPLICATE_P (expr)
2968 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2969 default:
2970 return false;
2974 /* Nonzero if EXP is a constant or a cast of a constant. */
2976 bool
2977 really_constant_p (const_tree exp)
2979 /* This is not quite the same as STRIP_NOPS. It does more. */
2980 while (CONVERT_EXPR_P (exp)
2981 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2982 exp = TREE_OPERAND (exp, 0);
2983 return TREE_CONSTANT (exp);
2986 /* Return true if T holds a polynomial pointer difference, storing it in
2987 *VALUE if so. A true return means that T's precision is no greater
2988 than 64 bits, which is the largest address space we support, so *VALUE
2989 never loses precision. However, the signedness of the result does
2990 not necessarily match the signedness of T: sometimes an unsigned type
2991 like sizetype is used to encode a value that is actually negative. */
2993 bool
2994 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
2996 if (!t)
2997 return false;
2998 if (TREE_CODE (t) == INTEGER_CST)
3000 if (!cst_and_fits_in_hwi (t))
3001 return false;
3002 *value = int_cst_value (t);
3003 return true;
3005 if (POLY_INT_CST_P (t))
3007 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3008 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3009 return false;
3010 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3011 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3012 return true;
3014 return false;
3017 poly_int64
3018 tree_to_poly_int64 (const_tree t)
3020 gcc_assert (tree_fits_poly_int64_p (t));
3021 if (POLY_INT_CST_P (t))
3022 return poly_int_cst_value (t).force_shwi ();
3023 return TREE_INT_CST_LOW (t);
3026 poly_uint64
3027 tree_to_poly_uint64 (const_tree t)
3029 gcc_assert (tree_fits_poly_uint64_p (t));
3030 if (POLY_INT_CST_P (t))
3031 return poly_int_cst_value (t).force_uhwi ();
3032 return TREE_INT_CST_LOW (t);
3035 /* Return first list element whose TREE_VALUE is ELEM.
3036 Return 0 if ELEM is not in LIST. */
3038 tree
3039 value_member (tree elem, tree list)
3041 while (list)
3043 if (elem == TREE_VALUE (list))
3044 return list;
3045 list = TREE_CHAIN (list);
3047 return NULL_TREE;
3050 /* Return first list element whose TREE_PURPOSE is ELEM.
3051 Return 0 if ELEM is not in LIST. */
3053 tree
3054 purpose_member (const_tree elem, tree list)
3056 while (list)
3058 if (elem == TREE_PURPOSE (list))
3059 return list;
3060 list = TREE_CHAIN (list);
3062 return NULL_TREE;
3065 /* Return true if ELEM is in V. */
3067 bool
3068 vec_member (const_tree elem, vec<tree, va_gc> *v)
3070 unsigned ix;
3071 tree t;
3072 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3073 if (elem == t)
3074 return true;
3075 return false;
3078 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3079 NULL_TREE. */
3081 tree
3082 chain_index (int idx, tree chain)
3084 for (; chain && idx > 0; --idx)
3085 chain = TREE_CHAIN (chain);
3086 return chain;
3089 /* Return nonzero if ELEM is part of the chain CHAIN. */
3091 bool
3092 chain_member (const_tree elem, const_tree chain)
3094 while (chain)
3096 if (elem == chain)
3097 return true;
3098 chain = DECL_CHAIN (chain);
3101 return false;
3104 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3105 We expect a null pointer to mark the end of the chain.
3106 This is the Lisp primitive `length'. */
3108 int
3109 list_length (const_tree t)
3111 const_tree p = t;
3112 #ifdef ENABLE_TREE_CHECKING
3113 const_tree q = t;
3114 #endif
3115 int len = 0;
3117 while (p)
3119 p = TREE_CHAIN (p);
3120 #ifdef ENABLE_TREE_CHECKING
3121 if (len % 2)
3122 q = TREE_CHAIN (q);
3123 gcc_assert (p != q);
3124 #endif
3125 len++;
3128 return len;
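/* Editorial note (not part of the original source): under
   ENABLE_TREE_CHECKING the loop above doubles as a cycle check.  Q
   advances one link for every two advances of P, so if the chain is
   circular P eventually catches up with Q and the assertion fires instead
   of looping forever.  */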
3131 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3132 UNION_TYPE TYPE, or NULL_TREE if none. */
3134 tree
3135 first_field (const_tree type)
3137 tree t = TYPE_FIELDS (type);
3138 while (t && TREE_CODE (t) != FIELD_DECL)
3139 t = TREE_CHAIN (t);
3140 return t;
3143 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3144 UNION_TYPE TYPE, or NULL_TREE if none. */
3146 tree
3147 last_field (const_tree type)
3149 tree last = NULL_TREE;
3151 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3153 if (TREE_CODE (fld) != FIELD_DECL)
3154 continue;
3156 last = fld;
3159 return last;
3162 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3163 by modifying the last node in chain 1 to point to chain 2.
3164 This is the Lisp primitive `nconc'. */
3166 tree
3167 chainon (tree op1, tree op2)
3169 tree t1;
3171 if (!op1)
3172 return op2;
3173 if (!op2)
3174 return op1;
3176 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3177 continue;
3178 TREE_CHAIN (t1) = op2;
3180 #ifdef ENABLE_TREE_CHECKING
3182 tree t2;
3183 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3184 gcc_assert (t2 != t1);
3186 #endif
3188 return op1;
3191 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3193 tree
3194 tree_last (tree chain)
3196 tree next;
3197 if (chain)
3198 while ((next = TREE_CHAIN (chain)))
3199 chain = next;
3200 return chain;
3203 /* Reverse the order of elements in the chain T,
3204 and return the new head of the chain (old last element). */
3206 tree
3207 nreverse (tree t)
3209 tree prev = 0, decl, next;
3210 for (decl = t; decl; decl = next)
3212 /* We shouldn't be using this function to reverse BLOCK chains; we
3213 have blocks_nreverse for that. */
3214 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3215 next = TREE_CHAIN (decl);
3216 TREE_CHAIN (decl) = prev;
3217 prev = decl;
3219 return prev;
3222 /* Return a newly created TREE_LIST node whose
3223 purpose and value fields are PARM and VALUE. */
3225 tree
3226 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3228 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3229 TREE_PURPOSE (t) = parm;
3230 TREE_VALUE (t) = value;
3231 return t;
3234 /* Build a chain of TREE_LIST nodes from a vector. */
3236 tree
3237 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3239 tree ret = NULL_TREE;
3240 tree *pp = &ret;
3241 unsigned int i;
3242 tree t;
3243 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3245 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3246 pp = &TREE_CHAIN (*pp);
3248 return ret;
3251 /* Return a newly created TREE_LIST node whose
3252 purpose and value fields are PURPOSE and VALUE
3253 and whose TREE_CHAIN is CHAIN. */
3255 tree
3256 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3258 tree node;
3260 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3261 memset (node, 0, sizeof (struct tree_common));
3263 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3265 TREE_SET_CODE (node, TREE_LIST);
3266 TREE_CHAIN (node) = chain;
3267 TREE_PURPOSE (node) = purpose;
3268 TREE_VALUE (node) = value;
3269 return node;
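/* Editorial usage sketch (not part of the original source): a TREE_LIST
   chain is typically built back to front with tree_cons.  ARG1..ARG3 are
   placeholder trees used only for this example.

     tree list = tree_cons (NULL_TREE, arg3, NULL_TREE);
     list = tree_cons (NULL_TREE, arg2, list);
     list = tree_cons (NULL_TREE, arg1, list);

   Afterwards list_length (list) == 3 and TREE_VALUE (list) == arg1;
   build_tree_list_vec above builds the same shape front to back from a
   vector.  */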
3272 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3273 trees. */
3275 vec<tree, va_gc> *
3276 ctor_to_vec (tree ctor)
3278 vec<tree, va_gc> *vec;
3279 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3280 unsigned int ix;
3281 tree val;
3283 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3284 vec->quick_push (val);
3286 return vec;
3289 /* Return the size nominally occupied by an object of type TYPE
3290 when it resides in memory. The value is measured in units of bytes,
3291 and its data type is that normally used for type sizes
3292 (which is the first type created by make_signed_type or
3293 make_unsigned_type). */
3295 tree
3296 size_in_bytes_loc (location_t loc, const_tree type)
3298 tree t;
3300 if (type == error_mark_node)
3301 return integer_zero_node;
3303 type = TYPE_MAIN_VARIANT (type);
3304 t = TYPE_SIZE_UNIT (type);
3306 if (t == 0)
3308 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3309 return size_zero_node;
3312 return t;
3315 /* Return the size of TYPE (in bytes) as a wide integer
3316 or return -1 if the size can vary or is larger than an integer. */
3318 HOST_WIDE_INT
3319 int_size_in_bytes (const_tree type)
3321 tree t;
3323 if (type == error_mark_node)
3324 return 0;
3326 type = TYPE_MAIN_VARIANT (type);
3327 t = TYPE_SIZE_UNIT (type);
3329 if (t && tree_fits_uhwi_p (t))
3330 return TREE_INT_CST_LOW (t);
3331 else
3332 return -1;
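/* Editorial worked example (not part of the original source):
   int_size_in_bytes (integer_type_node) returns the target's int size in
   bytes (typically 4), while a variable-length array type, whose
   TYPE_SIZE_UNIT is not a compile-time constant, yields -1.  */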
3335 /* Return the maximum size of TYPE (in bytes) as a wide integer
3336 or return -1 if the size can vary or is larger than an integer. */
3338 HOST_WIDE_INT
3339 max_int_size_in_bytes (const_tree type)
3341 HOST_WIDE_INT size = -1;
3342 tree size_tree;
3344 /* If this is an array type, check for a possible MAX_SIZE attached. */
3346 if (TREE_CODE (type) == ARRAY_TYPE)
3348 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3350 if (size_tree && tree_fits_uhwi_p (size_tree))
3351 size = tree_to_uhwi (size_tree);
3354 /* If we still haven't been able to get a size, see if the language
3355 can compute a maximum size. */
3357 if (size == -1)
3359 size_tree = lang_hooks.types.max_size (type);
3361 if (size_tree && tree_fits_uhwi_p (size_tree))
3362 size = tree_to_uhwi (size_tree);
3365 return size;
3368 /* Return the bit position of FIELD, in bits from the start of the record.
3369 This is a tree of type bitsizetype. */
3371 tree
3372 bit_position (const_tree field)
3374 return bit_from_pos (DECL_FIELD_OFFSET (field),
3375 DECL_FIELD_BIT_OFFSET (field));
3378 /* Return the byte position of FIELD, in bytes from the start of the record.
3379 This is a tree of type sizetype. */
3381 tree
3382 byte_position (const_tree field)
3384 return byte_from_pos (DECL_FIELD_OFFSET (field),
3385 DECL_FIELD_BIT_OFFSET (field));
3388 /* Likewise, but return as an integer. It must be representable in
3389 that way (since it could be a signed value, we don't have the
3390 option of returning -1 like int_size_in_bytes can). */
3392 HOST_WIDE_INT
3393 int_byte_position (const_tree field)
3395 return tree_to_shwi (byte_position (field));
3398 /* Return, as a tree node, the number of elements for TYPE (which is an
3399 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3401 tree
3402 array_type_nelts (const_tree type)
3404 tree index_type, min, max;
3406 /* If they did it with unspecified bounds, then we should have already
3407 given an error about it before we got here. */
3408 if (! TYPE_DOMAIN (type))
3409 return error_mark_node;
3411 index_type = TYPE_DOMAIN (type);
3412 min = TYPE_MIN_VALUE (index_type);
3413 max = TYPE_MAX_VALUE (index_type);
3415 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3416 if (!max)
3417 return error_mark_node;
3419 return (integer_zerop (min)
3420 ? max
3421 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
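/* Editorial worked example (not part of the original source): for the C
   type int[10] the domain is [0, 9], so array_type_nelts returns the
   INTEGER_CST 9; callers that want the element count must add one.  */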
3424 /* If arg is static -- a reference to an object in static storage -- then
3425 return the object. This is not the same as the C meaning of `static'.
3426 If arg isn't static, return NULL. */
3428 tree
3429 staticp (tree arg)
3431 switch (TREE_CODE (arg))
3433 case FUNCTION_DECL:
3434 /* Nested functions are static, even though taking their address will
3435 involve a trampoline as we unnest the nested function and create
3436 the trampoline on the tree level. */
3437 return arg;
3439 case VAR_DECL:
3440 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3441 && ! DECL_THREAD_LOCAL_P (arg)
3442 && ! DECL_DLLIMPORT_P (arg)
3443 ? arg : NULL);
3445 case CONST_DECL:
3446 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3447 ? arg : NULL);
3449 case CONSTRUCTOR:
3450 return TREE_STATIC (arg) ? arg : NULL;
3452 case LABEL_DECL:
3453 case STRING_CST:
3454 return arg;
3456 case COMPONENT_REF:
3457 /* If the thing being referenced is not a field, then it is
3458 something language specific. */
3459 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3461 /* If we are referencing a bitfield, we can't evaluate an
3462 ADDR_EXPR at compile time and so it isn't a constant. */
3463 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3464 return NULL;
3466 return staticp (TREE_OPERAND (arg, 0));
3468 case BIT_FIELD_REF:
3469 return NULL;
3471 case INDIRECT_REF:
3472 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3474 case ARRAY_REF:
3475 case ARRAY_RANGE_REF:
3476 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3477 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3478 return staticp (TREE_OPERAND (arg, 0));
3479 else
3480 return NULL;
3482 case COMPOUND_LITERAL_EXPR:
3483 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3485 default:
3486 return NULL;
3493 /* Return whether OP is a DECL whose address is function-invariant. */
3495 bool
3496 decl_address_invariant_p (const_tree op)
3498 /* The conditions below are slightly less strict than the one in
3499 staticp. */
3501 switch (TREE_CODE (op))
3503 case PARM_DECL:
3504 case RESULT_DECL:
3505 case LABEL_DECL:
3506 case FUNCTION_DECL:
3507 return true;
3509 case VAR_DECL:
3510 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3511 || DECL_THREAD_LOCAL_P (op)
3512 || DECL_CONTEXT (op) == current_function_decl
3513 || decl_function_context (op) == current_function_decl)
3514 return true;
3515 break;
3517 case CONST_DECL:
3518 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3519 || decl_function_context (op) == current_function_decl)
3520 return true;
3521 break;
3523 default:
3524 break;
3527 return false;
3530 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3532 bool
3533 decl_address_ip_invariant_p (const_tree op)
3535 /* The conditions below are slightly less strict than the one in
3536 staticp. */
3538 switch (TREE_CODE (op))
3540 case LABEL_DECL:
3541 case FUNCTION_DECL:
3542 case STRING_CST:
3543 return true;
3545 case VAR_DECL:
3546 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3547 && !DECL_DLLIMPORT_P (op))
3548 || DECL_THREAD_LOCAL_P (op))
3549 return true;
3550 break;
3552 case CONST_DECL:
3553 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3554 return true;
3555 break;
3557 default:
3558 break;
3561 return false;
3565 /* Return true if T is function-invariant (internal function, does
3566 not handle arithmetic; that's handled in skip_simple_arithmetic and
3567 tree_invariant_p). */
3569 static bool
3570 tree_invariant_p_1 (tree t)
3572 tree op;
3574 if (TREE_CONSTANT (t)
3575 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3576 return true;
3578 switch (TREE_CODE (t))
3580 case SAVE_EXPR:
3581 return true;
3583 case ADDR_EXPR:
3584 op = TREE_OPERAND (t, 0);
3585 while (handled_component_p (op))
3587 switch (TREE_CODE (op))
3589 case ARRAY_REF:
3590 case ARRAY_RANGE_REF:
3591 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3592 || TREE_OPERAND (op, 2) != NULL_TREE
3593 || TREE_OPERAND (op, 3) != NULL_TREE)
3594 return false;
3595 break;
3597 case COMPONENT_REF:
3598 if (TREE_OPERAND (op, 2) != NULL_TREE)
3599 return false;
3600 break;
3602 default:;
3604 op = TREE_OPERAND (op, 0);
3607 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3609 default:
3610 break;
3613 return false;
3616 /* Return true if T is function-invariant. */
3618 bool
3619 tree_invariant_p (tree t)
3621 tree inner = skip_simple_arithmetic (t);
3622 return tree_invariant_p_1 (inner);
3625 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3626 Do this to any expression which may be used in more than one place,
3627 but must be evaluated only once.
3629 Normally, expand_expr would reevaluate the expression each time.
3630 Calling save_expr produces something that is evaluated and recorded
3631 the first time expand_expr is called on it. Subsequent calls to
3632 expand_expr just reuse the recorded value.
3634 The call to expand_expr that generates code that actually computes
3635 the value is the first call *at compile time*. Subsequent calls
3636 *at compile time* generate code to use the saved value.
3637 This produces a correct result provided that *at run time* control
3638 always flows through the insns made by the first expand_expr
3639 before reaching the other places where the save_expr was evaluated.
3640 You, the caller of save_expr, must make sure this is so.
3642 Constants, and certain read-only nodes, are returned with no
3643 SAVE_EXPR because that is safe. Expressions containing placeholders
3644 are not touched; see tree.def for an explanation of what these
3645 are used for. */
3647 tree
3648 save_expr (tree expr)
3650 tree inner;
3652 /* If the tree evaluates to a constant, then we don't want to hide that
3653 fact (i.e. this allows further folding, and direct checks for constants).
3654 However, a read-only object that has side effects cannot be bypassed.
3655 Since it is no problem to reevaluate literals, we just return the
3656 literal node. */
3657 inner = skip_simple_arithmetic (expr);
3658 if (TREE_CODE (inner) == ERROR_MARK)
3659 return inner;
3661 if (tree_invariant_p_1 (inner))
3662 return expr;
3664 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3665 it means that the size or offset of some field of an object depends on
3666 the value within another field.
3668 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3669 and some variable since it would then need to be both evaluated once and
3670 evaluated more than once. Front-ends must ensure this case cannot
3671 happen by surrounding any such subexpressions in their own SAVE_EXPR
3672 and forcing evaluation at the proper time. */
3673 if (contains_placeholder_p (inner))
3674 return expr;
3676 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3678 /* This expression might be placed ahead of a jump to ensure that the
3679 value was computed on both sides of the jump. So make sure it isn't
3680 eliminated as dead. */
3681 TREE_SIDE_EFFECTS (expr) = 1;
3682 return expr;
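/* Editorial usage sketch (not part of the original source): a front end
   that needs to use EXPR twice but evaluate it only once can write

     tree saved = save_expr (expr);
     tree sum = fold_build2 (PLUS_EXPR, TREE_TYPE (saved), saved, saved);

   If EXPR is already invariant or contains a PLACEHOLDER_EXPR, save_expr
   returns it unchanged, as the checks above show.  */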
3685 /* Look inside EXPR into any simple arithmetic operations. Return the
3686 outermost non-arithmetic or non-invariant node. */
3688 tree
3689 skip_simple_arithmetic (tree expr)
3691 /* We don't care about whether this can be used as an lvalue in this
3692 context. */
3693 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3694 expr = TREE_OPERAND (expr, 0);
3696 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3697 a constant, it will be more efficient to not make another SAVE_EXPR since
3698 it will allow better simplification and GCSE will be able to merge the
3699 computations if they actually occur. */
3700 while (true)
3702 if (UNARY_CLASS_P (expr))
3703 expr = TREE_OPERAND (expr, 0);
3704 else if (BINARY_CLASS_P (expr))
3706 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3707 expr = TREE_OPERAND (expr, 0);
3708 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3709 expr = TREE_OPERAND (expr, 1);
3710 else
3711 break;
3713 else
3714 break;
3717 return expr;
3720 /* Look inside EXPR into simple arithmetic operations involving constants.
3721 Return the outermost non-arithmetic or non-constant node. */
3723 tree
3724 skip_simple_constant_arithmetic (tree expr)
3726 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3727 expr = TREE_OPERAND (expr, 0);
3729 while (true)
3731 if (UNARY_CLASS_P (expr))
3732 expr = TREE_OPERAND (expr, 0);
3733 else if (BINARY_CLASS_P (expr))
3735 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3736 expr = TREE_OPERAND (expr, 0);
3737 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3738 expr = TREE_OPERAND (expr, 1);
3739 else
3740 break;
3742 else
3743 break;
3746 return expr;
3749 /* Return which tree structure is used by T. */
3751 enum tree_node_structure_enum
3752 tree_node_structure (const_tree t)
3754 const enum tree_code code = TREE_CODE (t);
3755 return tree_node_structure_for_code (code);
3758 /* Set various status flags when building a CALL_EXPR object T. */
3760 static void
3761 process_call_operands (tree t)
3763 bool side_effects = TREE_SIDE_EFFECTS (t);
3764 bool read_only = false;
3765 int i = call_expr_flags (t);
3767 /* Calls have side-effects, except those to const or pure functions. */
3768 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3769 side_effects = true;
3770 /* Propagate TREE_READONLY of arguments for const functions. */
3771 if (i & ECF_CONST)
3772 read_only = true;
3774 if (!side_effects || read_only)
3775 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3777 tree op = TREE_OPERAND (t, i);
3778 if (op && TREE_SIDE_EFFECTS (op))
3779 side_effects = true;
3780 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3781 read_only = false;
3784 TREE_SIDE_EFFECTS (t) = side_effects;
3785 TREE_READONLY (t) = read_only;
3788 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3789 size or offset that depends on a field within a record. */
3791 bool
3792 contains_placeholder_p (const_tree exp)
3794 enum tree_code code;
3796 if (!exp)
3797 return 0;
3799 code = TREE_CODE (exp);
3800 if (code == PLACEHOLDER_EXPR)
3801 return 1;
3803 switch (TREE_CODE_CLASS (code))
3805 case tcc_reference:
3806 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3807 position computations since they will be converted into a
3808 WITH_RECORD_EXPR involving the reference, which we assume
3809 here will be valid. */
3810 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3812 case tcc_exceptional:
3813 if (code == TREE_LIST)
3814 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3815 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3816 break;
3818 case tcc_unary:
3819 case tcc_binary:
3820 case tcc_comparison:
3821 case tcc_expression:
3822 switch (code)
3824 case COMPOUND_EXPR:
3825 /* Ignoring the first operand isn't quite right, but works best. */
3826 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3828 case COND_EXPR:
3829 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3830 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3831 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3833 case SAVE_EXPR:
3834 /* The save_expr function never wraps anything containing
3835 a PLACEHOLDER_EXPR. */
3836 return 0;
3838 default:
3839 break;
3842 switch (TREE_CODE_LENGTH (code))
3844 case 1:
3845 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3846 case 2:
3847 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3848 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3849 default:
3850 return 0;
3853 case tcc_vl_exp:
3854 switch (code)
3856 case CALL_EXPR:
3858 const_tree arg;
3859 const_call_expr_arg_iterator iter;
3860 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3861 if (CONTAINS_PLACEHOLDER_P (arg))
3862 return 1;
3863 return 0;
3865 default:
3866 return 0;
3869 default:
3870 return 0;
3872 return 0;
3875 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3876 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3877 field positions. */
3879 static bool
3880 type_contains_placeholder_1 (const_tree type)
3882 /* If the size contains a placeholder or the parent type (the component
3883 type in the case of arrays) involves a placeholder, this type does. */
3884 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3885 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3886 || (!POINTER_TYPE_P (type)
3887 && TREE_TYPE (type)
3888 && type_contains_placeholder_p (TREE_TYPE (type))))
3889 return true;
3891 /* Now do type-specific checks. Note that the last part of the check above
3892 greatly limits what we have to do below. */
3893 switch (TREE_CODE (type))
3895 case VOID_TYPE:
3896 case COMPLEX_TYPE:
3897 case ENUMERAL_TYPE:
3898 case BOOLEAN_TYPE:
3899 case POINTER_TYPE:
3900 case OFFSET_TYPE:
3901 case REFERENCE_TYPE:
3902 case METHOD_TYPE:
3903 case FUNCTION_TYPE:
3904 case VECTOR_TYPE:
3905 case NULLPTR_TYPE:
3906 return false;
3908 case INTEGER_TYPE:
3909 case REAL_TYPE:
3910 case FIXED_POINT_TYPE:
3911 /* Here we just check the bounds. */
3912 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3913 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3915 case ARRAY_TYPE:
3916 /* We have already checked the component type above, so just check
3917 the domain type. Flexible array members have a null domain. */
3918 return TYPE_DOMAIN (type) ?
3919 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3921 case RECORD_TYPE:
3922 case UNION_TYPE:
3923 case QUAL_UNION_TYPE:
3925 tree field;
3927 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3928 if (TREE_CODE (field) == FIELD_DECL
3929 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3930 || (TREE_CODE (type) == QUAL_UNION_TYPE
3931 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3932 || type_contains_placeholder_p (TREE_TYPE (field))))
3933 return true;
3935 return false;
3938 default:
3939 gcc_unreachable ();
3943 /* Wrapper around above function used to cache its result. */
3945 bool
3946 type_contains_placeholder_p (tree type)
3948 bool result;
3950 /* If the contains_placeholder_bits field has been initialized,
3951 then we know the answer. */
3952 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3953 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3955 /* Indicate that we've seen this type node, and the answer is false.
3956 This is what we want to return if we run into recursion via fields. */
3957 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3959 /* Compute the real value. */
3960 result = type_contains_placeholder_1 (type);
3962 /* Store the real value. */
3963 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3965 return result;
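/* Editorial note (not part of the original source): the cache stores
   RESULT + 1 so that 0 can mean "not computed yet", 1 means false and 2
   means true; pre-seeding the field with 1 makes self-referential types
   report false while the recursive walk over their fields is still in
   progress.  */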
3968 /* Push tree EXP onto vector QUEUE if it is not already present. */
3970 static void
3971 push_without_duplicates (tree exp, vec<tree> *queue)
3973 unsigned int i;
3974 tree iter;
3976 FOR_EACH_VEC_ELT (*queue, i, iter)
3977 if (simple_cst_equal (iter, exp) == 1)
3978 break;
3980 if (!iter)
3981 queue->safe_push (exp);
3984 /* Given a tree EXP, find all occurrences of references to fields
3985 in a PLACEHOLDER_EXPR and place them in vector REFS without
3986 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3987 we assume here that EXP contains only arithmetic expressions
3988 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3989 argument list. */
3991 void
3992 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3994 enum tree_code code = TREE_CODE (exp);
3995 tree inner;
3996 int i;
3998 /* We handle TREE_LIST and COMPONENT_REF separately. */
3999 if (code == TREE_LIST)
4001 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4002 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4004 else if (code == COMPONENT_REF)
4006 for (inner = TREE_OPERAND (exp, 0);
4007 REFERENCE_CLASS_P (inner);
4008 inner = TREE_OPERAND (inner, 0))
4011 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4012 push_without_duplicates (exp, refs);
4013 else
4014 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4016 else
4017 switch (TREE_CODE_CLASS (code))
4019 case tcc_constant:
4020 break;
4022 case tcc_declaration:
4023 /* Variables allocated to static storage can stay. */
4024 if (!TREE_STATIC (exp))
4025 push_without_duplicates (exp, refs);
4026 break;
4028 case tcc_expression:
4029 /* This is the pattern built in ada/make_aligning_type. */
4030 if (code == ADDR_EXPR
4031 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4033 push_without_duplicates (exp, refs);
4034 break;
4037 /* Fall through. */
4039 case tcc_exceptional:
4040 case tcc_unary:
4041 case tcc_binary:
4042 case tcc_comparison:
4043 case tcc_reference:
4044 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4045 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4046 break;
4048 case tcc_vl_exp:
4049 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4050 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4051 break;
4053 default:
4054 gcc_unreachable ();
4058 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4059 return a tree with all occurrences of references to F in a
4060 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4061 CONST_DECLs. Note that we assume here that EXP contains only
4062 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4063 occurring only in their argument list. */
4065 tree
4066 substitute_in_expr (tree exp, tree f, tree r)
4068 enum tree_code code = TREE_CODE (exp);
4069 tree op0, op1, op2, op3;
4070 tree new_tree;
4072 /* We handle TREE_LIST and COMPONENT_REF separately. */
4073 if (code == TREE_LIST)
4075 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4076 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4077 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4078 return exp;
4080 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4082 else if (code == COMPONENT_REF)
4084 tree inner;
4086 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4087 and it is the right field, replace it with R. */
4088 for (inner = TREE_OPERAND (exp, 0);
4089 REFERENCE_CLASS_P (inner);
4090 inner = TREE_OPERAND (inner, 0))
4093 /* The field. */
4094 op1 = TREE_OPERAND (exp, 1);
4096 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4097 return r;
4099 /* If this expression hasn't been completed yet, leave it alone. */
4100 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4101 return exp;
4103 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4104 if (op0 == TREE_OPERAND (exp, 0))
4105 return exp;
4107 new_tree
4108 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4110 else
4111 switch (TREE_CODE_CLASS (code))
4113 case tcc_constant:
4114 return exp;
4116 case tcc_declaration:
4117 if (exp == f)
4118 return r;
4119 else
4120 return exp;
4122 case tcc_expression:
4123 if (exp == f)
4124 return r;
4126 /* Fall through. */
4128 case tcc_exceptional:
4129 case tcc_unary:
4130 case tcc_binary:
4131 case tcc_comparison:
4132 case tcc_reference:
4133 switch (TREE_CODE_LENGTH (code))
4135 case 0:
4136 return exp;
4138 case 1:
4139 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4140 if (op0 == TREE_OPERAND (exp, 0))
4141 return exp;
4143 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4144 break;
4146 case 2:
4147 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4148 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4150 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4151 return exp;
4153 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4154 break;
4156 case 3:
4157 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4158 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4159 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4161 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4162 && op2 == TREE_OPERAND (exp, 2))
4163 return exp;
4165 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4166 break;
4168 case 4:
4169 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4170 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4171 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4172 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4174 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4175 && op2 == TREE_OPERAND (exp, 2)
4176 && op3 == TREE_OPERAND (exp, 3))
4177 return exp;
4179 new_tree
4180 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4181 break;
4183 default:
4184 gcc_unreachable ();
4186 break;
4188 case tcc_vl_exp:
4190 int i;
4192 new_tree = NULL_TREE;
4194 /* If we are trying to replace F with a constant or with another
4195 instance of one of the arguments of the call, inline back
4196 functions which do nothing other than compute a value from
4197 the arguments they are passed. This makes it possible to
4198 partially or entirely fold the replacement expression. */
4199 if (code == CALL_EXPR)
4201 bool maybe_inline = false;
4202 if (CONSTANT_CLASS_P (r))
4203 maybe_inline = true;
4204 else
4205 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4206 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4208 maybe_inline = true;
4209 break;
4211 if (maybe_inline)
4213 tree t = maybe_inline_call_in_expr (exp);
4214 if (t)
4215 return SUBSTITUTE_IN_EXPR (t, f, r);
4219 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4221 tree op = TREE_OPERAND (exp, i);
4222 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4223 if (new_op != op)
4225 if (!new_tree)
4226 new_tree = copy_node (exp);
4227 TREE_OPERAND (new_tree, i) = new_op;
4231 if (new_tree)
4233 new_tree = fold (new_tree);
4234 if (TREE_CODE (new_tree) == CALL_EXPR)
4235 process_call_operands (new_tree);
4237 else
4238 return exp;
4240 break;
4242 default:
4243 gcc_unreachable ();
4246 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4248 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4249 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4251 return new_tree;
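/* Illustrative sketch, not part of the original source: a typical use of
   the routine above is fixing up a self-referential size expression once
   the value of the controlling field is known.  Assuming SIZE has the
   form PLUS_EXPR <COMPONENT_REF <PLACEHOLDER_EXPR, LEN_FIELD>, 1> and
   LEN_FIELD is the FIELD_DECL being discharged (both names are
   hypothetical):

     tree val = build_int_cst (sizetype, 10);
     tree fixed = substitute_in_expr (size, len_field, val);
     // FIXED folds to the INTEGER_CST 11: the COMPONENT_REF of the
     // PLACEHOLDER_EXPR was replaced by VAL and fold_build2 collapsed
     // the addition.  */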
4254 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4255 for it within OBJ, a tree that is an object or a chain of references. */
4257 tree
4258 substitute_placeholder_in_expr (tree exp, tree obj)
4260 enum tree_code code = TREE_CODE (exp);
4261 tree op0, op1, op2, op3;
4262 tree new_tree;
4264 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4265 in the chain of OBJ. */
4266 if (code == PLACEHOLDER_EXPR)
4268 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4269 tree elt;
4271 for (elt = obj; elt != 0;
4272 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4273 || TREE_CODE (elt) == COND_EXPR)
4274 ? TREE_OPERAND (elt, 1)
4275 : (REFERENCE_CLASS_P (elt)
4276 || UNARY_CLASS_P (elt)
4277 || BINARY_CLASS_P (elt)
4278 || VL_EXP_CLASS_P (elt)
4279 || EXPRESSION_CLASS_P (elt))
4280 ? TREE_OPERAND (elt, 0) : 0))
4281 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4282 return elt;
4284 for (elt = obj; elt != 0;
4285 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4286 || TREE_CODE (elt) == COND_EXPR)
4287 ? TREE_OPERAND (elt, 1)
4288 : (REFERENCE_CLASS_P (elt)
4289 || UNARY_CLASS_P (elt)
4290 || BINARY_CLASS_P (elt)
4291 || VL_EXP_CLASS_P (elt)
4292 || EXPRESSION_CLASS_P (elt))
4293 ? TREE_OPERAND (elt, 0) : 0))
4294 if (POINTER_TYPE_P (TREE_TYPE (elt))
4295 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4296 == need_type))
4297 return fold_build1 (INDIRECT_REF, need_type, elt);
4299 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4300 survives until RTL generation, there will be an error. */
4301 return exp;
4304 /* TREE_LIST is special because we need to look at TREE_VALUE
4305 and TREE_CHAIN, not TREE_OPERANDS. */
4306 else if (code == TREE_LIST)
4308 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4309 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4310 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4311 return exp;
4313 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4315 else
4316 switch (TREE_CODE_CLASS (code))
4318 case tcc_constant:
4319 case tcc_declaration:
4320 return exp;
4322 case tcc_exceptional:
4323 case tcc_unary:
4324 case tcc_binary:
4325 case tcc_comparison:
4326 case tcc_expression:
4327 case tcc_reference:
4328 case tcc_statement:
4329 switch (TREE_CODE_LENGTH (code))
4331 case 0:
4332 return exp;
4334 case 1:
4335 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4336 if (op0 == TREE_OPERAND (exp, 0))
4337 return exp;
4339 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4340 break;
4342 case 2:
4343 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4344 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4346 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4347 return exp;
4349 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4350 break;
4352 case 3:
4353 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4354 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4355 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4357 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4358 && op2 == TREE_OPERAND (exp, 2))
4359 return exp;
4361 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4362 break;
4364 case 4:
4365 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4366 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4367 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4368 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4370 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4371 && op2 == TREE_OPERAND (exp, 2)
4372 && op3 == TREE_OPERAND (exp, 3))
4373 return exp;
4375 new_tree
4376 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4377 break;
4379 default:
4380 gcc_unreachable ();
4382 break;
4384 case tcc_vl_exp:
4386 int i;
4388 new_tree = NULL_TREE;
4390 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4392 tree op = TREE_OPERAND (exp, i);
4393 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4394 if (new_op != op)
4396 if (!new_tree)
4397 new_tree = copy_node (exp);
4398 TREE_OPERAND (new_tree, i) = new_op;
4402 if (new_tree)
4404 new_tree = fold (new_tree);
4405 if (TREE_CODE (new_tree) == CALL_EXPR)
4406 process_call_operands (new_tree);
4408 else
4409 return exp;
4411 break;
4413 default:
4414 gcc_unreachable ();
4417 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4419 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4420 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4422 return new_tree;
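/* Illustrative sketch, not part of the original source: given a type
   REC_TYPE whose TYPE_SIZE refers back to the record through a
   PLACEHOLDER_EXPR, the size of a concrete object VAR of that type can
   be obtained with

     tree size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (rec_type), var);

   Each PLACEHOLDER_EXPR whose type matches is replaced by VAR itself, or
   by an INDIRECT_REF of a pointer to it found along the chain of
   references, and the result is folded.  REC_TYPE and VAR are
   hypothetical names used only for this example.  */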
4426 /* Subroutine of stabilize_reference; this is called for subtrees of
4427 references. Any expression with side-effects must be put in a SAVE_EXPR
4428 to ensure that it is only evaluated once.
4430 We don't put SAVE_EXPR nodes around everything, because assigning very
4431 simple expressions to temporaries causes us to miss good opportunities
4432 for optimizations. Among other things, the opportunity to fold in the
4433 addition of a constant into an addressing mode often gets lost, e.g.
4434 "y[i+1] += x;". In general, we take the approach that we should not make
4435 an assignment unless we are forced into it - i.e., that any non-side effect
4436 operator should be allowed, and that cse should take care of coalescing
4437 multiple utterances of the same expression should that prove fruitful. */
4439 static tree
4440 stabilize_reference_1 (tree e)
4442 tree result;
4443 enum tree_code code = TREE_CODE (e);
4445 /* We cannot ignore const expressions because it might be a reference
4446 to a const array whose index contains side-effects. But we can
4447 ignore things that are actually constant or that have already been
4448 handled by this function. */
4450 if (tree_invariant_p (e))
4451 return e;
4453 switch (TREE_CODE_CLASS (code))
4455 case tcc_exceptional:
4456 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4457 have side-effects. */
4458 if (code == STATEMENT_LIST)
4459 return save_expr (e);
4460 /* FALLTHRU */
4461 case tcc_type:
4462 case tcc_declaration:
4463 case tcc_comparison:
4464 case tcc_statement:
4465 case tcc_expression:
4466 case tcc_reference:
4467 case tcc_vl_exp:
4468 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4469 so that it will only be evaluated once. */
4470 /* The reference (r) and comparison (<) classes could be handled as
4471 below, but it is generally faster to only evaluate them once. */
4472 if (TREE_SIDE_EFFECTS (e))
4473 return save_expr (e);
4474 return e;
4476 case tcc_constant:
4477 /* Constants need no processing. In fact, we should never reach
4478 here. */
4479 return e;
4481 case tcc_binary:
4482 /* Division is slow and tends to be compiled with jumps,
4483 especially the division by powers of 2 that is often
4484 found inside of an array reference. So do it just once. */
4485 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4486 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4487 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4488 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4489 return save_expr (e);
4490 /* Recursively stabilize each operand. */
4491 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4492 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4493 break;
4495 case tcc_unary:
4496 /* Recursively stabilize each operand. */
4497 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4498 break;
4500 default:
4501 gcc_unreachable ();
4504 TREE_TYPE (result) = TREE_TYPE (e);
4505 TREE_READONLY (result) = TREE_READONLY (e);
4506 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4507 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4509 return result;
4512 /* Stabilize a reference so that we can use it any number of times
4513 without causing its operands to be evaluated more than once.
4514 Returns the stabilized reference. This works by means of save_expr,
4515 so see the caveats in the comments about save_expr.
4517 Also allows conversion expressions whose operands are references.
4518 Any other kind of expression is returned unchanged. */
4520 tree
4521 stabilize_reference (tree ref)
4523 tree result;
4524 enum tree_code code = TREE_CODE (ref);
4526 switch (code)
4528 case VAR_DECL:
4529 case PARM_DECL:
4530 case RESULT_DECL:
4531 /* No action is needed in this case. */
4532 return ref;
4534 CASE_CONVERT:
4535 case FLOAT_EXPR:
4536 case FIX_TRUNC_EXPR:
4537 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4538 break;
4540 case INDIRECT_REF:
4541 result = build_nt (INDIRECT_REF,
4542 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4543 break;
4545 case COMPONENT_REF:
4546 result = build_nt (COMPONENT_REF,
4547 stabilize_reference (TREE_OPERAND (ref, 0)),
4548 TREE_OPERAND (ref, 1), NULL_TREE);
4549 break;
4551 case BIT_FIELD_REF:
4552 result = build_nt (BIT_FIELD_REF,
4553 stabilize_reference (TREE_OPERAND (ref, 0)),
4554 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4555 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4556 break;
4558 case ARRAY_REF:
4559 result = build_nt (ARRAY_REF,
4560 stabilize_reference (TREE_OPERAND (ref, 0)),
4561 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4562 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4563 break;
4565 case ARRAY_RANGE_REF:
4566 result = build_nt (ARRAY_RANGE_REF,
4567 stabilize_reference (TREE_OPERAND (ref, 0)),
4568 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4569 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4570 break;
4572 case COMPOUND_EXPR:
4573 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4574 it wouldn't be ignored. This matters when dealing with
4575 volatiles. */
4576 return stabilize_reference_1 (ref);
4578 /* If arg isn't a kind of lvalue we recognize, make no change.
4579 Caller should recognize the error for an invalid lvalue. */
4580 default:
4581 return ref;
4583 case ERROR_MARK:
4584 return error_mark_node;
4587 TREE_TYPE (result) = TREE_TYPE (ref);
4588 TREE_READONLY (result) = TREE_READONLY (ref);
4589 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4590 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4592 return result;
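/* Illustrative sketch, not part of the original source: a front end
   lowering "a[i] += x" can stabilize the left-hand side once and reuse
   it for both the read and the write, e.g.

     tree lhs = stabilize_reference (array_ref);  // the ARRAY_REF a[i], built elsewhere
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (lhs), lhs, x);
     tree mod = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, sum);

   Side-effecting subexpressions such as the index end up wrapped in
   SAVE_EXPRs by stabilize_reference_1, so they are evaluated only once
   even though LHS occurs twice.  ARRAY_REF and X are hypothetical.  */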
4595 /* Low-level constructors for expressions. */
4597 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4598 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4600 void
4601 recompute_tree_invariant_for_addr_expr (tree t)
4603 tree node;
4604 bool tc = true, se = false;
4606 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4608 /* We start out assuming this address is both invariant and constant and
4609 has no side effects. Now go down any handled components and see if
4610 any of them involve offsets that are either non-constant or non-invariant.
4611 Also check for side-effects.
4613 ??? Note that this code makes no attempt to deal with the case where
4614 taking the address of something causes a copy due to misalignment. */
4616 #define UPDATE_FLAGS(NODE) \
4617 do { tree _node = (NODE); \
4618 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4619 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4621 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4622 node = TREE_OPERAND (node, 0))
4624 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4625 array reference (probably made temporarily by the G++ front end),
4626 so ignore all the operands. */
4627 if ((TREE_CODE (node) == ARRAY_REF
4628 || TREE_CODE (node) == ARRAY_RANGE_REF)
4629 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4631 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4632 if (TREE_OPERAND (node, 2))
4633 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4634 if (TREE_OPERAND (node, 3))
4635 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4637 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4638 FIELD_DECL, apparently. The G++ front end can put something else
4639 there, at least temporarily. */
4640 else if (TREE_CODE (node) == COMPONENT_REF
4641 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4643 if (TREE_OPERAND (node, 2))
4644 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4648 node = lang_hooks.expr_to_decl (node, &tc, &se);
4650 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4651 the address, since &(*a)->b is a form of addition. If it's a constant, the
4652 address is constant too. If it's a decl, its address is constant if the
4653 decl is static. Everything else is not constant and, furthermore,
4654 taking the address of a volatile variable is not volatile. */
4655 if (TREE_CODE (node) == INDIRECT_REF
4656 || TREE_CODE (node) == MEM_REF)
4657 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4658 else if (CONSTANT_CLASS_P (node))
4660 else if (DECL_P (node))
4661 tc &= (staticp (node) != NULL_TREE);
4662 else
4664 tc = false;
4665 se |= TREE_SIDE_EFFECTS (node);
4669 TREE_CONSTANT (t) = tc;
4670 TREE_SIDE_EFFECTS (t) = se;
4671 #undef UPDATE_FLAGS
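/* Illustrative sketch, not part of the original source: the effect of
   the recomputation on a hand-built ADDR_EXPR, assuming V is a VAR_DECL
   (hypothetical):

     tree addr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (v)), v);
     // build1 already calls recompute_tree_invariant_for_addr_expr for
     // ADDR_EXPR, so TREE_CONSTANT (addr) is 1 if staticp (v) is
     // non-NULL and 0 for an automatic variable.
     recompute_tree_invariant_for_addr_expr (addr);
     // An explicit call is only needed after the operand has been
     // modified in place.  */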
4674 /* Build an expression of code CODE, data type TYPE, and operands as
4675 specified. Expressions and reference nodes can be created this way.
4676 Constants, decls, types and misc nodes cannot be.
4678 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4679 enough for all extant tree codes. */
4681 tree
4682 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4684 tree t;
4686 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4688 t = make_node (code PASS_MEM_STAT);
4689 TREE_TYPE (t) = tt;
4691 return t;
4694 tree
4695 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4697 int length = sizeof (struct tree_exp);
4698 tree t;
4700 record_node_allocation_statistics (code, length);
4702 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4704 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4706 memset (t, 0, sizeof (struct tree_common));
4708 TREE_SET_CODE (t, code);
4710 TREE_TYPE (t) = type;
4711 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4712 TREE_OPERAND (t, 0) = node;
4713 if (node && !TYPE_P (node))
4715 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4716 TREE_READONLY (t) = TREE_READONLY (node);
4719 if (TREE_CODE_CLASS (code) == tcc_statement)
4721 if (code != DEBUG_BEGIN_STMT)
4722 TREE_SIDE_EFFECTS (t) = 1;
4724 else switch (code)
4726 case VA_ARG_EXPR:
4727 /* All of these have side-effects, no matter what their
4728 operands are. */
4729 TREE_SIDE_EFFECTS (t) = 1;
4730 TREE_READONLY (t) = 0;
4731 break;
4733 case INDIRECT_REF:
4734 /* Whether a dereference is readonly has nothing to do with whether
4735 its operand is readonly. */
4736 TREE_READONLY (t) = 0;
4737 break;
4739 case ADDR_EXPR:
4740 if (node)
4741 recompute_tree_invariant_for_addr_expr (t);
4742 break;
4744 default:
4745 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4746 && node && !TYPE_P (node)
4747 && TREE_CONSTANT (node))
4748 TREE_CONSTANT (t) = 1;
4749 if (TREE_CODE_CLASS (code) == tcc_reference
4750 && node && TREE_THIS_VOLATILE (node))
4751 TREE_THIS_VOLATILE (t) = 1;
4752 break;
4755 return t;
4758 #define PROCESS_ARG(N) \
4759 do { \
4760 TREE_OPERAND (t, N) = arg##N; \
4761 if (arg##N &&!TYPE_P (arg##N)) \
4763 if (TREE_SIDE_EFFECTS (arg##N)) \
4764 side_effects = 1; \
4765 if (!TREE_READONLY (arg##N) \
4766 && !CONSTANT_CLASS_P (arg##N)) \
4767 (void) (read_only = 0); \
4768 if (!TREE_CONSTANT (arg##N)) \
4769 (void) (constant = 0); \
4771 } while (0)
4773 tree
4774 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4776 bool constant, read_only, side_effects, div_by_zero;
4777 tree t;
4779 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4781 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4782 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4783 /* When sizetype precision doesn't match that of pointers
4784 we need to be able to build explicit extensions or truncations
4785 of the offset argument. */
4786 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4787 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4788 && TREE_CODE (arg1) == INTEGER_CST);
4790 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4791 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4792 && ptrofftype_p (TREE_TYPE (arg1)));
4794 t = make_node (code PASS_MEM_STAT);
4795 TREE_TYPE (t) = tt;
4797 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4798 result based on those same flags for the arguments. But if the
4799 arguments aren't really even `tree' expressions, we shouldn't be trying
4800 to do this. */
4802 /* Expressions without side effects may be constant if their
4803 arguments are as well. */
4804 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4805 || TREE_CODE_CLASS (code) == tcc_binary);
4806 read_only = 1;
4807 side_effects = TREE_SIDE_EFFECTS (t);
4809 switch (code)
4811 case TRUNC_DIV_EXPR:
4812 case CEIL_DIV_EXPR:
4813 case FLOOR_DIV_EXPR:
4814 case ROUND_DIV_EXPR:
4815 case EXACT_DIV_EXPR:
4816 case CEIL_MOD_EXPR:
4817 case FLOOR_MOD_EXPR:
4818 case ROUND_MOD_EXPR:
4819 case TRUNC_MOD_EXPR:
4820 div_by_zero = integer_zerop (arg1);
4821 break;
4822 default:
4823 div_by_zero = false;
4826 PROCESS_ARG (0);
4827 PROCESS_ARG (1);
4829 TREE_SIDE_EFFECTS (t) = side_effects;
4830 if (code == MEM_REF)
4832 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4834 tree o = TREE_OPERAND (arg0, 0);
4835 TREE_READONLY (t) = TREE_READONLY (o);
4836 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4839 else
4841 TREE_READONLY (t) = read_only;
4842 /* Don't mark X / 0 as constant. */
4843 TREE_CONSTANT (t) = constant && !div_by_zero;
4844 TREE_THIS_VOLATILE (t)
4845 = (TREE_CODE_CLASS (code) == tcc_reference
4846 && arg0 && TREE_THIS_VOLATILE (arg0));
4849 return t;
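/* Illustrative sketch, not part of the original source: PROCESS_ARG
   propagates TREE_SIDE_EFFECTS, TREE_READONLY and TREE_CONSTANT from the
   operands, so for

     tree one = build_int_cst (integer_type_node, 1);
     tree sum = build2 (PLUS_EXPR, integer_type_node, one, one);

   TREE_CONSTANT (sum) is 1 and TREE_SIDE_EFFECTS (sum) is 0.  Pointer
   arithmetic must instead use POINTER_PLUS_EXPR with a ptrofftype_p
   offset, as the assertions above enforce.  */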
4853 tree
4854 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
4855 tree arg2 MEM_STAT_DECL)
4857 bool constant, read_only, side_effects;
4858 tree t;
4860 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4861 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4863 t = make_node (code PASS_MEM_STAT);
4864 TREE_TYPE (t) = tt;
4866 read_only = 1;
4868 /* As a special exception, if COND_EXPR has NULL branches, we
4869 assume that it is a gimple statement and always consider
4870 it to have side effects. */
4871 if (code == COND_EXPR
4872 && tt == void_type_node
4873 && arg1 == NULL_TREE
4874 && arg2 == NULL_TREE)
4875 side_effects = true;
4876 else
4877 side_effects = TREE_SIDE_EFFECTS (t);
4879 PROCESS_ARG (0);
4880 PROCESS_ARG (1);
4881 PROCESS_ARG (2);
4883 if (code == COND_EXPR)
4884 TREE_READONLY (t) = read_only;
4886 TREE_SIDE_EFFECTS (t) = side_effects;
4887 TREE_THIS_VOLATILE (t)
4888 = (TREE_CODE_CLASS (code) == tcc_reference
4889 && arg0 && TREE_THIS_VOLATILE (arg0));
4891 return t;
4894 tree
4895 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
4896 tree arg2, tree arg3 MEM_STAT_DECL)
4898 bool constant, read_only, side_effects;
4899 tree t;
4901 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4903 t = make_node (code PASS_MEM_STAT);
4904 TREE_TYPE (t) = tt;
4906 side_effects = TREE_SIDE_EFFECTS (t);
4908 PROCESS_ARG (0);
4909 PROCESS_ARG (1);
4910 PROCESS_ARG (2);
4911 PROCESS_ARG (3);
4913 TREE_SIDE_EFFECTS (t) = side_effects;
4914 TREE_THIS_VOLATILE (t)
4915 = (TREE_CODE_CLASS (code) == tcc_reference
4916 && arg0 && TREE_THIS_VOLATILE (arg0));
4918 return t;
4921 tree
4922 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
4923 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4925 bool constant, read_only, side_effects;
4926 tree t;
4928 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4930 t = make_node (code PASS_MEM_STAT);
4931 TREE_TYPE (t) = tt;
4933 side_effects = TREE_SIDE_EFFECTS (t);
4935 PROCESS_ARG (0);
4936 PROCESS_ARG (1);
4937 PROCESS_ARG (2);
4938 PROCESS_ARG (3);
4939 PROCESS_ARG (4);
4941 TREE_SIDE_EFFECTS (t) = side_effects;
4942 if (code == TARGET_MEM_REF)
4944 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4946 tree o = TREE_OPERAND (arg0, 0);
4947 TREE_READONLY (t) = TREE_READONLY (o);
4948 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4951 else
4952 TREE_THIS_VOLATILE (t)
4953 = (TREE_CODE_CLASS (code) == tcc_reference
4954 && arg0 && TREE_THIS_VOLATILE (arg0));
4956 return t;
4959 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4960 on the pointer PTR. */
4962 tree
4963 build_simple_mem_ref_loc (location_t loc, tree ptr)
4965 poly_int64 offset = 0;
4966 tree ptype = TREE_TYPE (ptr);
4967 tree tem;
4968 /* For convenience allow addresses that collapse to a simple base
4969 and offset. */
4970 if (TREE_CODE (ptr) == ADDR_EXPR
4971 && (handled_component_p (TREE_OPERAND (ptr, 0))
4972 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4974 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4975 gcc_assert (ptr);
4976 if (TREE_CODE (ptr) == MEM_REF)
4978 offset += mem_ref_offset (ptr).force_shwi ();
4979 ptr = TREE_OPERAND (ptr, 0);
4981 else
4982 ptr = build_fold_addr_expr (ptr);
4983 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4985 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4986 ptr, build_int_cst (ptype, offset));
4987 SET_EXPR_LOCATION (tem, loc);
4988 return tem;
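/* Illustrative sketch, not part of the original source: for a pointer
   PTR of type "int *" (hypothetical), the following builds the
   equivalent of the dereference *PTR:

     tree deref = build_simple_mem_ref_loc (loc, ptr);
     // deref is MEM_REF <ptr, 0> with TREE_TYPE == int.

   If PTR is instead an ADDR_EXPR of a handled component such as &s.f,
   the address is first collapsed to a base and byte offset, so the
   resulting MEM_REF is based on &s with a nonzero offset operand.  */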
4991 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4993 poly_offset_int
4994 mem_ref_offset (const_tree t)
4996 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
4997 SIGNED);
5000 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5001 offsetted by OFFSET units. */
5003 tree
5004 build_invariant_address (tree type, tree base, poly_int64 offset)
5006 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5007 build_fold_addr_expr (base),
5008 build_int_cst (ptr_type_node, offset));
5009 tree addr = build1 (ADDR_EXPR, type, ref);
5010 recompute_tree_invariant_for_addr_expr (addr);
5011 return addr;
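/* Illustrative sketch, not part of the original source: taking an
   invariant address of a static VAR_DECL V (hypothetical) offset by 4
   bytes:

     tree addr = build_invariant_address (build_pointer_type (TREE_TYPE (v)),
                                          v, 4);
     // addr is ADDR_EXPR <MEM_REF <&v, 4>> and is TREE_CONSTANT because
     // the address of a static decl is constant.  */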
5014 /* Similar except don't specify the TREE_TYPE
5015 and leave the TREE_SIDE_EFFECTS as 0.
5016 It is permissible for arguments to be null,
5017 or even garbage if their values do not matter. */
5019 tree
5020 build_nt (enum tree_code code, ...)
5022 tree t;
5023 int length;
5024 int i;
5025 va_list p;
5027 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5029 va_start (p, code);
5031 t = make_node (code);
5032 length = TREE_CODE_LENGTH (code);
5034 for (i = 0; i < length; i++)
5035 TREE_OPERAND (t, i) = va_arg (p, tree);
5037 va_end (p);
5038 return t;
5041 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5042 tree vec. */
5044 tree
5045 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5047 tree ret, t;
5048 unsigned int ix;
5050 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5051 CALL_EXPR_FN (ret) = fn;
5052 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5053 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5054 CALL_EXPR_ARG (ret, ix) = t;
5055 return ret;
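/* Illustrative sketch, not part of the original source: building an
   unfolded CALL_EXPR from a GC-allocated argument vector, assuming
   FNDECL, ARG0 and ARG1 are existing trees (hypothetical):

     vec<tree, va_gc> *args = NULL;
     vec_safe_push (args, arg0);
     vec_safe_push (args, arg1);
     tree call = build_nt_call_vec (build_fold_addr_expr (fndecl), args);

   Unlike the build_call_* helpers, no folding and no side-effect or
   flag computation is performed here.  */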
5058 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5059 and data type TYPE.
5060 We do NOT enter this node in any sort of symbol table.
5062 LOC is the location of the decl.
5064 layout_decl is used to set up the decl's storage layout.
5065 Other slots are initialized to 0 or null pointers. */
5067 tree
5068 build_decl (location_t loc, enum tree_code code, tree name,
5069 tree type MEM_STAT_DECL)
5071 tree t;
5073 t = make_node (code PASS_MEM_STAT);
5074 DECL_SOURCE_LOCATION (t) = loc;
5076 /* if (type == error_mark_node)
5077 type = integer_type_node; */
5078 /* That is not done, deliberately, so that having error_mark_node
5079 as the type can suppress useless errors in the use of this variable. */
5081 DECL_NAME (t) = name;
5082 TREE_TYPE (t) = type;
5084 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5085 layout_decl (t, 0);
5087 return t;
5090 /* Builds and returns function declaration with NAME and TYPE. */
5092 tree
5093 build_fn_decl (const char *name, tree type)
5095 tree id = get_identifier (name);
5096 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5098 DECL_EXTERNAL (decl) = 1;
5099 TREE_PUBLIC (decl) = 1;
5100 DECL_ARTIFICIAL (decl) = 1;
5101 TREE_NOTHROW (decl) = 1;
5103 return decl;
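/* Illustrative sketch, not part of the original source: declaring an
   artificial, nothrow, external helper taking an int and returning
   void ("__example_helper" is a made-up name used only here):

     tree fntype = build_function_type_list (void_type_node,
                                             integer_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__example_helper", fntype);  */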
5106 vec<tree, va_gc> *all_translation_units;
5108 /* Builds a new translation-unit decl with name NAME, queues it in the
5109 global list of translation-unit decls and returns it. */
5111 tree
5112 build_translation_unit_decl (tree name)
5114 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5115 name, NULL_TREE);
5116 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5117 vec_safe_push (all_translation_units, tu);
5118 return tu;
5122 /* BLOCK nodes are used to represent the structure of binding contours
5123 and declarations, once those contours have been exited and their contents
5124 compiled. This information is used for outputting debugging info. */
5126 tree
5127 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5129 tree block = make_node (BLOCK);
5131 BLOCK_VARS (block) = vars;
5132 BLOCK_SUBBLOCKS (block) = subblocks;
5133 BLOCK_SUPERCONTEXT (block) = supercontext;
5134 BLOCK_CHAIN (block) = chain;
5135 return block;
5139 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5141 LOC is the location to use in tree T. */
5143 void
5144 protected_set_expr_location (tree t, location_t loc)
5146 if (CAN_HAVE_LOCATION_P (t))
5147 SET_EXPR_LOCATION (t, loc);
5148 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5150 t = expr_single (t);
5151 if (t && CAN_HAVE_LOCATION_P (t))
5152 SET_EXPR_LOCATION (t, loc);
5156 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5157 UNKNOWN_LOCATION. */
5159 void
5160 protected_set_expr_location_if_unset (tree t, location_t loc)
5162 t = expr_single (t);
5163 if (t && !EXPR_HAS_LOCATION (t))
5164 protected_set_expr_location (t, loc);
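/* Illustrative sketch, not part of the original source: code lowering a
   construct can stamp a location on the resulting tree without checking
   whether the node can carry one, assuming LHS and RHS are existing
   trees (hypothetical):

     tree stmt = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
     protected_set_expr_location_if_unset (stmt, input_location);
     // The location is only set if STMT does not already have one.  */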
5167 /* Data used when collecting DECLs and TYPEs for language data removal. */
5169 class free_lang_data_d
5171 public:
5172 free_lang_data_d () : decls (100), types (100) {}
5174 /* Worklist to avoid excessive recursion. */
5175 auto_vec<tree> worklist;
5177 /* Set of traversed objects. Used to avoid duplicate visits. */
5178 hash_set<tree> pset;
5180 /* Array of symbols to process with free_lang_data_in_decl. */
5181 auto_vec<tree> decls;
5183 /* Array of types to process with free_lang_data_in_type. */
5184 auto_vec<tree> types;
5188 /* Add type or decl T to one of the list of tree nodes that need their
5189 language data removed. The lists are held inside FLD. */
5191 static void
5192 add_tree_to_fld_list (tree t, class free_lang_data_d *fld)
5194 if (DECL_P (t))
5195 fld->decls.safe_push (t);
5196 else if (TYPE_P (t))
5197 fld->types.safe_push (t);
5198 else
5199 gcc_unreachable ();
5202 /* Push tree node T into FLD->WORKLIST. */
5204 static inline void
5205 fld_worklist_push (tree t, class free_lang_data_d *fld)
5207 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5208 fld->worklist.safe_push ((t));
5213 /* Return simplified TYPE_NAME of TYPE. */
5215 static tree
5216 fld_simplified_type_name (tree type)
5218 if (!TYPE_NAME (type) || TREE_CODE (TYPE_NAME (type)) != TYPE_DECL)
5219 return TYPE_NAME (type);
5220 /* Drop TYPE_DECLs in TYPE_NAME in favor of the identifier in the
5221 TYPE_DECL if the type doesn't have linkage.
5222 This must match fld_ */
5223 if (type != TYPE_MAIN_VARIANT (type)
5224 || (!DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (type))
5225 && (TREE_CODE (type) != RECORD_TYPE
5226 || !TYPE_BINFO (type)
5227 || !BINFO_VTABLE (TYPE_BINFO (type)))))
5228 return DECL_NAME (TYPE_NAME (type));
5229 return TYPE_NAME (type);
5232 /* Do the same comparison as check_qualified_type, skipping the lang part of
5233 the type, and be more permissive about type names: we only care that the
5234 names are the same (for diagnostics) and that the ODR names are the same.
5235 If INNER_TYPE is non-NULL, also check that TREE_TYPE matches it. */
5237 static bool
5238 fld_type_variant_equal_p (tree t, tree v, tree inner_type)
5240 if (TYPE_QUALS (t) != TYPE_QUALS (v)
5241 /* We want to match incomplete variants with complete types.
5242 In this case we need to ignore alignment. */
5243 || ((!RECORD_OR_UNION_TYPE_P (t) || COMPLETE_TYPE_P (v))
5244 && (TYPE_ALIGN (t) != TYPE_ALIGN (v)
5245 || TYPE_USER_ALIGN (t) != TYPE_USER_ALIGN (v)))
5246 || fld_simplified_type_name (t) != fld_simplified_type_name (v)
5247 || !attribute_list_equal (TYPE_ATTRIBUTES (t),
5248 TYPE_ATTRIBUTES (v))
5249 || (inner_type && TREE_TYPE (v) != inner_type))
5250 return false;
5252 return true;
5255 /* Find the variant of FIRST that matches T and create a new one if necessary.
5256 Set TREE_TYPE to INNER_TYPE if non-NULL. */
5258 static tree
5259 fld_type_variant (tree first, tree t, class free_lang_data_d *fld,
5260 tree inner_type = NULL)
5262 if (first == TYPE_MAIN_VARIANT (t))
5263 return t;
5264 for (tree v = first; v; v = TYPE_NEXT_VARIANT (v))
5265 if (fld_type_variant_equal_p (t, v, inner_type))
5266 return v;
5267 tree v = build_variant_type_copy (first);
5268 TYPE_READONLY (v) = TYPE_READONLY (t);
5269 TYPE_VOLATILE (v) = TYPE_VOLATILE (t);
5270 TYPE_ATOMIC (v) = TYPE_ATOMIC (t);
5271 TYPE_RESTRICT (v) = TYPE_RESTRICT (t);
5272 TYPE_ADDR_SPACE (v) = TYPE_ADDR_SPACE (t);
5273 TYPE_NAME (v) = TYPE_NAME (t);
5274 TYPE_ATTRIBUTES (v) = TYPE_ATTRIBUTES (t);
5275 TYPE_CANONICAL (v) = TYPE_CANONICAL (t);
5276 /* Variants of incomplete types should have alignment
5277 set to BITS_PER_UNIT. Do not copy the actual alignment. */
5278 if (!RECORD_OR_UNION_TYPE_P (v) || COMPLETE_TYPE_P (v))
5280 SET_TYPE_ALIGN (v, TYPE_ALIGN (t));
5281 TYPE_USER_ALIGN (v) = TYPE_USER_ALIGN (t);
5283 if (inner_type)
5284 TREE_TYPE (v) = inner_type;
5285 gcc_checking_assert (fld_type_variant_equal_p (t,v, inner_type));
5286 if (!fld->pset.add (v))
5287 add_tree_to_fld_list (v, fld);
5288 return v;
5291 /* Map complete types to incomplete types. */
5293 static hash_map<tree, tree> *fld_incomplete_types;
5295 /* Map types to simplified types. */
5297 static hash_map<tree, tree> *fld_simplified_types;
5299 /* Produce a variant of T whose TREE_TYPE is T2. If it is the main variant,
5300 use MAP to prevent duplicates. */
5302 static tree
5303 fld_process_array_type (tree t, tree t2, hash_map<tree, tree> *map,
5304 class free_lang_data_d *fld)
5306 if (TREE_TYPE (t) == t2)
5307 return t;
5309 if (TYPE_MAIN_VARIANT (t) != t)
5311 return fld_type_variant
5312 (fld_process_array_type (TYPE_MAIN_VARIANT (t),
5313 TYPE_MAIN_VARIANT (t2), map, fld),
5314 t, fld, t2);
5317 bool existed;
5318 tree &array
5319 = map->get_or_insert (t, &existed);
5320 if (!existed)
5322 array
5323 = build_array_type_1 (t2, TYPE_DOMAIN (t), TYPE_TYPELESS_STORAGE (t),
5324 false, false);
5325 TYPE_CANONICAL (array) = TYPE_CANONICAL (t);
5326 if (!fld->pset.add (array))
5327 add_tree_to_fld_list (array, fld);
5329 return array;
5332 /* Return CTX after removal of contexts that are not relevant. */
5334 static tree
5335 fld_decl_context (tree ctx)
5337 /* Variably modified types are needed for tree_is_indexable to decide
5338 whether the type needs to go to local or global section.
5339 This code is semi-broken but for now it is easiest to keep contexts
5340 as expected. */
5341 if (ctx && TYPE_P (ctx)
5342 && !variably_modified_type_p (ctx, NULL_TREE))
5344 while (ctx && TYPE_P (ctx))
5345 ctx = TYPE_CONTEXT (ctx);
5347 return ctx;
5350 /* For T being an aggregate type, try to turn it into an incomplete variant.
5351 Return T if no simplification is possible. */
5353 static tree
5354 fld_incomplete_type_of (tree t, class free_lang_data_d *fld)
5356 if (!t)
5357 return NULL;
5358 if (POINTER_TYPE_P (t))
5360 tree t2 = fld_incomplete_type_of (TREE_TYPE (t), fld);
5361 if (t2 != TREE_TYPE (t))
5363 tree first;
5364 if (TREE_CODE (t) == POINTER_TYPE)
5365 first = build_pointer_type_for_mode (t2, TYPE_MODE (t),
5366 TYPE_REF_CAN_ALIAS_ALL (t));
5367 else
5368 first = build_reference_type_for_mode (t2, TYPE_MODE (t),
5369 TYPE_REF_CAN_ALIAS_ALL (t));
5370 gcc_assert (TYPE_CANONICAL (t2) != t2
5371 && TYPE_CANONICAL (t2) == TYPE_CANONICAL (TREE_TYPE (t)));
5372 if (!fld->pset.add (first))
5373 add_tree_to_fld_list (first, fld);
5374 return fld_type_variant (first, t, fld);
5376 return t;
5378 if (TREE_CODE (t) == ARRAY_TYPE)
5379 return fld_process_array_type (t,
5380 fld_incomplete_type_of (TREE_TYPE (t), fld),
5381 fld_incomplete_types, fld);
5382 if ((!RECORD_OR_UNION_TYPE_P (t) && TREE_CODE (t) != ENUMERAL_TYPE)
5383 || !COMPLETE_TYPE_P (t))
5384 return t;
5385 if (TYPE_MAIN_VARIANT (t) == t)
5387 bool existed;
5388 tree &copy
5389 = fld_incomplete_types->get_or_insert (t, &existed);
5391 if (!existed)
5393 copy = build_distinct_type_copy (t);
5395 /* It is possible that the type was not seen by free_lang_data yet. */
5396 if (!fld->pset.add (copy))
5397 add_tree_to_fld_list (copy, fld);
5398 TYPE_SIZE (copy) = NULL;
5399 TYPE_USER_ALIGN (copy) = 0;
5400 TYPE_SIZE_UNIT (copy) = NULL;
5401 TYPE_CANONICAL (copy) = TYPE_CANONICAL (t);
5402 TREE_ADDRESSABLE (copy) = 0;
5403 if (AGGREGATE_TYPE_P (t))
5405 SET_TYPE_MODE (copy, VOIDmode);
5406 SET_TYPE_ALIGN (copy, BITS_PER_UNIT);
5407 TYPE_TYPELESS_STORAGE (copy) = 0;
5408 TYPE_FIELDS (copy) = NULL;
5409 TYPE_BINFO (copy) = NULL;
5410 TYPE_FINAL_P (copy) = 0;
5411 TYPE_EMPTY_P (copy) = 0;
5413 else
5415 TYPE_VALUES (copy) = NULL;
5416 ENUM_IS_OPAQUE (copy) = 0;
5417 ENUM_IS_SCOPED (copy) = 0;
5420 /* Build a copy of the TYPE_DECL in TYPE_NAME if necessary.
5421 This is needed for ODR violation warnings to come out right (we
5422 want duplicate TYPE_DECLs whenever the type is duplicated because
5423 of an ODR violation). Because lang data in the TYPE_DECL may not
5424 have been freed yet, rebuild it from scratch and copy the relevant
5425 fields. */
5426 TYPE_NAME (copy) = fld_simplified_type_name (copy);
5427 tree name = TYPE_NAME (copy);
5429 if (name && TREE_CODE (name) == TYPE_DECL)
5431 gcc_checking_assert (TREE_TYPE (name) == t);
5432 tree name2 = build_decl (DECL_SOURCE_LOCATION (name), TYPE_DECL,
5433 DECL_NAME (name), copy);
5434 if (DECL_ASSEMBLER_NAME_SET_P (name))
5435 SET_DECL_ASSEMBLER_NAME (name2, DECL_ASSEMBLER_NAME (name));
5436 SET_DECL_ALIGN (name2, 0);
5437 DECL_CONTEXT (name2) = fld_decl_context
5438 (DECL_CONTEXT (name));
5439 TYPE_NAME (copy) = name2;
5442 return copy;
5444 return (fld_type_variant
5445 (fld_incomplete_type_of (TYPE_MAIN_VARIANT (t), fld), t, fld));
5448 /* Simplify type T for scenarios where we do not need complete pointer
5449 types. */
5451 static tree
5452 fld_simplified_type (tree t, class free_lang_data_d *fld)
5454 if (!t)
5455 return t;
5456 if (POINTER_TYPE_P (t))
5457 return fld_incomplete_type_of (t, fld);
5458 /* FIXME: This triggers a verification error, see PR88140. */
5459 if (TREE_CODE (t) == ARRAY_TYPE && 0)
5460 return fld_process_array_type (t, fld_simplified_type (TREE_TYPE (t), fld),
5461 fld_simplified_types, fld);
5462 return t;
5465 /* Reset the expression *EXPR_P, a size or position.
5467 ??? We could reset all non-constant sizes or positions. But it's cheap
5468 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5470 We need to reset self-referential sizes or positions because they cannot
5471 be gimplified and thus can contain a CALL_EXPR after the gimplification
5472 is finished, which will run afoul of LTO streaming. And they need to be
5473 reset to something essentially dummy but not constant, so as to preserve
5474 the properties of the object they are attached to. */
5476 static inline void
5477 free_lang_data_in_one_sizepos (tree *expr_p)
5479 tree expr = *expr_p;
5480 if (CONTAINS_PLACEHOLDER_P (expr))
5481 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5485 /* Reset all the fields in a binfo node BINFO. We only keep
5486 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5488 static void
5489 free_lang_data_in_binfo (tree binfo)
5491 unsigned i;
5492 tree t;
5494 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5496 BINFO_VIRTUALS (binfo) = NULL_TREE;
5497 BINFO_BASE_ACCESSES (binfo) = NULL;
5498 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5499 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5500 BINFO_VPTR_FIELD (binfo) = NULL_TREE;
5501 TREE_PUBLIC (binfo) = 0;
5503 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5504 free_lang_data_in_binfo (t);
5508 /* Reset all language specific information still present in TYPE. */
5510 static void
5511 free_lang_data_in_type (tree type, class free_lang_data_d *fld)
5513 gcc_assert (TYPE_P (type));
5515 /* Give the FE a chance to remove its own data first. */
5516 lang_hooks.free_lang_data (type);
5518 TREE_LANG_FLAG_0 (type) = 0;
5519 TREE_LANG_FLAG_1 (type) = 0;
5520 TREE_LANG_FLAG_2 (type) = 0;
5521 TREE_LANG_FLAG_3 (type) = 0;
5522 TREE_LANG_FLAG_4 (type) = 0;
5523 TREE_LANG_FLAG_5 (type) = 0;
5524 TREE_LANG_FLAG_6 (type) = 0;
5526 TYPE_NEEDS_CONSTRUCTING (type) = 0;
5528 /* Purge non-marked variants from the variants chain, so that they
5529 don't reappear in the IL after free_lang_data. */
5530 while (TYPE_NEXT_VARIANT (type)
5531 && !fld->pset.contains (TYPE_NEXT_VARIANT (type)))
5533 tree t = TYPE_NEXT_VARIANT (type);
5534 TYPE_NEXT_VARIANT (type) = TYPE_NEXT_VARIANT (t);
5535 /* Turn the removed types into distinct types. */
5536 TYPE_MAIN_VARIANT (t) = t;
5537 TYPE_NEXT_VARIANT (t) = NULL_TREE;
5540 if (TREE_CODE (type) == FUNCTION_TYPE)
5542 TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5543 /* Remove the const and volatile qualifiers from arguments. The
5544 C++ front end removes them, but the C front end does not,
5545 leading to false ODR violation errors when merging two
5546 instances of the same function signature compiled by
5547 different front ends. */
5548 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5550 TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5551 tree arg_type = TREE_VALUE (p);
5553 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5555 int quals = TYPE_QUALS (arg_type)
5556 & ~TYPE_QUAL_CONST
5557 & ~TYPE_QUAL_VOLATILE;
5558 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5559 if (!fld->pset.add (TREE_VALUE (p)))
5560 free_lang_data_in_type (TREE_VALUE (p), fld);
5562 /* C++ FE uses TREE_PURPOSE to store initial values. */
5563 TREE_PURPOSE (p) = NULL;
5566 else if (TREE_CODE (type) == METHOD_TYPE)
5568 TREE_TYPE (type) = fld_simplified_type (TREE_TYPE (type), fld);
5569 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5571 /* C++ FE uses TREE_PURPOSE to store initial values. */
5572 TREE_VALUE (p) = fld_simplified_type (TREE_VALUE (p), fld);
5573 TREE_PURPOSE (p) = NULL;
5576 else if (RECORD_OR_UNION_TYPE_P (type))
5578 /* Remove members that are not FIELD_DECLs from the field list
5579 of an aggregate. These occur in C++. */
5580 for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
5581 if (TREE_CODE (member) == FIELD_DECL)
5582 prev = &DECL_CHAIN (member);
5583 else
5584 *prev = DECL_CHAIN (member);
5586 TYPE_VFIELD (type) = NULL_TREE;
5588 if (TYPE_BINFO (type))
5590 free_lang_data_in_binfo (TYPE_BINFO (type));
5591 /* We need to preserve the link to the bases and the virtual table for all
5592 polymorphic types to keep the devirtualization machinery working. */
5593 if (!BINFO_VTABLE (TYPE_BINFO (type)))
5594 TYPE_BINFO (type) = NULL;
5597 else if (INTEGRAL_TYPE_P (type)
5598 || SCALAR_FLOAT_TYPE_P (type)
5599 || FIXED_POINT_TYPE_P (type))
5601 if (TREE_CODE (type) == ENUMERAL_TYPE)
5603 ENUM_IS_OPAQUE (type) = 0;
5604 ENUM_IS_SCOPED (type) = 0;
5605 /* Type values are used only for C++ ODR checking. Drop them
5606 for all type variants and non-ODR types.
5607 For ODR types the data is freed in free_odr_warning_data. */
5608 if (!TYPE_VALUES (type))
5610 else if (TYPE_MAIN_VARIANT (type) != type
5611 || !type_with_linkage_p (type)
5612 || type_in_anonymous_namespace_p (type))
5613 TYPE_VALUES (type) = NULL;
5614 else
5615 register_odr_enum (type);
5617 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5618 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5621 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5623 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5624 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5626 if (TYPE_CONTEXT (type)
5627 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5629 tree ctx = TYPE_CONTEXT (type);
5632 ctx = BLOCK_SUPERCONTEXT (ctx);
5634 while (ctx && TREE_CODE (ctx) == BLOCK);
5635 TYPE_CONTEXT (type) = ctx;
5638 TYPE_STUB_DECL (type) = NULL;
5639 TYPE_NAME (type) = fld_simplified_type_name (type);
5643 /* Return true if DECL may need an assembler name to be set. */
5645 static inline bool
5646 need_assembler_name_p (tree decl)
5648 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5649 Rule merging. This makes type_odr_p return true on those types during
5650 LTO, and by comparing the mangled names we can tell which types are
5651 intended to be equivalent across compilation units.
5653 We do not store names of types satisfying type_in_anonymous_namespace_p.
5655 Record, union and enumeration types have linkage that allows us
5656 to check type_in_anonymous_namespace_p. We do not mangle compound types
5657 that can always be compared structurally.
5659 Similarly for builtin types, we compare properties of their main variant.
5660 A special case is integer types, where mangling does make a difference
5661 between char/signed char/unsigned char etc. Storing names for these allows
5662 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5663 See cp/mangle.c:write_builtin_type for details. */
5665 if (TREE_CODE (decl) == TYPE_DECL)
5667 if (DECL_NAME (decl)
5668 && decl == TYPE_NAME (TREE_TYPE (decl))
5669 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5670 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5671 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
5672 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
5673 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
5674 && (type_with_linkage_p (TREE_TYPE (decl))
5675 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5676 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5677 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5678 return false;
5680 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5681 if (!VAR_OR_FUNCTION_DECL_P (decl))
5682 return false;
5684 /* If DECL already has its assembler name set, it does not need a
5685 new one. */
5686 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5687 || DECL_ASSEMBLER_NAME_SET_P (decl))
5688 return false;
5690 /* Abstract decls do not need an assembler name. */
5691 if (DECL_ABSTRACT_P (decl))
5692 return false;
5694 /* For VAR_DECLs, only static, public and external symbols need an
5695 assembler name. */
5696 if (VAR_P (decl)
5697 && !TREE_STATIC (decl)
5698 && !TREE_PUBLIC (decl)
5699 && !DECL_EXTERNAL (decl))
5700 return false;
5702 if (TREE_CODE (decl) == FUNCTION_DECL)
5704 /* Do not set assembler name on builtins. Allow RTL expansion to
5705 decide whether to expand inline or via a regular call. */
5706 if (fndecl_built_in_p (decl)
5707 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5708 return false;
5710 /* Functions represented in the callgraph need an assembler name. */
5711 if (cgraph_node::get (decl) != NULL)
5712 return true;
5714 /* Functions that are unused and not public don't need an assembler name. */
5715 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5716 return false;
5719 return true;
5723 /* Reset all language specific information still present in symbol
5724 DECL. */
5726 static void
5727 free_lang_data_in_decl (tree decl, class free_lang_data_d *fld)
5729 gcc_assert (DECL_P (decl));
5731 /* Give the FE a chance to remove its own data first. */
5732 lang_hooks.free_lang_data (decl);
5734 TREE_LANG_FLAG_0 (decl) = 0;
5735 TREE_LANG_FLAG_1 (decl) = 0;
5736 TREE_LANG_FLAG_2 (decl) = 0;
5737 TREE_LANG_FLAG_3 (decl) = 0;
5738 TREE_LANG_FLAG_4 (decl) = 0;
5739 TREE_LANG_FLAG_5 (decl) = 0;
5740 TREE_LANG_FLAG_6 (decl) = 0;
5742 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5743 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5744 if (TREE_CODE (decl) == FIELD_DECL)
5746 DECL_FCONTEXT (decl) = NULL;
5747 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5748 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5749 DECL_QUALIFIER (decl) = NULL_TREE;
5752 if (TREE_CODE (decl) == FUNCTION_DECL)
5754 struct cgraph_node *node;
5755 /* Frontends do not set TREE_ADDRESSABLE on public variables even though
5756 the address may be taken in another unit, so this flag has no practical
5757 use for the middle end.
5759 It would make more sense if frontends set TREE_ADDRESSABLE to 0 only
5760 for public objects that indeed cannot be addressed, but that is not
5761 the case. Set the flag to true so we do not get merge failures,
5762 e.g. for virtual tables, between units that take the address and
5763 units that don't. */
5764 if (TREE_PUBLIC (decl))
5765 TREE_ADDRESSABLE (decl) = true;
5766 TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5767 if (!(node = cgraph_node::get (decl))
5768 || (!node->definition && !node->clones))
5770 if (node)
5771 node->release_body ();
5772 else
5774 release_function_body (decl);
5775 DECL_ARGUMENTS (decl) = NULL;
5776 DECL_RESULT (decl) = NULL;
5777 DECL_INITIAL (decl) = error_mark_node;
5780 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5782 tree t;
5784 /* If DECL has a gimple body, then the context for its
5785 arguments must be DECL. Otherwise, it doesn't really
5786 matter, as we will not be emitting any code for DECL. In
5787 general, there may be other instances of DECL created by
5788 the front end and since PARM_DECLs are generally shared,
5789 their DECL_CONTEXT changes as the replicas of DECL are
5790 created. The only time where DECL_CONTEXT is important
5791 is for the FUNCTION_DECLs that have a gimple body (since
5792 the PARM_DECL will be used in the function's body). */
5793 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5794 DECL_CONTEXT (t) = decl;
5795 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5796 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5797 = target_option_default_node;
5798 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5799 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5800 = optimization_default_node;
5803 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5804 At this point, it is not needed anymore. */
5805 DECL_SAVED_TREE (decl) = NULL_TREE;
5807 /* Clear the abstract origin if it refers to a method.
5808 Otherwise dwarf2out.c will ICE as we splice functions out of
5809 TYPE_FIELDS and thus the origin will not be output
5810 correctly. */
5811 if (DECL_ABSTRACT_ORIGIN (decl)
5812 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5813 && RECORD_OR_UNION_TYPE_P
5814 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5815 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5817 DECL_VINDEX (decl) = NULL_TREE;
5819 else if (VAR_P (decl))
5821 /* See comment above why we set the flag for functions. */
5822 if (TREE_PUBLIC (decl))
5823 TREE_ADDRESSABLE (decl) = true;
5824 if ((DECL_EXTERNAL (decl)
5825 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5826 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5827 DECL_INITIAL (decl) = NULL_TREE;
5829 else if (TREE_CODE (decl) == TYPE_DECL)
5831 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5832 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5833 TREE_PUBLIC (decl) = 0;
5834 TREE_PRIVATE (decl) = 0;
5835 DECL_ARTIFICIAL (decl) = 0;
5836 TYPE_DECL_SUPPRESS_DEBUG (decl) = 0;
5837 DECL_INITIAL (decl) = NULL_TREE;
5838 DECL_ORIGINAL_TYPE (decl) = NULL_TREE;
5839 DECL_MODE (decl) = VOIDmode;
5840 SET_DECL_ALIGN (decl, 0);
5841 /* TREE_TYPE is cleared at WPA time in free_odr_warning_data. */
5843 else if (TREE_CODE (decl) == FIELD_DECL)
5845 TREE_TYPE (decl) = fld_simplified_type (TREE_TYPE (decl), fld);
5846 DECL_INITIAL (decl) = NULL_TREE;
5848 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5849 && DECL_INITIAL (decl)
5850 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5852 /* Strip builtins from the translation-unit BLOCK. We still have targets
5853 without builtin_decl_explicit support, and builtins are shared
5854 nodes, so we can't use TREE_CHAIN in multiple lists. */
5855 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5856 while (*nextp)
5858 tree var = *nextp;
5859 if (TREE_CODE (var) == FUNCTION_DECL
5860 && fndecl_built_in_p (var))
5861 *nextp = TREE_CHAIN (var);
5862 else
5863 nextp = &TREE_CHAIN (var);
5866 /* We need to keep field decls associated with their trees. Otherwise tree
5867 merging may merge some fields and keep others disjoint, which in turn will
5868 not do well with TREE_CHAIN pointers linking them.
5870 Also do not drop containing types for virtual methods and tables because
5871 these are needed by devirtualization.
5872 C++ destructors are special because C++ frontends sometimes produce a
5873 virtual destructor as an alias of a non-virtual destructor. In the
5874 devirtualization code we always walk through aliases and we need the
5875 context to be preserved too. See PR89335. */
5876 if (TREE_CODE (decl) != FIELD_DECL
5877 && ((TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL)
5878 || (!DECL_VIRTUAL_P (decl)
5879 && (TREE_CODE (decl) != FUNCTION_DECL
5880 || !DECL_CXX_DESTRUCTOR_P (decl)))))
5881 DECL_CONTEXT (decl) = fld_decl_context (DECL_CONTEXT (decl));
5885 /* Operand callback helper for free_lang_data_in_node. *TP is the
5886 subtree operand being considered. */
5888 static tree
5889 find_decls_types_r (tree *tp, int *ws, void *data)
5891 tree t = *tp;
5892 class free_lang_data_d *fld = (class free_lang_data_d *) data;
5894 if (TREE_CODE (t) == TREE_LIST)
5895 return NULL_TREE;
5897 /* Language specific nodes will be removed, so there is no need
5898 to gather anything under them. */
5899 if (is_lang_specific (t))
5901 *ws = 0;
5902 return NULL_TREE;
5905 if (DECL_P (t))
5907 /* Note that walk_tree does not traverse every possible field in
5908 decls, so we have to do our own traversals here. */
5909 add_tree_to_fld_list (t, fld);
5911 fld_worklist_push (DECL_NAME (t), fld);
5912 fld_worklist_push (DECL_CONTEXT (t), fld);
5913 fld_worklist_push (DECL_SIZE (t), fld);
5914 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5916 /* We are going to remove everything under DECL_INITIAL for
5917 TYPE_DECLs. No point walking them. */
5918 if (TREE_CODE (t) != TYPE_DECL)
5919 fld_worklist_push (DECL_INITIAL (t), fld);
5921 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5922 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5924 if (TREE_CODE (t) == FUNCTION_DECL)
5926 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5927 fld_worklist_push (DECL_RESULT (t), fld);
5929 else if (TREE_CODE (t) == FIELD_DECL)
5931 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5932 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5933 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5934 fld_worklist_push (DECL_FCONTEXT (t), fld);
5937 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
5938 && DECL_HAS_VALUE_EXPR_P (t))
5939 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5941 if (TREE_CODE (t) != FIELD_DECL
5942 && TREE_CODE (t) != TYPE_DECL)
5943 fld_worklist_push (TREE_CHAIN (t), fld);
5944 *ws = 0;
5946 else if (TYPE_P (t))
5948 /* Note that walk_tree does not traverse every possible field in
5949 types, so we have to do our own traversals here. */
5950 add_tree_to_fld_list (t, fld);
5952 if (!RECORD_OR_UNION_TYPE_P (t))
5953 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5954 fld_worklist_push (TYPE_SIZE (t), fld);
5955 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5956 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5957 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5958 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5959 fld_worklist_push (TYPE_NAME (t), fld);
5960 /* While we do not stream TYPE_POINTER_TO and TYPE_REFERENCE_TO
5961 lists, we may look types up in these lists and use them while
5962 optimizing the function body. Thus we need to free lang data
5963 in them. */
5964 if (TREE_CODE (t) == POINTER_TYPE)
5965 fld_worklist_push (TYPE_NEXT_PTR_TO (t), fld);
5966 if (TREE_CODE (t) == REFERENCE_TYPE)
5967 fld_worklist_push (TYPE_NEXT_REF_TO (t), fld);
5968 if (!POINTER_TYPE_P (t))
5969 fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
5970 /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types. */
5971 if (!RECORD_OR_UNION_TYPE_P (t))
5972 fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
5973 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5974 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5975 do not want to reach unused variants this way. */
5976 if (TYPE_CONTEXT (t))
5978 tree ctx = TYPE_CONTEXT (t);
5979 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5980 So push that instead. */
5981 while (ctx && TREE_CODE (ctx) == BLOCK)
5982 ctx = BLOCK_SUPERCONTEXT (ctx);
5983 fld_worklist_push (ctx, fld);
5985 fld_worklist_push (TYPE_CANONICAL (t), fld);
5987 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5989 unsigned i;
5990 tree tem;
5991 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5992 fld_worklist_push (TREE_TYPE (tem), fld);
5993 fld_worklist_push (BINFO_TYPE (TYPE_BINFO (t)), fld);
5994 fld_worklist_push (BINFO_VTABLE (TYPE_BINFO (t)), fld);
5996 if (RECORD_OR_UNION_TYPE_P (t))
5998 tree tem;
5999 /* Push all TYPE_FIELDS - interesting and non-interesting things
6000 can be interleaved. */
6001 tem = TYPE_FIELDS (t);
6002 while (tem)
6004 if (TREE_CODE (tem) == FIELD_DECL)
6005 fld_worklist_push (tem, fld);
6006 tem = TREE_CHAIN (tem);
6009 if (FUNC_OR_METHOD_TYPE_P (t))
6010 fld_worklist_push (TYPE_METHOD_BASETYPE (t), fld);
6012 fld_worklist_push (TYPE_STUB_DECL (t), fld);
6013 *ws = 0;
6015 else if (TREE_CODE (t) == BLOCK)
6017 for (tree *tem = &BLOCK_VARS (t); *tem; )
6019 if (TREE_CODE (*tem) != LABEL_DECL
6020 && (TREE_CODE (*tem) != VAR_DECL
6021 || !auto_var_in_fn_p (*tem, DECL_CONTEXT (*tem))))
6023 gcc_assert (TREE_CODE (*tem) != RESULT_DECL
6024 && TREE_CODE (*tem) != PARM_DECL);
6025 *tem = TREE_CHAIN (*tem);
6027 else
6029 fld_worklist_push (*tem, fld);
6030 tem = &TREE_CHAIN (*tem);
6033 for (tree tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
6034 fld_worklist_push (tem, fld);
6035 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
6038 if (TREE_CODE (t) != IDENTIFIER_NODE
6039 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
6040 fld_worklist_push (TREE_TYPE (t), fld);
6042 return NULL_TREE;
6046 /* Find decls and types in T. */
6048 static void
6049 find_decls_types (tree t, class free_lang_data_d *fld)
6051 while (1)
6053 if (!fld->pset.contains (t))
6054 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
6055 if (fld->worklist.is_empty ())
6056 break;
6057 t = fld->worklist.pop ();
6061 /* Translate all the types in LIST with the corresponding runtime
6062 types. */
6064 static tree
6065 get_eh_types_for_runtime (tree list)
6067 tree head, prev;
6069 if (list == NULL_TREE)
6070 return NULL_TREE;
6072 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6073 prev = head;
6074 list = TREE_CHAIN (list);
6075 while (list)
6077 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
6078 TREE_CHAIN (prev) = n;
6079 prev = TREE_CHAIN (prev);
6080 list = TREE_CHAIN (list);
6083 return head;
6087 /* Find decls and types referenced in EH region R and store them in
6088 FLD->DECLS and FLD->TYPES. */
6090 static void
6091 find_decls_types_in_eh_region (eh_region r, class free_lang_data_d *fld)
6093 switch (r->type)
6095 case ERT_CLEANUP:
6096 break;
6098 case ERT_TRY:
6100 eh_catch c;
6102 /* The types referenced in each catch must first be changed to the
6103 EH types used at runtime. This removes references to FE types
6104 in the region. */
6105 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
6107 c->type_list = get_eh_types_for_runtime (c->type_list);
6108 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
6111 break;
6113 case ERT_ALLOWED_EXCEPTIONS:
6114 r->u.allowed.type_list
6115 = get_eh_types_for_runtime (r->u.allowed.type_list);
6116 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
6117 break;
6119 case ERT_MUST_NOT_THROW:
6120 walk_tree (&r->u.must_not_throw.failure_decl,
6121 find_decls_types_r, fld, &fld->pset);
6122 break;
6127 /* Find decls and types referenced in cgraph node N and store them in
6128 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
6129 look for *every* kind of DECL and TYPE node reachable from N,
6130 including those embedded inside types and decls (i.e., TYPE_DECLs,
6131 NAMESPACE_DECLs, etc). */
6133 static void
6134 find_decls_types_in_node (struct cgraph_node *n, class free_lang_data_d *fld)
6136 basic_block bb;
6137 struct function *fn;
6138 unsigned ix;
6139 tree t;
6141 find_decls_types (n->decl, fld);
6143 if (!gimple_has_body_p (n->decl))
6144 return;
6146 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
6148 fn = DECL_STRUCT_FUNCTION (n->decl);
6150 /* Traverse locals. */
6151 FOR_EACH_LOCAL_DECL (fn, ix, t)
6152 find_decls_types (t, fld);
6154 /* Traverse EH regions in FN. */
6156 eh_region r;
6157 FOR_ALL_EH_REGION_FN (r, fn)
6158 find_decls_types_in_eh_region (r, fld);
6161 /* Traverse every statement in FN. */
6162 FOR_EACH_BB_FN (bb, fn)
6164 gphi_iterator psi;
6165 gimple_stmt_iterator si;
6166 unsigned i;
6168 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
6170 gphi *phi = psi.phi ();
6172 for (i = 0; i < gimple_phi_num_args (phi); i++)
6174 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
6175 find_decls_types (*arg_p, fld);
6179 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6181 gimple *stmt = gsi_stmt (si);
6183 if (is_gimple_call (stmt))
6184 find_decls_types (gimple_call_fntype (stmt), fld);
6186 for (i = 0; i < gimple_num_ops (stmt); i++)
6188 tree arg = gimple_op (stmt, i);
6189 find_decls_types (arg, fld);
6190 /* find_decls_types doesn't walk TREE_PURPOSE of TREE_LISTs,
6191 which we need for asm stmts. */
6192 if (arg
6193 && TREE_CODE (arg) == TREE_LIST
6194 && TREE_PURPOSE (arg)
6195 && gimple_code (stmt) == GIMPLE_ASM)
6196 find_decls_types (TREE_PURPOSE (arg), fld);
6203 /* Find decls and types referenced in varpool node N and store them in
6204 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
6205 look for *every* kind of DECL and TYPE node reachable from N,
6206 including those embedded inside types and decls (i.e., TYPE_DECLs,
6207 NAMESPACE_DECLs, etc). */
6209 static void
6210 find_decls_types_in_var (varpool_node *v, class free_lang_data_d *fld)
6212 find_decls_types (v->decl, fld);
6215 /* If T needs an assembler name, have one created for it. */
6217 void
6218 assign_assembler_name_if_needed (tree t)
6220 if (need_assembler_name_p (t))
6222 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
6223 diagnostics that use input_location to show locus
6224 information. The problem here is that, at this point,
6225 input_location is generally anchored to the end of the file
6226 (since the parser is long gone), so we don't have a good
6227 position to pin it to.
6229 To alleviate this problem, this uses the location of T's
6230 declaration. Examples of this are
6231 testsuite/g++.dg/template/cond2.C and
6232 testsuite/g++.dg/template/pr35240.C. */
6233 location_t saved_location = input_location;
6234 input_location = DECL_SOURCE_LOCATION (t);
6236 decl_assembler_name (t);
6238 input_location = saved_location;
6243 /* Free language specific information for every operand and expression
6244 in every node of the call graph. This process operates in three stages:
6246 1- Every callgraph node and varpool node is traversed looking for
6247 decls and types embedded in them. This is a more exhaustive
6248 search than that done by find_referenced_vars, because it will
6249 also collect individual fields, decls embedded in types, etc.
6251 2- All the decls found are sent to free_lang_data_in_decl.
6253 3- All the types found are sent to free_lang_data_in_type.
6255 The ordering between decls and types is important because
6256 free_lang_data_in_decl sets assembler names, which includes
6257 mangling. So types cannot be freed up until assembler names have
6258 been set up. */
6260 static void
6261 free_lang_data_in_cgraph (class free_lang_data_d *fld)
6263 struct cgraph_node *n;
6264 varpool_node *v;
6265 tree t;
6266 unsigned i;
6267 alias_pair *p;
6269 /* Find decls and types in the body of every function in the callgraph. */
6270 FOR_EACH_FUNCTION (n)
6271 find_decls_types_in_node (n, fld);
6273 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
6274 find_decls_types (p->decl, fld);
6276 /* Find decls and types in every varpool symbol. */
6277 FOR_EACH_VARIABLE (v)
6278 find_decls_types_in_var (v, fld);
6280 /* Set the assembler name on every decl found. We need to do this
6281 now because free_lang_data_in_decl will invalidate data needed
6282 for mangling. This breaks mangling on interdependent decls. */
6283 FOR_EACH_VEC_ELT (fld->decls, i, t)
6284 assign_assembler_name_if_needed (t);
6286 /* Traverse every decl found freeing its language data. */
6287 FOR_EACH_VEC_ELT (fld->decls, i, t)
6288 free_lang_data_in_decl (t, fld);
6290 /* Traverse every type found freeing its language data. */
6291 FOR_EACH_VEC_ELT (fld->types, i, t)
6292 free_lang_data_in_type (t, fld);
6296 /* Free resources that are used by the FE but are not needed once it is done. */
6298 static unsigned
6299 free_lang_data (void)
6301 unsigned i;
6302 class free_lang_data_d fld;
6304 /* If we are the LTO frontend we have freed lang-specific data already. */
6305 if (in_lto_p
6306 || (!flag_generate_lto && !flag_generate_offload))
6308 /* Rebuild type inheritance graph even when not doing LTO to get
6309 consistent profile data. */
6310 rebuild_type_inheritance_graph ();
6311 return 0;
6314 fld_incomplete_types = new hash_map<tree, tree>;
6315 fld_simplified_types = new hash_map<tree, tree>;
6317 /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one. */
6318 if (vec_safe_is_empty (all_translation_units))
6319 build_translation_unit_decl (NULL_TREE);
6321 /* Allocate and assign alias sets to the standard integer types
6322 while the slots are still set up the way the frontends generated them. */
6323 for (i = 0; i < itk_none; ++i)
6324 if (integer_types[i])
6325 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6327 /* Traverse the IL resetting language specific information for
6328 operands, expressions, etc. */
6329 free_lang_data_in_cgraph (&fld);
6331 /* Create gimple variants for common types. */
6332 for (unsigned i = 0;
6333 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
6334 ++i)
6335 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
6337 /* Reset some langhooks. Do not reset types_compatible_p, it may
6338 still be used indirectly via the get_alias_set langhook. */
6339 lang_hooks.dwarf_name = lhd_dwarf_name;
6340 lang_hooks.decl_printable_name = gimple_decl_printable_name;
6341 lang_hooks.gimplify_expr = lhd_gimplify_expr;
6342 lang_hooks.overwrite_decl_assembler_name = lhd_overwrite_decl_assembler_name;
6343 lang_hooks.print_xnode = lhd_print_tree_nothing;
6344 lang_hooks.print_decl = lhd_print_tree_nothing;
6345 lang_hooks.print_type = lhd_print_tree_nothing;
6346 lang_hooks.print_identifier = lhd_print_tree_nothing;
6348 lang_hooks.tree_inlining.var_mod_type_p = hook_bool_tree_tree_false;
6350 if (flag_checking)
6352 int i;
6353 tree t;
6355 FOR_EACH_VEC_ELT (fld.types, i, t)
6356 verify_type (t);
6359 /* We do not want the default decl_assembler_name implementation;
6360 rather, once we have fixed everything, we want a wrapper around it
6361 asserting that all non-local symbols already got their assembler
6362 name and only produce assembler names for local symbols. Or rather,
6363 make sure we never call decl_assembler_name on local symbols and
6364 devise a separate, middle-end private scheme for it. */
6366 /* Reset diagnostic machinery. */
6367 tree_diagnostics_defaults (global_dc);
6369 rebuild_type_inheritance_graph ();
6371 delete fld_incomplete_types;
6372 delete fld_simplified_types;
6374 return 0;
6378 namespace {
6380 const pass_data pass_data_ipa_free_lang_data =
6382 SIMPLE_IPA_PASS, /* type */
6383 "*free_lang_data", /* name */
6384 OPTGROUP_NONE, /* optinfo_flags */
6385 TV_IPA_FREE_LANG_DATA, /* tv_id */
6386 0, /* properties_required */
6387 0, /* properties_provided */
6388 0, /* properties_destroyed */
6389 0, /* todo_flags_start */
6390 0, /* todo_flags_finish */
6393 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6395 public:
6396 pass_ipa_free_lang_data (gcc::context *ctxt)
6397 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6400 /* opt_pass methods: */
6401 virtual unsigned int execute (function *) { return free_lang_data (); }
6403 }; // class pass_ipa_free_lang_data
6405 } // anon namespace
6407 simple_ipa_opt_pass *
6408 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6410 return new pass_ipa_free_lang_data (ctxt);
6413 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6414 of the various TYPE_QUAL values. */
6416 static void
6417 set_type_quals (tree type, int type_quals)
6419 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6420 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6421 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6422 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6423 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6426 /* Returns true iff CAND and BASE have equivalent language-specific
6427 qualifiers. */
6429 bool
6430 check_lang_type (const_tree cand, const_tree base)
6432 if (lang_hooks.types.type_hash_eq == NULL)
6433 return true;
6434 /* type_hash_eq currently only applies to these types. */
6435 if (TREE_CODE (cand) != FUNCTION_TYPE
6436 && TREE_CODE (cand) != METHOD_TYPE)
6437 return true;
6438 return lang_hooks.types.type_hash_eq (cand, base);
6441 /* This function checks to see if TYPE matches the size of one of the built-in
6442 atomic types, and returns that core atomic type. */
6444 static tree
6445 find_atomic_core_type (const_tree type)
6447 tree base_atomic_type;
6449 /* Only handle complete types. */
6450 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
6451 return NULL_TREE;
6453 switch (tree_to_uhwi (TYPE_SIZE (type)))
6455 case 8:
6456 base_atomic_type = atomicQI_type_node;
6457 break;
6459 case 16:
6460 base_atomic_type = atomicHI_type_node;
6461 break;
6463 case 32:
6464 base_atomic_type = atomicSI_type_node;
6465 break;
6467 case 64:
6468 base_atomic_type = atomicDI_type_node;
6469 break;
6471 case 128:
6472 base_atomic_type = atomicTI_type_node;
6473 break;
6475 default:
6476 base_atomic_type = NULL_TREE;
6479 return base_atomic_type;
6482 /* Returns true iff unqualified CAND and BASE are equivalent. */
6484 bool
6485 check_base_type (const_tree cand, const_tree base)
6487 if (TYPE_NAME (cand) != TYPE_NAME (base)
6488 /* Apparently this is needed for Objective-C. */
6489 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
6490 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
6491 TYPE_ATTRIBUTES (base)))
6492 return false;
6493 /* Check alignment. */
6494 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base))
6495 return true;
6496 Atomic types increase the minimal alignment. We must do so as well
6497 or we get duplicated canonical types. See PR88686. */
6498 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
6500 /* See if this object can map to a basic atomic type. */
6501 tree atomic_type = find_atomic_core_type (cand);
6502 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
6503 return true;
6505 return false;
6508 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6510 bool
6511 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6513 return (TYPE_QUALS (cand) == type_quals
6514 && check_base_type (cand, base)
6515 && check_lang_type (cand, base));
6518 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6520 static bool
6521 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6523 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6524 && TYPE_NAME (cand) == TYPE_NAME (base)
6525 /* Apparently this is needed for Objective-C. */
6526 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6527 /* Check alignment. */
6528 && TYPE_ALIGN (cand) == align
6529 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6530 TYPE_ATTRIBUTES (base))
6531 && check_lang_type (cand, base));
6534 /* Return a version of the TYPE, qualified as indicated by the
6535 TYPE_QUALS, if one exists. If no qualified version exists yet,
6536 return NULL_TREE. */
6538 tree
6539 get_qualified_type (tree type, int type_quals)
6541 if (TYPE_QUALS (type) == type_quals)
6542 return type;
6544 tree mv = TYPE_MAIN_VARIANT (type);
6545 if (check_qualified_type (mv, type, type_quals))
6546 return mv;
6548 /* Search the chain of variants to see if there is already one there just
6549 like the one we need to have. If so, use that existing one. We must
6550 preserve the TYPE_NAME, since there is code that depends on this. */
6551 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
6552 if (check_qualified_type (*tp, type, type_quals))
6554 /* Put the found variant at the head of the variant list so
6555 frequently searched variants get found faster. The C++ FE
6556 benefits greatly from this. */
6557 tree t = *tp;
6558 *tp = TYPE_NEXT_VARIANT (t);
6559 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
6560 TYPE_NEXT_VARIANT (mv) = t;
6561 return t;
6564 return NULL_TREE;
6567 /* Like get_qualified_type, but creates the type if it does not
6568 exist. This function never returns NULL_TREE. */
6570 tree
6571 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
6573 tree t;
6575 /* See if we already have the appropriate qualified variant. */
6576 t = get_qualified_type (type, type_quals);
6578 /* If not, build it. */
6579 if (!t)
6581 t = build_variant_type_copy (type PASS_MEM_STAT);
6582 set_type_quals (t, type_quals);
6584 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6586 /* See if this object can map to a basic atomic type. */
6587 tree atomic_type = find_atomic_core_type (type);
6588 if (atomic_type)
6590 /* Ensure the alignment of this type is compatible with
6591 the required alignment of the atomic type. */
6592 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6593 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6597 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6598 /* Propagate structural equality. */
6599 SET_TYPE_STRUCTURAL_EQUALITY (t);
6600 else if (TYPE_CANONICAL (type) != type)
6601 /* Build the underlying canonical type, since it is different
6602 from TYPE. */
6604 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6605 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6607 else
6608 /* T is its own canonical type. */
6609 TYPE_CANONICAL (t) = t;
6613 return t;
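
/* Illustrative sketch, not part of the original tree.c: requesting a
   const-qualified variant.  The function name is hypothetical;
   build_qualified_type performs the get_qualified_type lookup itself,
   so a single call either reuses an existing variant from TYPE's
   variant list or chains a new one onto it.  */

static tree
example_make_const (tree type)
{
  return build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST);
}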
6616 /* Create a variant of type T with alignment ALIGN. */
6618 tree
6619 build_aligned_type (tree type, unsigned int align)
6621 tree t;
6623 if (TYPE_PACKED (type)
6624 || TYPE_ALIGN (type) == align)
6625 return type;
6627 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6628 if (check_aligned_type (t, type, align))
6629 return t;
6631 t = build_variant_type_copy (type);
6632 SET_TYPE_ALIGN (t, align);
6633 TYPE_USER_ALIGN (t) = 1;
6635 return t;
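
/* Illustrative sketch, not part of the original tree.c: requesting an
   over-aligned variant.  The function name is hypothetical; ALIGN is
   given in bits, and check_aligned_type above is used to reuse an
   existing variant before a new one is created.  */

static tree
example_overaligned_int (void)
{
  /* A 128-bit (16-byte) aligned variant of int.  */
  return build_aligned_type (integer_type_node, 128);
}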
6638 /* Create a new distinct copy of TYPE. The new type is made its own
6639 MAIN_VARIANT. If TYPE requires structural equality checks, the
6640 resulting type requires structural equality checks; otherwise, its
6641 TYPE_CANONICAL points to itself. */
6643 tree
6644 build_distinct_type_copy (tree type MEM_STAT_DECL)
6646 tree t = copy_node (type PASS_MEM_STAT);
6648 TYPE_POINTER_TO (t) = 0;
6649 TYPE_REFERENCE_TO (t) = 0;
6651 /* Set the canonical type either to a new equivalence class, or
6652 propagate the need for structural equality checks. */
6653 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6654 SET_TYPE_STRUCTURAL_EQUALITY (t);
6655 else
6656 TYPE_CANONICAL (t) = t;
6658 /* Make it its own variant. */
6659 TYPE_MAIN_VARIANT (t) = t;
6660 TYPE_NEXT_VARIANT (t) = 0;
6662 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6663 whose TREE_TYPE is not t. This can also happen in the Ada
6664 frontend when using subtypes. */
6666 return t;
6669 /* Create a new variant of TYPE, equivalent but distinct. This is so
6670 the caller can modify it. TYPE_CANONICAL for the return type will
6671 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6672 are considered equal by the language itself (or that both types
6673 require structural equality checks). */
6675 tree
6676 build_variant_type_copy (tree type MEM_STAT_DECL)
6678 tree t, m = TYPE_MAIN_VARIANT (type);
6680 t = build_distinct_type_copy (type PASS_MEM_STAT);
6682 /* Since we're building a variant, assume that it is a non-semantic
6683 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6684 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6685 /* Type variants have no alias set defined. */
6686 TYPE_ALIAS_SET (t) = -1;
6688 /* Add the new type to the chain of variants of TYPE. */
6689 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6690 TYPE_NEXT_VARIANT (m) = t;
6691 TYPE_MAIN_VARIANT (t) = m;
6693 return t;
6696 /* Return true if the from tree in both tree maps are equal. */
6699 tree_map_base_eq (const void *va, const void *vb)
6701 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6702 *const b = (const struct tree_map_base *) vb;
6703 return (a->from == b->from);
6706 /* Hash a from tree in a tree_base_map. */
6708 unsigned int
6709 tree_map_base_hash (const void *item)
6711 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6714 /* Return true if this tree map structure is marked for garbage collection
6715 purposes. We simply return true if the from tree is marked, so that this
6716 structure goes away when the from tree goes away. */
6719 tree_map_base_marked_p (const void *p)
6721 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6724 /* Hash a from tree in a tree_map. */
6726 unsigned int
6727 tree_map_hash (const void *item)
6729 return (((const struct tree_map *) item)->hash);
6732 /* Hash a from tree in a tree_decl_map. */
6734 unsigned int
6735 tree_decl_map_hash (const void *item)
6737 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6740 /* Return the initialization priority for DECL. */
6742 priority_type
6743 decl_init_priority_lookup (tree decl)
6745 symtab_node *snode = symtab_node::get (decl);
6747 if (!snode)
6748 return DEFAULT_INIT_PRIORITY;
6749 return
6750 snode->get_init_priority ();
6753 /* Return the finalization priority for DECL. */
6755 priority_type
6756 decl_fini_priority_lookup (tree decl)
6758 cgraph_node *node = cgraph_node::get (decl);
6760 if (!node)
6761 return DEFAULT_INIT_PRIORITY;
6762 return
6763 node->get_fini_priority ();
6766 /* Set the initialization priority for DECL to PRIORITY. */
6768 void
6769 decl_init_priority_insert (tree decl, priority_type priority)
6771 struct symtab_node *snode;
6773 if (priority == DEFAULT_INIT_PRIORITY)
6775 snode = symtab_node::get (decl);
6776 if (!snode)
6777 return;
6779 else if (VAR_P (decl))
6780 snode = varpool_node::get_create (decl);
6781 else
6782 snode = cgraph_node::get_create (decl);
6783 snode->set_init_priority (priority);
6786 /* Set the finalization priority for DECL to PRIORITY. */
6788 void
6789 decl_fini_priority_insert (tree decl, priority_type priority)
6791 struct cgraph_node *node;
6793 if (priority == DEFAULT_INIT_PRIORITY)
6795 node = cgraph_node::get (decl);
6796 if (!node)
6797 return;
6799 else
6800 node = cgraph_node::get_create (decl);
6801 node->set_fini_priority (priority);
6804 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6806 static void
6807 print_debug_expr_statistics (void)
6809 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6810 (long) debug_expr_for_decl->size (),
6811 (long) debug_expr_for_decl->elements (),
6812 debug_expr_for_decl->collisions ());
6815 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6817 static void
6818 print_value_expr_statistics (void)
6820 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6821 (long) value_expr_for_decl->size (),
6822 (long) value_expr_for_decl->elements (),
6823 value_expr_for_decl->collisions ());
6826 /* Lookup a debug expression for FROM, and return it if we find one. */
6828 tree
6829 decl_debug_expr_lookup (tree from)
6831 struct tree_decl_map *h, in;
6832 in.base.from = from;
6834 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6835 if (h)
6836 return h->to;
6837 return NULL_TREE;
6840 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6842 void
6843 decl_debug_expr_insert (tree from, tree to)
6845 struct tree_decl_map *h;
6847 h = ggc_alloc<tree_decl_map> ();
6848 h->base.from = from;
6849 h->to = to;
6850 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6853 /* Lookup a value expression for FROM, and return it if we find one. */
6855 tree
6856 decl_value_expr_lookup (tree from)
6858 struct tree_decl_map *h, in;
6859 in.base.from = from;
6861 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6862 if (h)
6863 return h->to;
6864 return NULL_TREE;
6867 /* Insert a mapping FROM->TO in the value expression hashtable. */
6869 void
6870 decl_value_expr_insert (tree from, tree to)
6872 struct tree_decl_map *h;
6874 h = ggc_alloc<tree_decl_map> ();
6875 h->base.from = from;
6876 h->to = to;
6877 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6880 /* Lookup a vector of debug arguments for FROM, and return it if we
6881 find one. */
6883 vec<tree, va_gc> **
6884 decl_debug_args_lookup (tree from)
6886 struct tree_vec_map *h, in;
6888 if (!DECL_HAS_DEBUG_ARGS_P (from))
6889 return NULL;
6890 gcc_checking_assert (debug_args_for_decl != NULL);
6891 in.base.from = from;
6892 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6893 if (h)
6894 return &h->to;
6895 return NULL;
6898 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6899 arguments hashtable. */
6901 vec<tree, va_gc> **
6902 decl_debug_args_insert (tree from)
6904 struct tree_vec_map *h;
6905 tree_vec_map **loc;
6907 if (DECL_HAS_DEBUG_ARGS_P (from))
6908 return decl_debug_args_lookup (from);
6909 if (debug_args_for_decl == NULL)
6910 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6911 h = ggc_alloc<tree_vec_map> ();
6912 h->base.from = from;
6913 h->to = NULL;
6914 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6915 *loc = h;
6916 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6917 return &h->to;
6920 /* Hashing of types so that we don't make duplicates.
6921 The entry point is `type_hash_canon'. */
6923 /* Generate the default hash code for TYPE. This is designed for
6924 speed, rather than maximum entropy. */
6926 hashval_t
6927 type_hash_canon_hash (tree type)
6929 inchash::hash hstate;
6931 hstate.add_int (TREE_CODE (type));
6933 if (TREE_TYPE (type))
6934 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6936 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6937 /* Just the identifier is adequate to distinguish. */
6938 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6940 switch (TREE_CODE (type))
6942 case METHOD_TYPE:
6943 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6944 /* FALLTHROUGH. */
6945 case FUNCTION_TYPE:
6946 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6947 if (TREE_VALUE (t) != error_mark_node)
6948 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6949 break;
6951 case OFFSET_TYPE:
6952 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6953 break;
6955 case ARRAY_TYPE:
6957 if (TYPE_DOMAIN (type))
6958 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6959 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6961 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6962 hstate.add_object (typeless);
6965 break;
6967 case INTEGER_TYPE:
6969 tree t = TYPE_MAX_VALUE (type);
6970 if (!t)
6971 t = TYPE_MIN_VALUE (type);
6972 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6973 hstate.add_object (TREE_INT_CST_ELT (t, i));
6974 break;
6977 case REAL_TYPE:
6978 case FIXED_POINT_TYPE:
6980 unsigned prec = TYPE_PRECISION (type);
6981 hstate.add_object (prec);
6982 break;
6985 case VECTOR_TYPE:
6986 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6987 break;
6989 default:
6990 break;
6993 return hstate.end ();
6996 /* These are the Hashtable callback functions. */
6998 /* Returns true iff the types are equivalent. */
7000 bool
7001 type_cache_hasher::equal (type_hash *a, type_hash *b)
7003 /* First test the things that are the same for all types. */
7004 if (a->hash != b->hash
7005 || TREE_CODE (a->type) != TREE_CODE (b->type)
7006 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
7007 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
7008 TYPE_ATTRIBUTES (b->type))
7009 || (TREE_CODE (a->type) != COMPLEX_TYPE
7010 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
7011 return 0;
7013 /* Be careful about comparing arrays before and after the element type
7014 has been completed; don't compare TYPE_ALIGN unless both types are
7015 complete. */
7016 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
7017 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
7018 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
7019 return 0;
7021 switch (TREE_CODE (a->type))
7023 case VOID_TYPE:
7024 case COMPLEX_TYPE:
7025 case POINTER_TYPE:
7026 case REFERENCE_TYPE:
7027 case NULLPTR_TYPE:
7028 return 1;
7030 case VECTOR_TYPE:
7031 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
7032 TYPE_VECTOR_SUBPARTS (b->type));
7034 case ENUMERAL_TYPE:
7035 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
7036 && !(TYPE_VALUES (a->type)
7037 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
7038 && TYPE_VALUES (b->type)
7039 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
7040 && type_list_equal (TYPE_VALUES (a->type),
7041 TYPE_VALUES (b->type))))
7042 return 0;
7044 /* fall through */
7046 case INTEGER_TYPE:
7047 case REAL_TYPE:
7048 case BOOLEAN_TYPE:
7049 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7050 return false;
7051 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7052 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7053 TYPE_MAX_VALUE (b->type)))
7054 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7055 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7056 TYPE_MIN_VALUE (b->type))));
7058 case FIXED_POINT_TYPE:
7059 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7061 case OFFSET_TYPE:
7062 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7064 case METHOD_TYPE:
7065 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7066 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7067 || (TYPE_ARG_TYPES (a->type)
7068 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7069 && TYPE_ARG_TYPES (b->type)
7070 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7071 && type_list_equal (TYPE_ARG_TYPES (a->type),
7072 TYPE_ARG_TYPES (b->type)))))
7073 break;
7074 return 0;
7075 case ARRAY_TYPE:
7076 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
7077 where the flag should be inherited from the element type
7078 and can change after ARRAY_TYPEs are created; on non-aggregates
7079 compare it and hash it, scalars will never have that flag set
7080 and we need to differentiate between arrays created by different
7081 front-ends or middle-end created arrays. */
7082 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
7083 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
7084 || (TYPE_TYPELESS_STORAGE (a->type)
7085 == TYPE_TYPELESS_STORAGE (b->type))));
7087 case RECORD_TYPE:
7088 case UNION_TYPE:
7089 case QUAL_UNION_TYPE:
7090 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7091 || (TYPE_FIELDS (a->type)
7092 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7093 && TYPE_FIELDS (b->type)
7094 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7095 && type_list_equal (TYPE_FIELDS (a->type),
7096 TYPE_FIELDS (b->type))));
7098 case FUNCTION_TYPE:
7099 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7100 || (TYPE_ARG_TYPES (a->type)
7101 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7102 && TYPE_ARG_TYPES (b->type)
7103 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7104 && type_list_equal (TYPE_ARG_TYPES (a->type),
7105 TYPE_ARG_TYPES (b->type))))
7106 break;
7107 return 0;
7109 default:
7110 return 0;
7113 if (lang_hooks.types.type_hash_eq != NULL)
7114 return lang_hooks.types.type_hash_eq (a->type, b->type);
7116 return 1;
7119 /* Given TYPE, and HASHCODE its hash code, return the canonical
7120 object for an identical type if one already exists.
7121 Otherwise, return TYPE, and record it as the canonical object.
7123 To use this function, first create a type of the sort you want.
7124 Then compute its hash code from the fields of the type that
7125 make it different from other similar types.
7126 Then call this function and use the value. */
7128 tree
7129 type_hash_canon (unsigned int hashcode, tree type)
7131 type_hash in;
7132 type_hash **loc;
7134 /* The hash table only contains main variants, so ensure that's what we're
7135 being passed. */
7136 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7138 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7139 must call that routine before comparing TYPE_ALIGNs. */
7140 layout_type (type);
7142 in.hash = hashcode;
7143 in.type = type;
7145 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7146 if (*loc)
7148 tree t1 = ((type_hash *) *loc)->type;
7149 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
7150 && t1 != type);
7151 if (TYPE_UID (type) + 1 == next_type_uid)
7152 --next_type_uid;
7153 /* Free also min/max values and the cache for integer
7154 types. This can't be done in free_node, as LTO frees
7155 those on its own. */
7156 if (TREE_CODE (type) == INTEGER_TYPE)
7158 if (TYPE_MIN_VALUE (type)
7159 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
7161 /* Zero is always in TYPE_CACHED_VALUES. */
7162 if (! TYPE_UNSIGNED (type))
7163 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
7164 ggc_free (TYPE_MIN_VALUE (type));
7166 if (TYPE_MAX_VALUE (type)
7167 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
7169 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
7170 ggc_free (TYPE_MAX_VALUE (type));
7172 if (TYPE_CACHED_VALUES_P (type))
7173 ggc_free (TYPE_CACHED_VALUES (type));
7175 free_node (type);
7176 return t1;
7178 else
7180 struct type_hash *h;
7182 h = ggc_alloc<type_hash> ();
7183 h->hash = hashcode;
7184 h->type = type;
7185 *loc = h;
7187 return type;
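
/* Illustrative sketch, not part of the original tree.c: the calling
   sequence described above for type_hash_canon.  The function name and
   the 24-bit width are hypothetical; build_nonstandard_integer_type
   further below is a real in-tree user of this pattern.  */

static tree
example_canonicalize_uint24 (void)
{
  /* 1. Create a type of the sort we want (a fresh node is its own
	main variant, as type_hash_canon requires).  */
  tree t = make_node (INTEGER_TYPE);
  TYPE_PRECISION (t) = 24;
  fixup_unsigned_type (t);

  /* 2. Compute its hash from the distinguishing fields.  */
  hashval_t hash = type_hash_canon_hash (t);

  /* 3. Canonicalize: if an identical type is already in the table, the
	new node is freed and the existing one is returned instead.  */
  return type_hash_canon (hash, t);
}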
7191 static void
7192 print_type_hash_statistics (void)
7194 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7195 (long) type_hash_table->size (),
7196 (long) type_hash_table->elements (),
7197 type_hash_table->collisions ());
7200 /* Given two lists of types
7201 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7202 return true if the lists contain the same types in the same order.
7203 Also, the TREE_PURPOSEs must match. */
7205 bool
7206 type_list_equal (const_tree l1, const_tree l2)
7208 const_tree t1, t2;
7210 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7211 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7212 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7213 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7214 && (TREE_TYPE (TREE_PURPOSE (t1))
7215 == TREE_TYPE (TREE_PURPOSE (t2))))))
7216 return false;
7218 return t1 == t2;
7221 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7222 given by TYPE. If the argument list accepts variable arguments,
7223 then this function counts only the ordinary arguments. */
7226 type_num_arguments (const_tree fntype)
7228 int i = 0;
7230 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
7231 /* If the function does not take a variable number of arguments,
7232 the last element in the list will have type `void'. */
7233 if (VOID_TYPE_P (TREE_VALUE (t)))
7234 break;
7235 else
7236 ++i;
7238 return i;
7241 /* Return the type of the function TYPE's argument ARGNO if known.
7242 For vararg functions where ARGNO refers to one of the variadic
7243 arguments, return null. Otherwise, return void_type_node for
7244 out-of-bounds ARGNO. */
7246 tree
7247 type_argument_type (const_tree fntype, unsigned argno)
7249 /* Treat zero the same as an out-of-bounds argument number. */
7250 if (!argno)
7251 return void_type_node;
7253 function_args_iterator iter;
7255 tree argtype;
7256 unsigned i = 1;
7257 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
7259 /* A vararg function's argument list ends in a null. Otherwise,
7260 an ordinary function's argument list ends with void. Return
7261 null if ARGNO refers to a vararg argument, void_type_node if
7262 it's out of bounds, and the formal argument type otherwise. */
7263 if (!argtype)
7264 break;
7266 if (i == argno || VOID_TYPE_P (argtype))
7267 return argtype;
7269 ++i;
7272 return NULL_TREE;
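
/* Illustrative sketch, not part of the original tree.c: how the two
   queries above behave for a simple prototype.  The function name is
   hypothetical; build_function_type_list and double_type_node are the
   usual tree helpers.  */

static void
example_argument_queries (void)
{
  /* Roughly "int f (int, double)".  */
  tree fntype = build_function_type_list (integer_type_node,
					  integer_type_node,
					  double_type_node,
					  NULL_TREE);

  /* Only ordinary arguments are counted, and ARGNO is 1-based;
     ARGNO == 0 is treated as out of bounds.  */
  gcc_assert (type_num_arguments (fntype) == 2);
  gcc_assert (type_argument_type (fntype, 2) == double_type_node);
}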
7275 /* Nonzero if integer constants T1 and T2
7276 represent the same constant value. */
7279 tree_int_cst_equal (const_tree t1, const_tree t2)
7281 if (t1 == t2)
7282 return 1;
7284 if (t1 == 0 || t2 == 0)
7285 return 0;
7287 STRIP_ANY_LOCATION_WRAPPER (t1);
7288 STRIP_ANY_LOCATION_WRAPPER (t2);
7290 if (TREE_CODE (t1) == INTEGER_CST
7291 && TREE_CODE (t2) == INTEGER_CST
7292 && wi::to_widest (t1) == wi::to_widest (t2))
7293 return 1;
7295 return 0;
7298 /* Return true if T is an INTEGER_CST whose numerical value (extended
7299 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7301 bool
7302 tree_fits_shwi_p (const_tree t)
7304 return (t != NULL_TREE
7305 && TREE_CODE (t) == INTEGER_CST
7306 && wi::fits_shwi_p (wi::to_widest (t)));
7309 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7310 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
7312 bool
7313 tree_fits_poly_int64_p (const_tree t)
7315 if (t == NULL_TREE)
7316 return false;
7317 if (POLY_INT_CST_P (t))
7319 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7320 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
7321 return false;
7322 return true;
7324 return (TREE_CODE (t) == INTEGER_CST
7325 && wi::fits_shwi_p (wi::to_widest (t)));
7328 /* Return true if T is an INTEGER_CST whose numerical value (extended
7329 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7331 bool
7332 tree_fits_uhwi_p (const_tree t)
7334 return (t != NULL_TREE
7335 && TREE_CODE (t) == INTEGER_CST
7336 && wi::fits_uhwi_p (wi::to_widest (t)));
7339 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
7340 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
7342 bool
7343 tree_fits_poly_uint64_p (const_tree t)
7345 if (t == NULL_TREE)
7346 return false;
7347 if (POLY_INT_CST_P (t))
7349 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
7350 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
7351 return false;
7352 return true;
7354 return (TREE_CODE (t) == INTEGER_CST
7355 && wi::fits_uhwi_p (wi::to_widest (t)));
7358 /* T is an INTEGER_CST whose numerical value (extended according to
7359 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7360 HOST_WIDE_INT. */
7362 HOST_WIDE_INT
7363 tree_to_shwi (const_tree t)
7365 gcc_assert (tree_fits_shwi_p (t));
7366 return TREE_INT_CST_LOW (t);
7369 /* T is an INTEGER_CST whose numerical value (extended according to
7370 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7371 HOST_WIDE_INT. */
7373 unsigned HOST_WIDE_INT
7374 tree_to_uhwi (const_tree t)
7376 gcc_assert (tree_fits_uhwi_p (t));
7377 return TREE_INT_CST_LOW (t);
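
/* Illustrative sketch, not part of the original tree.c: the intended
   pairing of the tree_fits_*_p predicates with the tree_to_* accessors,
   which assert the corresponding predicate.  The function name is
   hypothetical; find_atomic_core_type above uses the same pattern.  */

static bool
example_size_in_bits (const_tree type, unsigned HOST_WIDE_INT *bits)
{
  const_tree size = TYPE_SIZE (type);	/* May be NULL or not a constant.  */
  if (!size || !tree_fits_uhwi_p (size))
    return false;
  *bits = tree_to_uhwi (size);		/* Safe only after the check above.  */
  return true;
}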
7380 /* Return the most significant (sign) bit of T. */
7383 tree_int_cst_sign_bit (const_tree t)
7385 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7387 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
7390 /* Return an indication of the sign of the integer constant T.
7391 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7392 Note that -1 will never be returned if T's type is unsigned. */
7395 tree_int_cst_sgn (const_tree t)
7397 if (wi::to_wide (t) == 0)
7398 return 0;
7399 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7400 return 1;
7401 else if (wi::neg_p (wi::to_wide (t)))
7402 return -1;
7403 else
7404 return 1;
7407 /* Return the minimum number of bits needed to represent VALUE in a
7408 signed or unsigned type; SGN says which. */
7410 unsigned int
7411 tree_int_cst_min_precision (tree value, signop sgn)
7413 /* If the value is negative, compute its negative minus 1. The latter
7414 adjustment is because the absolute value of the largest negative value
7415 is one larger than the largest positive value. This is equivalent to
7416 a bit-wise negation, so use that operation instead. */
7418 if (tree_int_cst_sgn (value) < 0)
7419 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7421 /* Return the number of bits needed, taking into account the fact
7422 that we need one more bit for a signed than unsigned type.
7423 If value is 0 or -1, the minimum precision is 1 no matter
7424 whether SGN is SIGNED or UNSIGNED. */
7426 if (integer_zerop (value))
7427 return 1;
7428 else
7429 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
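
/* Worked example for tree_int_cst_min_precision, added for illustration
   and not part of the original tree.c: for VALUE == 5 (binary 101),
   tree_floor_log2 returns 2, so the result is 3 bits for UNSIGNED and
   4 bits for SIGNED.  For VALUE == -6 the BIT_NOT_EXPR above turns it
   into 5, so the SIGNED result is again 4.  For VALUE == 0 or -1 the
   result is 1, as the comment above states.  */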
7432 /* Return truthvalue of whether T1 is the same tree structure as T2.
7433 Return 1 if they are the same.
7434 Return 0 if they are understandably different.
7435 Return -1 if either contains tree structure not understood by
7436 this function. */
7439 simple_cst_equal (const_tree t1, const_tree t2)
7441 enum tree_code code1, code2;
7442 int cmp;
7443 int i;
7445 if (t1 == t2)
7446 return 1;
7447 if (t1 == 0 || t2 == 0)
7448 return 0;
7450 /* For location wrappers to be the same, they must be at the same
7451 source location (and wrap the same thing). */
7452 if (location_wrapper_p (t1) && location_wrapper_p (t2))
7454 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
7455 return 0;
7456 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7459 code1 = TREE_CODE (t1);
7460 code2 = TREE_CODE (t2);
7462 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7464 if (CONVERT_EXPR_CODE_P (code2)
7465 || code2 == NON_LVALUE_EXPR)
7466 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7467 else
7468 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7471 else if (CONVERT_EXPR_CODE_P (code2)
7472 || code2 == NON_LVALUE_EXPR)
7473 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7475 if (code1 != code2)
7476 return 0;
7478 switch (code1)
7480 case INTEGER_CST:
7481 return wi::to_widest (t1) == wi::to_widest (t2);
7483 case REAL_CST:
7484 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7486 case FIXED_CST:
7487 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7489 case STRING_CST:
7490 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7491 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7492 TREE_STRING_LENGTH (t1)));
7494 case CONSTRUCTOR:
7496 unsigned HOST_WIDE_INT idx;
7497 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7498 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7500 if (vec_safe_length (v1) != vec_safe_length (v2))
7501 return false;
7503 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7504 /* ??? Should we handle also fields here? */
7505 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7506 return false;
7507 return true;
7510 case SAVE_EXPR:
7511 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7513 case CALL_EXPR:
7514 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7515 if (cmp <= 0)
7516 return cmp;
7517 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7518 return 0;
7520 const_tree arg1, arg2;
7521 const_call_expr_arg_iterator iter1, iter2;
7522 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7523 arg2 = first_const_call_expr_arg (t2, &iter2);
7524 arg1 && arg2;
7525 arg1 = next_const_call_expr_arg (&iter1),
7526 arg2 = next_const_call_expr_arg (&iter2))
7528 cmp = simple_cst_equal (arg1, arg2);
7529 if (cmp <= 0)
7530 return cmp;
7532 return arg1 == arg2;
7535 case TARGET_EXPR:
7536 /* Special case: if either target is an unallocated VAR_DECL,
7537 it means that it's going to be unified with whatever the
7538 TARGET_EXPR is really supposed to initialize, so treat it
7539 as being equivalent to anything. */
7540 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7541 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7542 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7543 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7544 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7545 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7546 cmp = 1;
7547 else
7548 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7550 if (cmp <= 0)
7551 return cmp;
7553 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7555 case WITH_CLEANUP_EXPR:
7556 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7557 if (cmp <= 0)
7558 return cmp;
7560 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7562 case COMPONENT_REF:
7563 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7564 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7566 return 0;
7568 case VAR_DECL:
7569 case PARM_DECL:
7570 case CONST_DECL:
7571 case FUNCTION_DECL:
7572 return 0;
7574 default:
7575 if (POLY_INT_CST_P (t1))
7576 /* A false return means maybe_ne rather than known_ne. */
7577 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
7578 TYPE_SIGN (TREE_TYPE (t1))),
7579 poly_widest_int::from (poly_int_cst_value (t2),
7580 TYPE_SIGN (TREE_TYPE (t2))));
7581 break;
7584 /* This general rule works for most tree codes. All exceptions should be
7585 handled above. If this is a language-specific tree code, we can't
7586 trust what might be in the operand, so say we don't know
7587 the situation. */
7588 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7589 return -1;
7591 switch (TREE_CODE_CLASS (code1))
7593 case tcc_unary:
7594 case tcc_binary:
7595 case tcc_comparison:
7596 case tcc_expression:
7597 case tcc_reference:
7598 case tcc_statement:
7599 cmp = 1;
7600 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7602 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7603 if (cmp <= 0)
7604 return cmp;
7607 return cmp;
7609 default:
7610 return -1;
7614 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7615 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7616 than U, respectively. */
7619 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7621 if (tree_int_cst_sgn (t) < 0)
7622 return -1;
7623 else if (!tree_fits_uhwi_p (t))
7624 return 1;
7625 else if (TREE_INT_CST_LOW (t) == u)
7626 return 0;
7627 else if (TREE_INT_CST_LOW (t) < u)
7628 return -1;
7629 else
7630 return 1;
7633 /* Return true if SIZE represents a constant size that is in bounds of
7634 what the middle-end and the backend accept (covering not more than
7635 half of the address-space).
7636 When PERR is non-null, set *PERR on failure to the description of
7637 why SIZE is not valid. */
7639 bool
7640 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
7642 if (POLY_INT_CST_P (size))
7644 if (TREE_OVERFLOW (size))
7645 return false;
7646 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7647 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
7648 return false;
7649 return true;
7652 cst_size_error error;
7653 if (!perr)
7654 perr = &error;
7656 if (TREE_CODE (size) != INTEGER_CST)
7658 *perr = cst_size_not_constant;
7659 return false;
7662 if (TREE_OVERFLOW_P (size))
7664 *perr = cst_size_overflow;
7665 return false;
7668 if (tree_int_cst_sgn (size) < 0)
7670 *perr = cst_size_negative;
7671 return false;
7673 if (!tree_fits_uhwi_p (size)
7674 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
7675 < wi::to_widest (size) * 2))
7677 *perr = cst_size_too_big;
7678 return false;
7681 return true;
7684 /* Return the precision of the type, or for a complex or vector type the
7685 precision of the type of its elements. */
7687 unsigned int
7688 element_precision (const_tree type)
7690 if (!TYPE_P (type))
7691 type = TREE_TYPE (type);
7692 enum tree_code code = TREE_CODE (type);
7693 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7694 type = TREE_TYPE (type);
7696 return TYPE_PRECISION (type);
7699 /* Return true if CODE represents an associative tree code. Otherwise
7700 return false. */
7701 bool
7702 associative_tree_code (enum tree_code code)
7704 switch (code)
7706 case BIT_IOR_EXPR:
7707 case BIT_AND_EXPR:
7708 case BIT_XOR_EXPR:
7709 case PLUS_EXPR:
7710 case MULT_EXPR:
7711 case MIN_EXPR:
7712 case MAX_EXPR:
7713 return true;
7715 default:
7716 break;
7718 return false;
7721 /* Return true if CODE represents a commutative tree code. Otherwise
7722 return false. */
7723 bool
7724 commutative_tree_code (enum tree_code code)
7726 switch (code)
7728 case PLUS_EXPR:
7729 case MULT_EXPR:
7730 case MULT_HIGHPART_EXPR:
7731 case MIN_EXPR:
7732 case MAX_EXPR:
7733 case BIT_IOR_EXPR:
7734 case BIT_XOR_EXPR:
7735 case BIT_AND_EXPR:
7736 case NE_EXPR:
7737 case EQ_EXPR:
7738 case UNORDERED_EXPR:
7739 case ORDERED_EXPR:
7740 case UNEQ_EXPR:
7741 case LTGT_EXPR:
7742 case TRUTH_AND_EXPR:
7743 case TRUTH_XOR_EXPR:
7744 case TRUTH_OR_EXPR:
7745 case WIDEN_MULT_EXPR:
7746 case VEC_WIDEN_MULT_HI_EXPR:
7747 case VEC_WIDEN_MULT_LO_EXPR:
7748 case VEC_WIDEN_MULT_EVEN_EXPR:
7749 case VEC_WIDEN_MULT_ODD_EXPR:
7750 return true;
7752 default:
7753 break;
7755 return false;
7758 /* Return true if CODE represents a ternary tree code for which the
7759 first two operands are commutative. Otherwise return false. */
7760 bool
7761 commutative_ternary_tree_code (enum tree_code code)
7763 switch (code)
7765 case WIDEN_MULT_PLUS_EXPR:
7766 case WIDEN_MULT_MINUS_EXPR:
7767 case DOT_PROD_EXPR:
7768 return true;
7770 default:
7771 break;
7773 return false;
7776 /* Returns true if CODE can overflow. */
7778 bool
7779 operation_can_overflow (enum tree_code code)
7781 switch (code)
7783 case PLUS_EXPR:
7784 case MINUS_EXPR:
7785 case MULT_EXPR:
7786 case LSHIFT_EXPR:
7787 /* Can overflow in various ways. */
7788 return true;
7789 case TRUNC_DIV_EXPR:
7790 case EXACT_DIV_EXPR:
7791 case FLOOR_DIV_EXPR:
7792 case CEIL_DIV_EXPR:
7793 /* For INT_MIN / -1. */
7794 return true;
7795 case NEGATE_EXPR:
7796 case ABS_EXPR:
7797 /* For -INT_MIN. */
7798 return true;
7799 default:
7800 /* These operators cannot overflow. */
7801 return false;
7805 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7806 -ftrapv doesn't generate trapping insns for CODE. */
7808 bool
7809 operation_no_trapping_overflow (tree type, enum tree_code code)
7811 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7813 /* We don't generate instructions that trap on overflow for complex or vector
7814 types. */
7815 if (!INTEGRAL_TYPE_P (type))
7816 return true;
7818 if (!TYPE_OVERFLOW_TRAPS (type))
7819 return true;
7821 switch (code)
7823 case PLUS_EXPR:
7824 case MINUS_EXPR:
7825 case MULT_EXPR:
7826 case NEGATE_EXPR:
7827 case ABS_EXPR:
7828 /* These operators can overflow, and -ftrapv generates trapping code for
7829 these. */
7830 return false;
7831 case TRUNC_DIV_EXPR:
7832 case EXACT_DIV_EXPR:
7833 case FLOOR_DIV_EXPR:
7834 case CEIL_DIV_EXPR:
7835 case LSHIFT_EXPR:
7836 /* These operators can overflow, but -ftrapv does not generate trapping
7837 code for these. */
7838 return true;
7839 default:
7840 /* These operators cannot overflow. */
7841 return true;
7845 /* Constructors for pointer, array and function types.
7846 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7847 constructed by language-dependent code, not here.) */
7849 /* Construct, lay out and return the type of pointers to TO_TYPE with
7850 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7851 reference all of memory. If such a type has already been
7852 constructed, reuse it. */
7854 tree
7855 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7856 bool can_alias_all)
7858 tree t;
7859 bool could_alias = can_alias_all;
7861 if (to_type == error_mark_node)
7862 return error_mark_node;
7864 /* If the pointed-to type has the may_alias attribute set, force
7865 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7866 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7867 can_alias_all = true;
7869 /* In some cases, languages will have things that aren't a POINTER_TYPE
7870 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7871 In that case, return that type without regard to the rest of our
7872 operands.
7874 ??? This is a kludge, but consistent with the way this function has
7875 always operated and there doesn't seem to be a good way to avoid this
7876 at the moment. */
7877 if (TYPE_POINTER_TO (to_type) != 0
7878 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7879 return TYPE_POINTER_TO (to_type);
7881 /* First, if we already have a type for pointers to TO_TYPE and it's
7882 the proper mode, use it. */
7883 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7884 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7885 return t;
7887 t = make_node (POINTER_TYPE);
7889 TREE_TYPE (t) = to_type;
7890 SET_TYPE_MODE (t, mode);
7891 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7892 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7893 TYPE_POINTER_TO (to_type) = t;
7895 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7896 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7897 SET_TYPE_STRUCTURAL_EQUALITY (t);
7898 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7899 TYPE_CANONICAL (t)
7900 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7901 mode, false);
7903 /* Lay out the type. This function has many callers that are concerned
7904 with expression-construction, and this simplifies them all. */
7905 layout_type (t);
7907 return t;
7910 /* By default build pointers in ptr_mode. */
7912 tree
7913 build_pointer_type (tree to_type)
7915 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7916 : TYPE_ADDR_SPACE (to_type);
7917 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7918 return build_pointer_type_for_mode (to_type, pointer_mode, false);
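
/* Illustrative sketch, not part of the original tree.c: because the
   TYPE_POINTER_TO / TYPE_NEXT_PTR_TO chain is searched first, repeated
   requests for the same pointer type (same mode, same alias-all flag)
   return the same node.  The function name is hypothetical.  */

static void
example_pointer_sharing (void)
{
  tree p1 = build_pointer_type (integer_type_node);
  tree p2 = build_pointer_type (integer_type_node);
  gcc_assert (p1 == p2);
}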
7921 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7923 tree
7924 build_reference_type_for_mode (tree to_type, machine_mode mode,
7925 bool can_alias_all)
7927 tree t;
7928 bool could_alias = can_alias_all;
7930 if (to_type == error_mark_node)
7931 return error_mark_node;
7933 /* If the pointed-to type has the may_alias attribute set, force
7934 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7935 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7936 can_alias_all = true;
7938 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7939 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7940 In that case, return that type without regard to the rest of our
7941 operands.
7943 ??? This is a kludge, but consistent with the way this function has
7944 always operated and there doesn't seem to be a good way to avoid this
7945 at the moment. */
7946 if (TYPE_REFERENCE_TO (to_type) != 0
7947 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7948 return TYPE_REFERENCE_TO (to_type);
7950 /* First, if we already have a type for references to TO_TYPE and it's
7951 the proper mode, use it. */
7952 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7953 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7954 return t;
7956 t = make_node (REFERENCE_TYPE);
7958 TREE_TYPE (t) = to_type;
7959 SET_TYPE_MODE (t, mode);
7960 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7961 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7962 TYPE_REFERENCE_TO (to_type) = t;
7964 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7965 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7966 SET_TYPE_STRUCTURAL_EQUALITY (t);
7967 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7968 TYPE_CANONICAL (t)
7969 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7970 mode, false);
7972 layout_type (t);
7974 return t;
7978 /* Build the node for the type of references-to-TO_TYPE by default
7979 in ptr_mode. */
7981 tree
7982 build_reference_type (tree to_type)
7984 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7985 : TYPE_ADDR_SPACE (to_type);
7986 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7987 return build_reference_type_for_mode (to_type, pointer_mode, false);
7990 #define MAX_INT_CACHED_PREC \
7991 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7992 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7994 /* Builds a signed or unsigned integer type of precision PRECISION.
7995 Used for C bitfields whose precision does not match that of
7996 built-in target types. */
7997 tree
7998 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7999 int unsignedp)
8001 tree itype, ret;
8003 if (unsignedp)
8004 unsignedp = MAX_INT_CACHED_PREC + 1;
8006 if (precision <= MAX_INT_CACHED_PREC)
8008 itype = nonstandard_integer_type_cache[precision + unsignedp];
8009 if (itype)
8010 return itype;
8013 itype = make_node (INTEGER_TYPE);
8014 TYPE_PRECISION (itype) = precision;
8016 if (unsignedp)
8017 fixup_unsigned_type (itype);
8018 else
8019 fixup_signed_type (itype);
8021 inchash::hash hstate;
8022 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8023 ret = type_hash_canon (hstate.end (), itype);
8024 if (precision <= MAX_INT_CACHED_PREC)
8025 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8027 return ret;
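
/* Illustrative sketch, not part of the original tree.c: the public
   entry point corresponding to the manual type_hash_canon sequence
   sketched earlier.  Results up to MAX_INT_CACHED_PREC bits are cached,
   so repeated calls return the same node.  The function name is
   hypothetical.  */

static tree
example_uint24_bitfield_type (void)
{
  return build_nonstandard_integer_type (24, /*unsignedp=*/1);
}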
8030 #define MAX_BOOL_CACHED_PREC \
8031 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8032 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8034 /* Builds a boolean type of precision PRECISION.
8035 Used for boolean vectors to choose proper vector element size. */
8036 tree
8037 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8039 tree type;
8041 if (precision <= MAX_BOOL_CACHED_PREC)
8043 type = nonstandard_boolean_type_cache[precision];
8044 if (type)
8045 return type;
8048 type = make_node (BOOLEAN_TYPE);
8049 TYPE_PRECISION (type) = precision;
8050 fixup_signed_type (type);
8052 if (precision <= MAX_BOOL_CACHED_PREC)
8053 nonstandard_boolean_type_cache[precision] = type;
8055 return type;
8058 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8059 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8060 is true, reuse such a type that has already been constructed. */
8062 static tree
8063 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8065 tree itype = make_node (INTEGER_TYPE);
8067 TREE_TYPE (itype) = type;
8069 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8070 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8072 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8073 SET_TYPE_MODE (itype, TYPE_MODE (type));
8074 TYPE_SIZE (itype) = TYPE_SIZE (type);
8075 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8076 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8077 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8078 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
8080 if (!shared)
8081 return itype;
8083 if ((TYPE_MIN_VALUE (itype)
8084 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8085 || (TYPE_MAX_VALUE (itype)
8086 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8088 /* Since we cannot reliably merge this type, we need to compare it using
8089 structural equality checks. */
8090 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8091 return itype;
8094 hashval_t hash = type_hash_canon_hash (itype);
8095 itype = type_hash_canon (hash, itype);
8097 return itype;
8100 /* Wrapper around build_range_type_1 with SHARED set to true. */
8102 tree
8103 build_range_type (tree type, tree lowval, tree highval)
8105 return build_range_type_1 (type, lowval, highval, true);
8108 /* Wrapper around build_range_type_1 with SHARED set to false. */
8110 tree
8111 build_nonshared_range_type (tree type, tree lowval, tree highval)
8113 return build_range_type_1 (type, lowval, highval, false);
8116 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8117 MAXVAL should be the maximum value in the domain
8118 (one less than the length of the array).
8120 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8121 We don't enforce this limit; that is up to the caller (e.g. the language front end).
8122 The limit exists because the result is a signed type and we don't handle
8123 sizes that use more than one HOST_WIDE_INT. */
8125 tree
8126 build_index_type (tree maxval)
8128 return build_range_type (sizetype, size_zero_node, maxval);
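/* Illustrative usage sketch (editor's addition): the TYPE_DOMAIN for a
   ten-element array, i.e. the index range [0, 9] over sizetype, is

     tree domain = build_index_type (size_int (9));

   MAXVAL is the highest valid index, one less than the element count.  */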
8131 /* Return true if the debug information for TYPE, a subtype, should be emitted
8132 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8133 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8134 debug info and doesn't reflect the source code. */
8136 bool
8137 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8139 tree base_type = TREE_TYPE (type), low, high;
8141 /* Subrange types have a base type which is an integral type. */
8142 if (!INTEGRAL_TYPE_P (base_type))
8143 return false;
8145 /* Get the real bounds of the subtype. */
8146 if (lang_hooks.types.get_subrange_bounds)
8147 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8148 else
8150 low = TYPE_MIN_VALUE (type);
8151 high = TYPE_MAX_VALUE (type);
8154 /* If the type and its base type have the same representation and the same
8155 name, then the type is not a subrange but a copy of the base type. */
8156 if ((TREE_CODE (base_type) == INTEGER_TYPE
8157 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8158 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8159 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8160 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8161 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8162 return false;
8164 if (lowval)
8165 *lowval = low;
8166 if (highval)
8167 *highval = high;
8168 return true;
8171 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8172 and number of elements specified by the range of values of INDEX_TYPE.
8173 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
8174 If SHARED is true, reuse such a type that has already been constructed.
8175 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
8177 static tree
8178 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
8179 bool shared, bool set_canonical)
8181 tree t;
8183 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8185 error ("arrays of functions are not meaningful");
8186 elt_type = integer_type_node;
8189 t = make_node (ARRAY_TYPE);
8190 TREE_TYPE (t) = elt_type;
8191 TYPE_DOMAIN (t) = index_type;
8192 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8193 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
8194 layout_type (t);
8196 if (shared)
8198 hashval_t hash = type_hash_canon_hash (t);
8199 t = type_hash_canon (hash, t);
8202 if (TYPE_CANONICAL (t) == t && set_canonical)
8204 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8205 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8206 || in_lto_p)
8207 SET_TYPE_STRUCTURAL_EQUALITY (t);
8208 else if (TYPE_CANONICAL (elt_type) != elt_type
8209 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8210 TYPE_CANONICAL (t)
8211 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8212 index_type
8213 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8214 typeless_storage, shared, set_canonical);
8217 return t;
8220 /* Wrapper around build_array_type_1 with SHARED set to true. */
8222 tree
8223 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
8225 return
8226 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
8229 /* Wrapper around build_array_type_1 with SHARED set to false. */
8231 tree
8232 build_nonshared_array_type (tree elt_type, tree index_type)
8234 return build_array_type_1 (elt_type, index_type, false, false, true);
8237 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8238 sizetype. */
8240 tree
8241 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
8243 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
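/* Illustrative usage sketch (editor's addition): the type "char[16]" can
   be obtained directly from the element type and element count with

     tree buf_type = build_array_type_nelts (char_type_node, 16);

   which is equivalent to calling build_array_type with the domain
   build_index_type (size_int (15)).  */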
8246 /* Recursively examines the array elements of TYPE, until a non-array
8247 element type is found. */
8249 tree
8250 strip_array_types (tree type)
8252 while (TREE_CODE (type) == ARRAY_TYPE)
8253 type = TREE_TYPE (type);
8255 return type;
8258 /* Computes the canonical argument types from the argument type list
8259 ARGTYPES.
8261 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8262 on entry to this function, or if any of the ARGTYPES are
8263 structural.
8265 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8266 true on entry to this function, or if any of the ARGTYPES are
8267 non-canonical.
8269 Returns a canonical argument list, which may be ARGTYPES when the
8270 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8271 true) or would not differ from ARGTYPES. */
8273 static tree
8274 maybe_canonicalize_argtypes (tree argtypes,
8275 bool *any_structural_p,
8276 bool *any_noncanonical_p)
8278 tree arg;
8279 bool any_noncanonical_argtypes_p = false;
8281 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8283 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8284 /* Fail gracefully by stating that the type is structural. */
8285 *any_structural_p = true;
8286 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8287 *any_structural_p = true;
8288 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8289 || TREE_PURPOSE (arg))
8290 /* If the argument has a default argument, we consider it
8291 non-canonical even though the type itself is canonical.
8292 That way, different variants of function and method types
8293 with default arguments will all point to the variant with
8294 no defaults as their canonical type. */
8295 any_noncanonical_argtypes_p = true;
8298 if (*any_structural_p)
8299 return argtypes;
8301 if (any_noncanonical_argtypes_p)
8303 /* Build the canonical list of argument types. */
8304 tree canon_argtypes = NULL_TREE;
8305 bool is_void = false;
8307 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8309 if (arg == void_list_node)
8310 is_void = true;
8311 else
8312 canon_argtypes = tree_cons (NULL_TREE,
8313 TYPE_CANONICAL (TREE_VALUE (arg)),
8314 canon_argtypes);
8317 canon_argtypes = nreverse (canon_argtypes);
8318 if (is_void)
8319 canon_argtypes = chainon (canon_argtypes, void_list_node);
8321 /* There is a non-canonical type. */
8322 *any_noncanonical_p = true;
8323 return canon_argtypes;
8326 /* The canonical argument types are the same as ARGTYPES. */
8327 return argtypes;
8330 /* Construct, lay out and return
8331 the type of functions returning type VALUE_TYPE
8332 given arguments of types ARG_TYPES.
8333 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8334 are data type nodes for the arguments of the function.
8335 If such a type has already been constructed, reuse it. */
8337 tree
8338 build_function_type (tree value_type, tree arg_types)
8340 tree t;
8341 inchash::hash hstate;
8342 bool any_structural_p, any_noncanonical_p;
8343 tree canon_argtypes;
8345 gcc_assert (arg_types != error_mark_node);
8347 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8349 error ("function return type cannot be function");
8350 value_type = integer_type_node;
8353 /* Make a node of the sort we want. */
8354 t = make_node (FUNCTION_TYPE);
8355 TREE_TYPE (t) = value_type;
8356 TYPE_ARG_TYPES (t) = arg_types;
8358 /* If we already have such a type, use the old one. */
8359 hashval_t hash = type_hash_canon_hash (t);
8360 t = type_hash_canon (hash, t);
8362 /* Set up the canonical type. */
8363 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8364 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8365 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8366 &any_structural_p,
8367 &any_noncanonical_p);
8368 if (any_structural_p)
8369 SET_TYPE_STRUCTURAL_EQUALITY (t);
8370 else if (any_noncanonical_p)
8371 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8372 canon_argtypes);
8374 if (!COMPLETE_TYPE_P (t))
8375 layout_type (t);
8376 return t;
8379 /* Build a function type. The RETURN_TYPE is the type returned by the
8380 function. If VAARGS is set, no void_type_node is appended to the
8381 list. ARGP must always be terminated by a NULL_TREE. */
8383 static tree
8384 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8386 tree t, args, last;
8388 t = va_arg (argp, tree);
8389 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8390 args = tree_cons (NULL_TREE, t, args);
8392 if (vaargs)
8394 last = args;
8395 if (args != NULL_TREE)
8396 args = nreverse (args);
8397 gcc_assert (last != void_list_node);
8399 else if (args == NULL_TREE)
8400 args = void_list_node;
8401 else
8403 last = args;
8404 args = nreverse (args);
8405 TREE_CHAIN (last) = void_list_node;
8407 args = build_function_type (return_type, args);
8409 return args;
8412 /* Build a function type. The RETURN_TYPE is the type returned by the
8413 function. If additional arguments are provided, they are
8414 additional argument types. The list of argument types must always
8415 be terminated by NULL_TREE. */
8417 tree
8418 build_function_type_list (tree return_type, ...)
8420 tree args;
8421 va_list p;
8423 va_start (p, return_type);
8424 args = build_function_type_list_1 (false, return_type, p);
8425 va_end (p);
8426 return args;
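/* Illustrative usage sketch (editor's addition): the type of a function
   "int f (double, char *)" is built with a NULL_TREE-terminated call:

     tree fntype
       = build_function_type_list (integer_type_node, double_type_node,
                                   build_pointer_type (char_type_node),
                                   NULL_TREE);

   The void_list_node terminator marking the end of the named arguments
   is appended internally.  */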
8429 /* Build a variable argument function type. The RETURN_TYPE is the
8430 type returned by the function. If additional arguments are provided,
8431 they are additional argument types. The list of argument types must
8432 always be terminated by NULL_TREE. */
8434 tree
8435 build_varargs_function_type_list (tree return_type, ...)
8437 tree args;
8438 va_list p;
8440 va_start (p, return_type);
8441 args = build_function_type_list_1 (true, return_type, p);
8442 va_end (p);
8444 return args;
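/* Illustrative usage sketch (editor's addition): a printf-like type
   "int (char *, ...)" (ignoring const qualification for brevity) is

     tree vtype
       = build_varargs_function_type_list (integer_type_node,
                                           build_pointer_type (char_type_node),
                                           NULL_TREE);

   Because VAARGS is set, no terminating void_list_node is appended, and
   that missing terminator is what marks the type as taking a variable
   argument list.  */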
8447 /* Build a function type. RETURN_TYPE is the type returned by the
8448 function; VAARGS indicates whether the function takes varargs. The
8449 function takes N named arguments, the types of which are provided in
8450 ARG_TYPES. */
8452 static tree
8453 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8454 tree *arg_types)
8456 int i;
8457 tree t = vaargs ? NULL_TREE : void_list_node;
8459 for (i = n - 1; i >= 0; i--)
8460 t = tree_cons (NULL_TREE, arg_types[i], t);
8462 return build_function_type (return_type, t);
8465 /* Build a function type. RETURN_TYPE is the type returned by the
8466 function. The function takes N named arguments, the types of which
8467 are provided in ARG_TYPES. */
8469 tree
8470 build_function_type_array (tree return_type, int n, tree *arg_types)
8472 return build_function_type_array_1 (false, return_type, n, arg_types);
8475 /* Build a variable argument function type. RETURN_TYPE is the type
8476 returned by the function. The function takes N named arguments, the
8477 types of which are provided in ARG_TYPES. */
8479 tree
8480 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8482 return build_function_type_array_1 (true, return_type, n, arg_types);
8485 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8486 and ARGTYPES (a TREE_LIST) are the return type and argument types
8487 for the method. An implicit additional parameter (of type
8488 pointer-to-BASETYPE) is added to the ARGTYPES. */
8490 tree
8491 build_method_type_directly (tree basetype,
8492 tree rettype,
8493 tree argtypes)
8495 tree t;
8496 tree ptype;
8497 bool any_structural_p, any_noncanonical_p;
8498 tree canon_argtypes;
8500 /* Make a node of the sort we want. */
8501 t = make_node (METHOD_TYPE);
8503 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8504 TREE_TYPE (t) = rettype;
8505 ptype = build_pointer_type (basetype);
8507 /* The actual arglist for this function includes a "hidden" argument
8508 which is "this". Put it into the list of argument types. */
8509 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8510 TYPE_ARG_TYPES (t) = argtypes;
8512 /* If we already have such a type, use the old one. */
8513 hashval_t hash = type_hash_canon_hash (t);
8514 t = type_hash_canon (hash, t);
8516 /* Set up the canonical type. */
8517 any_structural_p
8518 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8519 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8520 any_noncanonical_p
8521 = (TYPE_CANONICAL (basetype) != basetype
8522 || TYPE_CANONICAL (rettype) != rettype);
8523 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8524 &any_structural_p,
8525 &any_noncanonical_p);
8526 if (any_structural_p)
8527 SET_TYPE_STRUCTURAL_EQUALITY (t);
8528 else if (any_noncanonical_p)
8529 TYPE_CANONICAL (t)
8530 = build_method_type_directly (TYPE_CANONICAL (basetype),
8531 TYPE_CANONICAL (rettype),
8532 canon_argtypes);
8533 if (!COMPLETE_TYPE_P (t))
8534 layout_type (t);
8536 return t;
8539 /* Construct, lay out and return the type of methods belonging to class
8540 BASETYPE and whose arguments and values are described by TYPE.
8541 If that type exists already, reuse it.
8542 TYPE must be a FUNCTION_TYPE node. */
8544 tree
8545 build_method_type (tree basetype, tree type)
8547 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8549 return build_method_type_directly (basetype,
8550 TREE_TYPE (type),
8551 TYPE_ARG_TYPES (type));
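/* Illustrative usage sketch (editor's addition, using a hypothetical
   RECORD_TYPE variable class_type): a C++-style member function
   "int C::f (double)" could be built as

     tree fntype = build_function_type_list (integer_type_node,
                                             double_type_node, NULL_TREE);
     tree mtype = build_method_type (class_type, fntype);

   The implicit "this" argument (a pointer to class_type) is prepended to
   the argument list by build_method_type_directly.  */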
8554 /* Construct, lay out and return the type of offsets to a value
8555 of type TYPE, within an object of type BASETYPE.
8556 If a suitable offset type exists already, reuse it. */
8558 tree
8559 build_offset_type (tree basetype, tree type)
8561 tree t;
8563 /* Make a node of the sort we want. */
8564 t = make_node (OFFSET_TYPE);
8566 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8567 TREE_TYPE (t) = type;
8569 /* If we already have such a type, use the old one. */
8570 hashval_t hash = type_hash_canon_hash (t);
8571 t = type_hash_canon (hash, t);
8573 if (!COMPLETE_TYPE_P (t))
8574 layout_type (t);
8576 if (TYPE_CANONICAL (t) == t)
8578 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8579 || TYPE_STRUCTURAL_EQUALITY_P (type))
8580 SET_TYPE_STRUCTURAL_EQUALITY (t);
8581 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8582 || TYPE_CANONICAL (type) != type)
8583 TYPE_CANONICAL (t)
8584 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8585 TYPE_CANONICAL (type));
8588 return t;
8591 /* Create a complex type whose components are COMPONENT_TYPE.
8593 If NAMED is true, the type is given a TYPE_NAME. We do not always
8594 do so because this creates a DECL node and thus makes the DECL_UIDs
8595 dependent on the type canonicalization hashtable, which is GC-ed,
8596 so the DECL_UIDs would not be stable wrt garbage collection. */
8598 tree
8599 build_complex_type (tree component_type, bool named)
8601 gcc_assert (INTEGRAL_TYPE_P (component_type)
8602 || SCALAR_FLOAT_TYPE_P (component_type)
8603 || FIXED_POINT_TYPE_P (component_type));
8605 /* Make a node of the sort we want. */
8606 tree probe = make_node (COMPLEX_TYPE);
8608 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
8610 /* If we already have such a type, use the old one. */
8611 hashval_t hash = type_hash_canon_hash (probe);
8612 tree t = type_hash_canon (hash, probe);
8614 if (t == probe)
8616 /* We created a new type. The hash insertion will have laid
8617 out the type. We need to check the canonicalization and
8618 maybe set the name. */
8619 gcc_checking_assert (COMPLETE_TYPE_P (t)
8620 && !TYPE_NAME (t)
8621 && TYPE_CANONICAL (t) == t);
8623 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
8624 SET_TYPE_STRUCTURAL_EQUALITY (t);
8625 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
8626 TYPE_CANONICAL (t)
8627 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
8629 /* We need to create a name, since complex is a fundamental type. */
8630 if (named)
8632 const char *name = NULL;
8634 if (TREE_TYPE (t) == char_type_node)
8635 name = "complex char";
8636 else if (TREE_TYPE (t) == signed_char_type_node)
8637 name = "complex signed char";
8638 else if (TREE_TYPE (t) == unsigned_char_type_node)
8639 name = "complex unsigned char";
8640 else if (TREE_TYPE (t) == short_integer_type_node)
8641 name = "complex short int";
8642 else if (TREE_TYPE (t) == short_unsigned_type_node)
8643 name = "complex short unsigned int";
8644 else if (TREE_TYPE (t) == integer_type_node)
8645 name = "complex int";
8646 else if (TREE_TYPE (t) == unsigned_type_node)
8647 name = "complex unsigned int";
8648 else if (TREE_TYPE (t) == long_integer_type_node)
8649 name = "complex long int";
8650 else if (TREE_TYPE (t) == long_unsigned_type_node)
8651 name = "complex long unsigned int";
8652 else if (TREE_TYPE (t) == long_long_integer_type_node)
8653 name = "complex long long int";
8654 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
8655 name = "complex long long unsigned int";
8657 if (name != NULL)
8658 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8659 get_identifier (name), t);
8663 return build_qualified_type (t, TYPE_QUALS (component_type));
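/* Illustrative usage sketch (editor's addition): the named type for
   "_Complex int" is obtained with

     tree cint = build_complex_type (integer_type_node, true);

   On the first call this creates the COMPLEX_TYPE, gives it the
   TYPE_NAME "complex int" from the table above, and caches it in the
   type hash table; later calls return the same node.  */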
8666 /* If TYPE is a real or complex floating-point type and the target
8667 does not directly support arithmetic on TYPE then return the wider
8668 type to be used for arithmetic on TYPE. Otherwise, return
8669 NULL_TREE. */
8671 tree
8672 excess_precision_type (tree type)
8674 /* The target can give two different responses to the question of
8675 which excess precision mode it would like depending on whether we
8676 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8678 enum excess_precision_type requested_type
8679 = (flag_excess_precision == EXCESS_PRECISION_FAST
8680 ? EXCESS_PRECISION_TYPE_FAST
8681 : EXCESS_PRECISION_TYPE_STANDARD);
8683 enum flt_eval_method target_flt_eval_method
8684 = targetm.c.excess_precision (requested_type);
8686 /* The target should not ask for unpredictable float evaluation (though
8687 it might implicitly advertise that the evaluation is unpredictable,
8688 but we don't care about that here; it will have been reported
8689 elsewhere). If it does ask for unpredictable evaluation, we have
8690 nothing to do here. */
8691 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8693 /* Nothing to do. The target has asked for all types we know about
8694 to be computed with their native precision and range. */
8695 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8696 return NULL_TREE;
8698 /* The target will promote this type in a target-dependent way, so excess
8699 precision ought to leave it alone. */
8700 if (targetm.promoted_type (type) != NULL_TREE)
8701 return NULL_TREE;
8703 machine_mode float16_type_mode = (float16_type_node
8704 ? TYPE_MODE (float16_type_node)
8705 : VOIDmode);
8706 machine_mode float_type_mode = TYPE_MODE (float_type_node);
8707 machine_mode double_type_mode = TYPE_MODE (double_type_node);
8709 switch (TREE_CODE (type))
8711 case REAL_TYPE:
8713 machine_mode type_mode = TYPE_MODE (type);
8714 switch (target_flt_eval_method)
8716 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8717 if (type_mode == float16_type_mode)
8718 return float_type_node;
8719 break;
8720 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8721 if (type_mode == float16_type_mode
8722 || type_mode == float_type_mode)
8723 return double_type_node;
8724 break;
8725 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8726 if (type_mode == float16_type_mode
8727 || type_mode == float_type_mode
8728 || type_mode == double_type_mode)
8729 return long_double_type_node;
8730 break;
8731 default:
8732 gcc_unreachable ();
8734 break;
8736 case COMPLEX_TYPE:
8738 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8739 return NULL_TREE;
8740 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8741 switch (target_flt_eval_method)
8743 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8744 if (type_mode == float16_type_mode)
8745 return complex_float_type_node;
8746 break;
8747 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8748 if (type_mode == float16_type_mode
8749 || type_mode == float_type_mode)
8750 return complex_double_type_node;
8751 break;
8752 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8753 if (type_mode == float16_type_mode
8754 || type_mode == float_type_mode
8755 || type_mode == double_type_mode)
8756 return complex_long_double_type_node;
8757 break;
8758 default:
8759 gcc_unreachable ();
8761 break;
8763 default:
8764 break;
8767 return NULL_TREE;
8770 /* Return OP, stripped of any conversions to wider types as much as is safe.
8771 Converting the value back to OP's type makes a value equivalent to OP.
8773 If FOR_TYPE is nonzero, we return a value which, if converted to
8774 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8776 OP must have integer, real or enumeral type. Pointers are not allowed!
8778 There are some cases where the obvious value we could return
8779 would regenerate to OP if converted to OP's type,
8780 but would not extend like OP to wider types.
8781 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8782 For example, if OP is (unsigned short)(signed char)-1,
8783 we avoid returning (signed char)-1 if FOR_TYPE is int,
8784 even though extending that to an unsigned short would regenerate OP,
8785 since the result of extending (signed char)-1 to (int)
8786 is different from (int) OP. */
8788 tree
8789 get_unwidened (tree op, tree for_type)
8791 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8792 tree type = TREE_TYPE (op);
8793 unsigned final_prec
8794 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8795 int uns
8796 = (for_type != 0 && for_type != type
8797 && final_prec > TYPE_PRECISION (type)
8798 && TYPE_UNSIGNED (type));
8799 tree win = op;
8801 while (CONVERT_EXPR_P (op))
8803 int bitschange;
8805 /* TYPE_PRECISION on vector types has different meaning
8806 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8807 so avoid them here. */
8808 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8809 break;
8811 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8812 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8814 /* Truncations are many-one so cannot be removed,
8815 unless we are later going to truncate down even further. */
8816 if (bitschange < 0
8817 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8818 break;
8820 /* See what's inside this conversion. If we decide to strip it,
8821 we will set WIN. */
8822 op = TREE_OPERAND (op, 0);
8824 /* If we have not stripped any zero-extensions (uns is 0),
8825 we can strip any kind of extension.
8826 If we have previously stripped a zero-extension,
8827 only zero-extensions can safely be stripped.
8828 Any extension can be stripped if the bits it would produce
8829 are all going to be discarded later by truncating to FOR_TYPE. */
8831 if (bitschange > 0)
8833 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8834 win = op;
8835 /* TYPE_UNSIGNED says whether this is a zero-extension.
8836 Let's avoid computing it if it does not affect WIN
8837 and if UNS will not be needed again. */
8838 if ((uns
8839 || CONVERT_EXPR_P (op))
8840 && TYPE_UNSIGNED (TREE_TYPE (op)))
8842 uns = 1;
8843 win = op;
8848 /* If we finally reach a constant, see if it fits in something smaller and
8849 in that case convert it. */
8850 if (TREE_CODE (win) == INTEGER_CST)
8852 tree wtype = TREE_TYPE (win);
8853 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8854 if (for_type)
8855 prec = MAX (prec, final_prec);
8856 if (prec < TYPE_PRECISION (wtype))
8858 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8859 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8860 win = fold_convert (t, win);
8864 return win;
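/* Illustrative example (editor's addition): if C is a variable of type
   char and OP is the widening conversion (int) C, then

     get_unwidened (op, NULL_TREE)

   strips the extension and returns C itself, since converting C back to
   int regenerates OP.  A truncating conversion, by contrast, is never
   stripped unless FOR_TYPE will truncate even further.  */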
8867 /* Return OP or a simpler expression for a narrower value
8868 which can be sign-extended or zero-extended to give back OP.
8869 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8870 or 0 if the value should be sign-extended. */
8872 tree
8873 get_narrower (tree op, int *unsignedp_ptr)
8875 int uns = 0;
8876 int first = 1;
8877 tree win = op;
8878 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8880 if (TREE_CODE (op) == COMPOUND_EXPR)
8883 op = TREE_OPERAND (op, 1);
8884 while (TREE_CODE (op) == COMPOUND_EXPR);
8885 tree ret = get_narrower (op, unsignedp_ptr);
8886 if (ret == op)
8887 return win;
8888 auto_vec <tree, 16> v;
8889 unsigned int i;
8890 for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
8891 op = TREE_OPERAND (op, 1))
8892 v.safe_push (op);
8893 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
8894 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
8895 TREE_TYPE (ret), TREE_OPERAND (op, 0),
8896 ret);
8897 return ret;
8899 while (TREE_CODE (op) == NOP_EXPR)
8901 int bitschange
8902 = (TYPE_PRECISION (TREE_TYPE (op))
8903 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8905 /* Truncations are many-one so cannot be removed. */
8906 if (bitschange < 0)
8907 break;
8909 /* See what's inside this conversion. If we decide to strip it,
8910 we will set WIN. */
8912 if (bitschange > 0)
8914 op = TREE_OPERAND (op, 0);
8915 /* An extension: the outermost one can be stripped,
8916 but remember whether it is zero or sign extension. */
8917 if (first)
8918 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8919 /* Otherwise, if a sign extension has been stripped,
8920 only sign extensions can now be stripped;
8921 if a zero extension has been stripped, only zero-extensions. */
8922 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8923 break;
8924 first = 0;
8926 else /* bitschange == 0 */
8928 /* A change in nominal type can always be stripped, but we must
8929 preserve the unsignedness. */
8930 if (first)
8931 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8932 first = 0;
8933 op = TREE_OPERAND (op, 0);
8934 /* Keep trying to narrow, but don't assign op to win if it
8935 would turn an integral type into something else. */
8936 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8937 continue;
8940 win = op;
8943 if (TREE_CODE (op) == COMPONENT_REF
8944 /* Since type_for_size always gives an integer type. */
8945 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8946 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8947 /* Ensure field is laid out already. */
8948 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8949 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8951 unsigned HOST_WIDE_INT innerprec
8952 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8953 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8954 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8955 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8957 /* We can get this structure field in a narrower type that fits it,
8958 but the resulting extension to its nominal type (a fullword type)
8959 must satisfy the same conditions as for other extensions.
8961 Do this only for fields that are aligned (not bit-fields),
8962 because when bit-field insns will be used there is no
8963 advantage in doing this. */
8965 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8966 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8967 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8968 && type != 0)
8970 if (first)
8971 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8972 win = fold_convert (type, op);
8976 *unsignedp_ptr = uns;
8977 return win;
8980 /* Return true if integer constant C has a value that is permissible
8981 for TYPE, an integral type. */
8983 bool
8984 int_fits_type_p (const_tree c, const_tree type)
8986 tree type_low_bound, type_high_bound;
8987 bool ok_for_low_bound, ok_for_high_bound;
8988 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8990 /* Non-standard boolean types can have arbitrary precision but various
8991 transformations assume that they can only take values 0 and +/-1. */
8992 if (TREE_CODE (type) == BOOLEAN_TYPE)
8993 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8995 retry:
8996 type_low_bound = TYPE_MIN_VALUE (type);
8997 type_high_bound = TYPE_MAX_VALUE (type);
8999 /* If at least one bound of the type is a constant integer, we can check
9000 ourselves and maybe make a decision. If no such decision is possible, but
9001 this type is a subtype, try checking against that. Otherwise, use
9002 fits_to_tree_p, which checks against the precision.
9004 Compute the status for each possibly constant bound, and return if we see
9005 one does not match. Use ok_for_xxx_bound for this purpose: it is true
9006 when the constant is known to fit the corresponding bound, and false when
9007 that bound is not a constant (the known-not-to-fit case returns early). */
9009 /* Check if c >= type_low_bound. */
9010 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9012 if (tree_int_cst_lt (c, type_low_bound))
9013 return false;
9014 ok_for_low_bound = true;
9016 else
9017 ok_for_low_bound = false;
9019 /* Check if c <= type_high_bound. */
9020 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9022 if (tree_int_cst_lt (type_high_bound, c))
9023 return false;
9024 ok_for_high_bound = true;
9026 else
9027 ok_for_high_bound = false;
9029 /* If the constant fits both bounds, the result is known. */
9030 if (ok_for_low_bound && ok_for_high_bound)
9031 return true;
9033 /* Perform some generic filtering which may allow making a decision
9034 even if the bounds are not constant. First, negative integers
9035 never fit in unsigned types. */
9036 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
9037 return false;
9039 /* Second, narrower types always fit in wider ones. */
9040 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9041 return true;
9043 /* Third, unsigned integers with top bit set never fit signed types. */
9044 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9046 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
9047 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9049 /* When a tree_cst is converted to a wide-int, the precision
9050 is taken from the type. However, if the precision of the
9051 mode underneath the type is smaller than that, it is
9052 possible that the value will not fit. The test below
9053 fails if any bit is set between the sign bit of the
9054 underlying mode and the top bit of the type. */
9055 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
9056 return false;
9058 else if (wi::neg_p (wi::to_wide (c)))
9059 return false;
9062 /* If we haven't been able to decide at this point, there is nothing more we
9063 can check ourselves here. Look at the base type if we have one and it
9064 has the same precision. */
9065 if (TREE_CODE (type) == INTEGER_TYPE
9066 && TREE_TYPE (type) != 0
9067 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9069 type = TREE_TYPE (type);
9070 goto retry;
9073 /* Or to fits_to_tree_p, if nothing else. */
9074 return wi::fits_to_tree_p (wi::to_wide (c), type);
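/* Illustrative example (editor's addition): for an int constant,

     int_fits_type_p (build_int_cst (integer_type_node, 200),
                      unsigned_char_type_node)

   is true on the usual 8-bit-char targets because 200 lies in [0, 255],
   while the same call with the value 300 is false since it exceeds the
   type's upper bound.  */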
9077 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9078 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9079 represented (assuming two's-complement arithmetic) within the bit
9080 precision of the type are returned instead. */
9082 void
9083 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9085 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9086 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9087 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
9088 else
9090 if (TYPE_UNSIGNED (type))
9091 mpz_set_ui (min, 0);
9092 else
9094 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9095 wi::to_mpz (mn, min, SIGNED);
9099 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9100 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9101 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
9102 else
9104 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9105 wi::to_mpz (mn, max, TYPE_SIGN (type));
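/* Illustrative usage sketch (editor's addition): querying the static
   range of unsigned char on the usual 8-bit-char targets:

     mpz_t lo, hi;
     mpz_init (lo);
     mpz_init (hi);
     get_type_static_bounds (unsigned_char_type_node, lo, hi);
     ... lo is now 0 and hi is 255 ...
     mpz_clear (lo);
     mpz_clear (hi);

   The caller owns the GMP initialization and cleanup of MIN and MAX.  */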
9109 /* Return true if VAR is an automatic variable. */
9111 bool
9112 auto_var_p (const_tree var)
9114 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
9115 || TREE_CODE (var) == PARM_DECL)
9116 && ! TREE_STATIC (var))
9117 || TREE_CODE (var) == RESULT_DECL);
9120 /* Return true if VAR is an automatic variable defined in function FN. */
9122 bool
9123 auto_var_in_fn_p (const_tree var, const_tree fn)
9125 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9126 && (auto_var_p (var)
9127 || TREE_CODE (var) == LABEL_DECL));
9130 /* Subprogram of the following function. Called by walk_tree.
9132 Return *TP if it is an automatic variable or parameter of the
9133 function passed in as DATA. */
9135 static tree
9136 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9138 tree fn = (tree) data;
9140 if (TYPE_P (*tp))
9141 *walk_subtrees = 0;
9143 else if (DECL_P (*tp)
9144 && auto_var_in_fn_p (*tp, fn))
9145 return *tp;
9147 return NULL_TREE;
9150 /* Returns true if T is, contains, or refers to a type with variable
9151 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9152 arguments, but not the return type. If FN is nonzero, only return
9153 true if a modifier of the type or position of FN is a variable or
9154 parameter inside FN.
9156 This concept is more general than that of C99 'variably modified types':
9157 in C99, a struct type is never variably modified because a VLA may not
9158 appear as a structure member. However, in GNU C, code like:
9160 struct S { int i[f()]; };
9162 is valid, and other languages may define similar constructs. */
9164 bool
9165 variably_modified_type_p (tree type, tree fn)
9167 tree t;
9169 /* Test if T is either variable (if FN is zero) or an expression containing
9170 a variable in FN. If TYPE isn't gimplified, return true also if
9171 gimplify_one_sizepos would gimplify the expression into a local
9172 variable. */
9173 #define RETURN_TRUE_IF_VAR(T) \
9174 do { tree _t = (T); \
9175 if (_t != NULL_TREE \
9176 && _t != error_mark_node \
9177 && !CONSTANT_CLASS_P (_t) \
9178 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9179 && (!fn \
9180 || (!TYPE_SIZES_GIMPLIFIED (type) \
9181 && (TREE_CODE (_t) != VAR_DECL \
9182 && !CONTAINS_PLACEHOLDER_P (_t))) \
9183 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9184 return true; } while (0)
9186 if (type == error_mark_node)
9187 return false;
9189 /* If TYPE itself has variable size, it is variably modified. */
9190 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9191 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9193 switch (TREE_CODE (type))
9195 case POINTER_TYPE:
9196 case REFERENCE_TYPE:
9197 case VECTOR_TYPE:
9198 /* Ada can have pointer types referring to themselves indirectly. */
9199 if (TREE_VISITED (type))
9200 return false;
9201 TREE_VISITED (type) = true;
9202 if (variably_modified_type_p (TREE_TYPE (type), fn))
9204 TREE_VISITED (type) = false;
9205 return true;
9207 TREE_VISITED (type) = false;
9208 break;
9210 case FUNCTION_TYPE:
9211 case METHOD_TYPE:
9212 /* If TYPE is a function type, it is variably modified if the
9213 return type is variably modified. */
9214 if (variably_modified_type_p (TREE_TYPE (type), fn))
9215 return true;
9216 break;
9218 case INTEGER_TYPE:
9219 case REAL_TYPE:
9220 case FIXED_POINT_TYPE:
9221 case ENUMERAL_TYPE:
9222 case BOOLEAN_TYPE:
9223 /* Scalar types are variably modified if their end points
9224 aren't constant. */
9225 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9226 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9227 break;
9229 case RECORD_TYPE:
9230 case UNION_TYPE:
9231 case QUAL_UNION_TYPE:
9232 /* We can't see if any of the fields are variably-modified by the
9233 definition we normally use, since that would produce infinite
9234 recursion via pointers. */
9235 /* This is variably modified if some field's type is. */
9236 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9237 if (TREE_CODE (t) == FIELD_DECL)
9239 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9240 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9241 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9243 /* If the type is a qualified union, then the DECL_QUALIFIER
9244 of fields can also be an expression containing a variable. */
9245 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9246 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9248 /* If the field is a qualified union, then it's only a container
9249 for what's inside so we look into it. That's necessary in LTO
9250 mode because the sizes of the field tested above have been set
9251 to PLACEHOLDER_EXPRs by free_lang_data. */
9252 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
9253 && variably_modified_type_p (TREE_TYPE (t), fn))
9254 return true;
9256 break;
9258 case ARRAY_TYPE:
9259 /* Do not call ourselves to avoid infinite recursion. This is
9260 variably modified if the element type is. */
9261 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9262 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9263 break;
9265 default:
9266 break;
9269 /* The current language may have other cases to check, but in general,
9270 all other types are not variably modified. */
9271 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9273 #undef RETURN_TRUE_IF_VAR
9276 /* Given a DECL or TYPE, return the scope in which it was declared, or
9277 NULL_TREE if there is no containing scope. */
9279 tree
9280 get_containing_scope (const_tree t)
9282 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9285 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
9287 const_tree
9288 get_ultimate_context (const_tree decl)
9290 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
9292 if (TREE_CODE (decl) == BLOCK)
9293 decl = BLOCK_SUPERCONTEXT (decl);
9294 else
9295 decl = get_containing_scope (decl);
9297 return decl;
9300 /* Return the innermost context enclosing DECL that is
9301 a FUNCTION_DECL, or zero if none. */
9303 tree
9304 decl_function_context (const_tree decl)
9306 tree context;
9308 if (TREE_CODE (decl) == ERROR_MARK)
9309 return 0;
9311 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9312 where we look up the function at runtime. Such functions always take
9313 a first argument of type 'pointer to real context'.
9315 C++ should really be fixed to use DECL_CONTEXT for the real context,
9316 and use something else for the "virtual context". */
9317 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
9318 context
9319 = TYPE_MAIN_VARIANT
9320 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9321 else
9322 context = DECL_CONTEXT (decl);
9324 while (context && TREE_CODE (context) != FUNCTION_DECL)
9326 if (TREE_CODE (context) == BLOCK)
9327 context = BLOCK_SUPERCONTEXT (context);
9328 else
9329 context = get_containing_scope (context);
9332 return context;
9335 /* Return the innermost context enclosing DECL that is
9336 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9337 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9339 tree
9340 decl_type_context (const_tree decl)
9342 tree context = DECL_CONTEXT (decl);
9344 while (context)
9345 switch (TREE_CODE (context))
9347 case NAMESPACE_DECL:
9348 case TRANSLATION_UNIT_DECL:
9349 return NULL_TREE;
9351 case RECORD_TYPE:
9352 case UNION_TYPE:
9353 case QUAL_UNION_TYPE:
9354 return context;
9356 case TYPE_DECL:
9357 case FUNCTION_DECL:
9358 context = DECL_CONTEXT (context);
9359 break;
9361 case BLOCK:
9362 context = BLOCK_SUPERCONTEXT (context);
9363 break;
9365 default:
9366 gcc_unreachable ();
9369 return NULL_TREE;
9372 /* CALL is a CALL_EXPR. Return the declaration for the function
9373 called, or NULL_TREE if the called function cannot be
9374 determined. */
9376 tree
9377 get_callee_fndecl (const_tree call)
9379 tree addr;
9381 if (call == error_mark_node)
9382 return error_mark_node;
9384 /* It's invalid to call this function with anything but a
9385 CALL_EXPR. */
9386 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9388 /* The first operand to the CALL is the address of the function
9389 called. */
9390 addr = CALL_EXPR_FN (call);
9392 /* If there is no function, return early. */
9393 if (addr == NULL_TREE)
9394 return NULL_TREE;
9396 STRIP_NOPS (addr);
9398 /* If this is a readonly function pointer, extract its initial value. */
9399 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9400 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9401 && DECL_INITIAL (addr))
9402 addr = DECL_INITIAL (addr);
9404 /* If the address is just `&f' for some function `f', then we know
9405 that `f' is being called. */
9406 if (TREE_CODE (addr) == ADDR_EXPR
9407 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9408 return TREE_OPERAND (addr, 0);
9410 /* We couldn't figure out what was being called. */
9411 return NULL_TREE;
9414 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9415 return the associated function code, otherwise return CFN_LAST. */
9417 combined_fn
9418 get_call_combined_fn (const_tree call)
9420 /* It's invalid to call this function with anything but a CALL_EXPR. */
9421 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9423 if (!CALL_EXPR_FN (call))
9424 return as_combined_fn (CALL_EXPR_IFN (call));
9426 tree fndecl = get_callee_fndecl (call);
9427 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
9428 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9430 return CFN_LAST;
9433 /* Comparator of indices based on tree_node_counts. */
9435 static int
9436 tree_nodes_cmp (const void *p1, const void *p2)
9438 const unsigned *n1 = (const unsigned *)p1;
9439 const unsigned *n2 = (const unsigned *)p2;
9441 return tree_node_counts[*n1] - tree_node_counts[*n2];
9444 /* Comparator of indices based on tree_code_counts. */
9446 static int
9447 tree_codes_cmp (const void *p1, const void *p2)
9449 const unsigned *n1 = (const unsigned *)p1;
9450 const unsigned *n2 = (const unsigned *)p2;
9452 return tree_code_counts[*n1] - tree_code_counts[*n2];
9455 #define TREE_MEM_USAGE_SPACES 40
9457 /* Print debugging information about tree nodes generated during the compile,
9458 and any language-specific information. */
9460 void
9461 dump_tree_statistics (void)
9463 if (GATHER_STATISTICS)
9465 uint64_t total_nodes, total_bytes;
9466 fprintf (stderr, "\nKind Nodes Bytes\n");
9467 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9468 total_nodes = total_bytes = 0;
9471 auto_vec<unsigned> indices (all_kinds);
9472 for (unsigned i = 0; i < all_kinds; i++)
9473 indices.quick_push (i);
9474 indices.qsort (tree_nodes_cmp);
9476 for (unsigned i = 0; i < (int) all_kinds; i++)
9478 unsigned j = indices[i];
9479 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
9480 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
9481 SIZE_AMOUNT (tree_node_sizes[j]));
9482 total_nodes += tree_node_counts[j];
9483 total_bytes += tree_node_sizes[j];
9485 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9486 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
9487 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
9488 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9492 fprintf (stderr, "Code Nodes\n");
9493 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9495 auto_vec<unsigned> indices (MAX_TREE_CODES);
9496 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9497 indices.quick_push (i);
9498 indices.qsort (tree_codes_cmp);
9500 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
9502 unsigned j = indices[i];
9503 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
9504 get_tree_code_name ((enum tree_code) j),
9505 SIZE_AMOUNT (tree_code_counts[j]));
9507 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9508 fprintf (stderr, "\n");
9509 ssanames_print_statistics ();
9510 fprintf (stderr, "\n");
9511 phinodes_print_statistics ();
9512 fprintf (stderr, "\n");
9515 else
9516 fprintf (stderr, "(No per-node statistics)\n");
9518 print_type_hash_statistics ();
9519 print_debug_expr_statistics ();
9520 print_value_expr_statistics ();
9521 lang_hooks.print_statistics ();
9524 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9526 /* Generate a crc32 of the low BYTES bytes of VALUE. */
9528 unsigned
9529 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
9531 /* This relies on the raw feedback's top 4 bits being zero. */
9532 #define FEEDBACK(X) ((X) * 0x04c11db7)
9533 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
9534 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
9535 static const unsigned syndromes[16] =
9537 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
9538 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
9539 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
9540 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
9542 #undef FEEDBACK
9543 #undef SYNDROME
9545 value <<= (32 - bytes * 8);
9546 for (unsigned ix = bytes * 2; ix--; value <<= 4)
9548 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
9550 chksum = (chksum << 4) ^ feedback;
9553 return chksum;
9556 /* Generate a crc32 of a string. */
9558 unsigned
9559 crc32_string (unsigned chksum, const char *string)
9562 chksum = crc32_byte (chksum, *string);
9563 while (*string++);
9564 return chksum;
9567 /* P is a string that will be used in a symbol. Mask out any characters
9568 that are not valid in that context. */
9570 void
9571 clean_symbol_name (char *p)
9573 for (; *p; p++)
9574 if (! (ISALNUM (*p)
9575 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9576 || *p == '$'
9577 #endif
9578 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9579 || *p == '.'
9580 #endif
9582 *p = '_';
9585 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
9587 /* Create a unique anonymous identifier. The identifier is still a
9588 valid assembly label. */
9590 tree
9591 make_anon_name ()
9593 const char *fmt =
9594 #if !defined (NO_DOT_IN_LABEL)
9596 #elif !defined (NO_DOLLAR_IN_LABEL)
9598 #else
9600 #endif
9601 "_anon_%d";
9603 char buf[24];
9604 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
9605 gcc_checking_assert (len < int (sizeof (buf)));
9607 tree id = get_identifier_with_length (buf, len);
9608 IDENTIFIER_ANON_P (id) = true;
9610 return id;
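/* Illustrative usage sketch (editor's addition):

     tree a0 = make_anon_name ();
     tree a1 = make_anon_name ();

   a0 and a1 are distinct identifiers whose names end in "_anon_0" and
   "_anon_1", with a target-dependent prefix chosen by the #if chain
   above, and each has IDENTIFIER_ANON_P set.  */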
9613 /* Generate a name for a special-purpose function.
9614 The generated name may need to be unique across the whole link.
9615 Changes to this function may also require corresponding changes to
9616 xstrdup_mask_random.
9617 TYPE is some string to identify the purpose of this function to the
9618 linker or collect2; it must start with an uppercase letter,
9619 one of:
9620 I - for constructors
9621 D - for destructors
9622 N - for C++ anonymous namespaces
9623 F - for DWARF unwind frame information. */
9625 tree
9626 get_file_function_name (const char *type)
9628 char *buf;
9629 const char *p;
9630 char *q;
9632 /* If we already have a name we know to be unique, just use that. */
9633 if (first_global_object_name)
9634 p = q = ASTRDUP (first_global_object_name);
9635 /* If the target is handling the constructors/destructors, they
9636 will be local to this file and the name is only necessary for
9637 debugging purposes.
9638 We also assign sub_I and sub_D suffixes to constructors called from
9639 the global static constructors. These are always local. */
9640 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9641 || (strncmp (type, "sub_", 4) == 0
9642 && (type[4] == 'I' || type[4] == 'D')))
9644 const char *file = main_input_filename;
9645 if (! file)
9646 file = LOCATION_FILE (input_location);
9647 /* Just use the file's basename, because the full pathname
9648 might be quite long. */
9649 p = q = ASTRDUP (lbasename (file));
9651 else
9653 /* Otherwise, the name must be unique across the entire link.
9654 We don't have anything that we know to be unique to this translation
9655 unit, so use what we do have and throw in some randomness. */
9656 unsigned len;
9657 const char *name = weak_global_object_name;
9658 const char *file = main_input_filename;
9660 if (! name)
9661 name = "";
9662 if (! file)
9663 file = LOCATION_FILE (input_location);
9665 len = strlen (file);
9666 q = (char *) alloca (9 + 19 + len + 1);
9667 memcpy (q, file, len + 1);
9669 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9670 crc32_string (0, name), get_random_seed (false));
9672 p = q;
9675 clean_symbol_name (q);
9676 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9677 + strlen (type));
9679 /* Set up the name of the file-level functions we may need.
9680 Use a global object (which is already required to be unique over
9681 the program) rather than the file name (which imposes extra
9682 constraints). */
9683 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9685 return get_identifier (buf);
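/* Illustrative example (editor's addition): when first_global_object_name
   is "main", a request for a static-constructor name,

     tree id = get_file_function_name ("I");

   yields the identifier "_GLOBAL__I_main" via FILE_FUNCTION_FORMAT; other
   TYPE letters substitute accordingly.  */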
9688 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9690 /* Complain that the tree code of NODE does not match the expected 0
9691 terminated list of trailing codes. The trailing code list can be
9692 empty, for a more vague error message. FILE, LINE, and FUNCTION
9693 are of the caller. */
9695 void
9696 tree_check_failed (const_tree node, const char *file,
9697 int line, const char *function, ...)
9699 va_list args;
9700 const char *buffer;
9701 unsigned length = 0;
9702 enum tree_code code;
9704 va_start (args, function);
9705 while ((code = (enum tree_code) va_arg (args, int)))
9706 length += 4 + strlen (get_tree_code_name (code));
9707 va_end (args);
9708 if (length)
9710 char *tmp;
9711 va_start (args, function);
9712 length += strlen ("expected ");
9713 buffer = tmp = (char *) alloca (length);
9714 length = 0;
9715 while ((code = (enum tree_code) va_arg (args, int)))
9717 const char *prefix = length ? " or " : "expected ";
9719 strcpy (tmp + length, prefix);
9720 length += strlen (prefix);
9721 strcpy (tmp + length, get_tree_code_name (code));
9722 length += strlen (get_tree_code_name (code));
9724 va_end (args);
9726 else
9727 buffer = "unexpected node";
9729 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9730 buffer, get_tree_code_name (TREE_CODE (node)),
9731 function, trim_filename (file), line);
9734 /* Complain that the tree code of NODE matches one of the disallowed codes
9735 in the 0 terminated trailing list. FILE, LINE, and FUNCTION are of
9736 the caller. */
9738 void
9739 tree_not_check_failed (const_tree node, const char *file,
9740 int line, const char *function, ...)
9742 va_list args;
9743 char *buffer;
9744 unsigned length = 0;
9745 enum tree_code code;
9747 va_start (args, function);
9748 while ((code = (enum tree_code) va_arg (args, int)))
9749 length += 4 + strlen (get_tree_code_name (code));
9750 va_end (args);
9751 va_start (args, function);
9752 buffer = (char *) alloca (length);
9753 length = 0;
9754 while ((code = (enum tree_code) va_arg (args, int)))
9756 if (length)
9758 strcpy (buffer + length, " or ");
9759 length += 4;
9761 strcpy (buffer + length, get_tree_code_name (code));
9762 length += strlen (get_tree_code_name (code));
9764 va_end (args);
9766 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9767 buffer, get_tree_code_name (TREE_CODE (node)),
9768 function, trim_filename (file), line);
9771 /* Similar to tree_check_failed, except that we check for a class of tree
9772 code, given in CL. */
9774 void
9775 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9776 const char *file, int line, const char *function)
9778 internal_error
9779 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9780 TREE_CODE_CLASS_STRING (cl),
9781 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9782 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9785 /* Similar to tree_check_failed, except that instead of specifying a
9786 dozen codes, use the knowledge that they're all sequential. */
9788 void
9789 tree_range_check_failed (const_tree node, const char *file, int line,
9790 const char *function, enum tree_code c1,
9791 enum tree_code c2)
9793 char *buffer;
9794 unsigned length = 0;
9795 unsigned int c;
9797 for (c = c1; c <= c2; ++c)
9798 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9800 length += strlen ("expected ");
9801 buffer = (char *) alloca (length);
9802 length = 0;
9804 for (c = c1; c <= c2; ++c)
9806 const char *prefix = length ? " or " : "expected ";
9808 strcpy (buffer + length, prefix);
9809 length += strlen (prefix);
9810 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9811 length += strlen (get_tree_code_name ((enum tree_code) c));
9814 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9815 buffer, get_tree_code_name (TREE_CODE (node)),
9816 function, trim_filename (file), line);
9820 /* Similar to tree_check_failed, except that we check that a tree does
9821 not belong to the class of tree codes given in CL. */
9823 void
9824 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9825 const char *file, int line, const char *function)
9827 internal_error
9828 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9829 TREE_CODE_CLASS_STRING (cl),
9830 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9831 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9835 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9837 void
9838 omp_clause_check_failed (const_tree node, const char *file, int line,
9839 const char *function, enum omp_clause_code code)
9841 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
9842 "in %s, at %s:%d",
9843 omp_clause_code_name[code],
9844 get_tree_code_name (TREE_CODE (node)),
9845 function, trim_filename (file), line);
9849 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9851 void
9852 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9853 const char *function, enum omp_clause_code c1,
9854 enum omp_clause_code c2)
9856 char *buffer;
9857 unsigned length = 0;
9858 unsigned int c;
9860 for (c = c1; c <= c2; ++c)
9861 length += 4 + strlen (omp_clause_code_name[c]);
9863 length += strlen ("expected ");
9864 buffer = (char *) alloca (length);
9865 length = 0;
9867 for (c = c1; c <= c2; ++c)
9869 const char *prefix = length ? " or " : "expected ";
9871 strcpy (buffer + length, prefix);
9872 length += strlen (prefix);
9873 strcpy (buffer + length, omp_clause_code_name[c]);
9874 length += strlen (omp_clause_code_name[c]);
9877 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9878 buffer, omp_clause_code_name[OMP_CLAUSE_CODE (node)],
9879 function, trim_filename (file), line);
9883 #undef DEFTREESTRUCT
9884 #define DEFTREESTRUCT(VAL, NAME) NAME,
9886 static const char *ts_enum_names[] = {
9887 #include "treestruct.def"
9889 #undef DEFTREESTRUCT
9891 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9893 /* Similar to tree_class_check_failed, except that we check
9894 whether CODE contains the tree structure identified by EN. */
9896 void
9897 tree_contains_struct_check_failed (const_tree node,
9898 const enum tree_node_structure_enum en,
9899 const char *file, int line,
9900 const char *function)
9902 internal_error
9903 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9904 TS_ENUM_NAME (en),
9905 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9909 /* Similar to above, except that the check is for the bounds of a
9910 tree_int_cst's (dynamically sized) vector. */
9912 void
9913 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9914 const char *function)
9916 internal_error
9917 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9918 "at %s:%d",
9919 idx + 1, len, function, trim_filename (file), line);
9922 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9923 (dynamically sized) vector. */
9925 void
9926 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9927 const char *function)
9929 internal_error
9930 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9931 idx + 1, len, function, trim_filename (file), line);
9934 /* Similar to above, except that the check is for the bounds of the operand
9935 vector of an expression node EXP. */
9937 void
9938 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9939 int line, const char *function)
9941 enum tree_code code = TREE_CODE (exp);
9942 internal_error
9943 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9944 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9945 function, trim_filename (file), line);
9948 /* Similar to above, except that the check is for the number of
9949 operands of an OMP_CLAUSE node. */
9951 void
9952 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9953 int line, const char *function)
9955 internal_error
9956 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9957 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9958 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9959 trim_filename (file), line);
9961 #endif /* ENABLE_TREE_CHECKING */
9963 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9964 and mapped to the machine mode MODE. Initialize its fields and build
9965 the information necessary for debugging output. */
9967 static tree
9968 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9970 tree t;
9971 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9973 t = make_node (VECTOR_TYPE);
9974 TREE_TYPE (t) = mv_innertype;
9975 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9976 SET_TYPE_MODE (t, mode);
9978 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9979 SET_TYPE_STRUCTURAL_EQUALITY (t);
9980 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9981 || mode != VOIDmode)
9982 && !VECTOR_BOOLEAN_TYPE_P (t))
9983 TYPE_CANONICAL (t)
9984 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9986 layout_type (t);
9988 hashval_t hash = type_hash_canon_hash (t);
9989 t = type_hash_canon (hash, t);
9991 /* We have built a main variant, based on the main variant of the
9992 inner type. Use it to build the variant we return. */
9993 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9994 && TREE_TYPE (t) != innertype)
9995 return build_type_attribute_qual_variant (t,
9996 TYPE_ATTRIBUTES (innertype),
9997 TYPE_QUALS (innertype));
9999 return t;
10002 static tree
10003 make_or_reuse_type (unsigned size, int unsignedp)
10005 int i;
10007 if (size == INT_TYPE_SIZE)
10008 return unsignedp ? unsigned_type_node : integer_type_node;
10009 if (size == CHAR_TYPE_SIZE)
10010 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
10011 if (size == SHORT_TYPE_SIZE)
10012 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
10013 if (size == LONG_TYPE_SIZE)
10014 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
10015 if (size == LONG_LONG_TYPE_SIZE)
10016 return (unsignedp ? long_long_unsigned_type_node
10017 : long_long_integer_type_node);
10019 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10020 if (size == int_n_data[i].bitsize
10021 && int_n_enabled_p[i])
10022 return (unsignedp ? int_n_trees[i].unsigned_type
10023 : int_n_trees[i].signed_type);
10025 if (unsignedp)
10026 return make_unsigned_type (size);
10027 else
10028 return make_signed_type (size);
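/* Editorial example (not in the original source): on a typical target where
   INT_TYPE_SIZE is 32 and LONG_LONG_TYPE_SIZE is 64, the lookups above simply
   reuse the standard nodes, e.g.

     make_or_reuse_type (32, 1)   ->  unsigned_type_node
     make_or_reuse_type (64, 0)   ->  long_integer_type_node (LP64) or
                                      long_long_integer_type_node otherwise

   Only sizes that match none of the C integer types fall through to
   make_signed_type/make_unsigned_type.  */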
10031 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
10033 static tree
10034 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
10036 if (satp)
10038 if (size == SHORT_FRACT_TYPE_SIZE)
10039 return unsignedp ? sat_unsigned_short_fract_type_node
10040 : sat_short_fract_type_node;
10041 if (size == FRACT_TYPE_SIZE)
10042 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
10043 if (size == LONG_FRACT_TYPE_SIZE)
10044 return unsignedp ? sat_unsigned_long_fract_type_node
10045 : sat_long_fract_type_node;
10046 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10047 return unsignedp ? sat_unsigned_long_long_fract_type_node
10048 : sat_long_long_fract_type_node;
10050 else
10052 if (size == SHORT_FRACT_TYPE_SIZE)
10053 return unsignedp ? unsigned_short_fract_type_node
10054 : short_fract_type_node;
10055 if (size == FRACT_TYPE_SIZE)
10056 return unsignedp ? unsigned_fract_type_node : fract_type_node;
10057 if (size == LONG_FRACT_TYPE_SIZE)
10058 return unsignedp ? unsigned_long_fract_type_node
10059 : long_fract_type_node;
10060 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10061 return unsignedp ? unsigned_long_long_fract_type_node
10062 : long_long_fract_type_node;
10065 return make_fract_type (size, unsignedp, satp);
10068 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
10070 static tree
10071 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
10073 if (satp)
10075 if (size == SHORT_ACCUM_TYPE_SIZE)
10076 return unsignedp ? sat_unsigned_short_accum_type_node
10077 : sat_short_accum_type_node;
10078 if (size == ACCUM_TYPE_SIZE)
10079 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10080 if (size == LONG_ACCUM_TYPE_SIZE)
10081 return unsignedp ? sat_unsigned_long_accum_type_node
10082 : sat_long_accum_type_node;
10083 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10084 return unsignedp ? sat_unsigned_long_long_accum_type_node
10085 : sat_long_long_accum_type_node;
10087 else
10089 if (size == SHORT_ACCUM_TYPE_SIZE)
10090 return unsignedp ? unsigned_short_accum_type_node
10091 : short_accum_type_node;
10092 if (size == ACCUM_TYPE_SIZE)
10093 return unsignedp ? unsigned_accum_type_node : accum_type_node;
10094 if (size == LONG_ACCUM_TYPE_SIZE)
10095 return unsignedp ? unsigned_long_accum_type_node
10096 : long_accum_type_node;
10097 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10098 return unsignedp ? unsigned_long_long_accum_type_node
10099 : long_long_accum_type_node;
10102 return make_accum_type (size, unsignedp, satp);
10106 /* Create an atomic variant node for TYPE. This routine is called
10107 during initialization of data types to create the 5 basic atomic
10108 types. The generic build_variant_type function requires these to
10109 already be set up in order to function properly, so cannot be
10110 called from there. If ALIGN is non-zero, then ensure alignment is
10111 overridden to this value. */
10113 static tree
10114 build_atomic_base (tree type, unsigned int align)
10116 tree t;
10118 /* Make sure it's not already registered. */
10119 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10120 return t;
10122 t = build_variant_type_copy (type);
10123 set_type_quals (t, TYPE_QUAL_ATOMIC);
10125 if (align)
10126 SET_TYPE_ALIGN (t, align);
10128 return t;
10131 /* Information about the _FloatN and _FloatNx types. This must be in
10132 the same order as the corresponding TI_* enum values. */
10133 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
10135 { 16, false },
10136 { 32, false },
10137 { 64, false },
10138 { 128, false },
10139 { 32, true },
10140 { 64, true },
10141 { 128, true },
10145 /* Create nodes for all integer types (and error_mark_node) using the sizes
10146 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10148 void
10149 build_common_tree_nodes (bool signed_char)
10151 int i;
10153 error_mark_node = make_node (ERROR_MARK);
10154 TREE_TYPE (error_mark_node) = error_mark_node;
10156 initialize_sizetypes ();
10158 /* Define both `signed char' and `unsigned char'. */
10159 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10160 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10161 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10162 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10164 /* Define `char', which is like either `signed char' or `unsigned char'
10165 but not the same as either. */
10166 char_type_node
10167 = (signed_char
10168 ? make_signed_type (CHAR_TYPE_SIZE)
10169 : make_unsigned_type (CHAR_TYPE_SIZE));
10170 TYPE_STRING_FLAG (char_type_node) = 1;
10172 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10173 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10174 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10175 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10176 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10177 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10178 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10179 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10181 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10183 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10184 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10186 if (int_n_enabled_p[i])
10188 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10189 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10193 /* Define a boolean type. This type only represents boolean values but
10194 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10195 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10196 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10197 TYPE_PRECISION (boolean_type_node) = 1;
10198 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10200 /* Define what type to use for size_t. */
10201 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10202 size_type_node = unsigned_type_node;
10203 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10204 size_type_node = long_unsigned_type_node;
10205 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10206 size_type_node = long_long_unsigned_type_node;
10207 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10208 size_type_node = short_unsigned_type_node;
10209 else
10211 int i;
10213 size_type_node = NULL_TREE;
10214 for (i = 0; i < NUM_INT_N_ENTS; i++)
10215 if (int_n_enabled_p[i])
10217 char name[50], altname[50];
10218 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10219 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
10221 if (strcmp (name, SIZE_TYPE) == 0
10222 || strcmp (altname, SIZE_TYPE) == 0)
10224 size_type_node = int_n_trees[i].unsigned_type;
10227 if (size_type_node == NULL_TREE)
10228 gcc_unreachable ();
10231 /* Define what type to use for ptrdiff_t. */
10232 if (strcmp (PTRDIFF_TYPE, "int") == 0)
10233 ptrdiff_type_node = integer_type_node;
10234 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
10235 ptrdiff_type_node = long_integer_type_node;
10236 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
10237 ptrdiff_type_node = long_long_integer_type_node;
10238 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
10239 ptrdiff_type_node = short_integer_type_node;
10240 else
10242 ptrdiff_type_node = NULL_TREE;
10243 for (int i = 0; i < NUM_INT_N_ENTS; i++)
10244 if (int_n_enabled_p[i])
10246 char name[50], altname[50];
10247 sprintf (name, "__int%d", int_n_data[i].bitsize);
10248 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
10250 if (strcmp (name, PTRDIFF_TYPE) == 0
10251 || strcmp (altname, PTRDIFF_TYPE) == 0)
10252 ptrdiff_type_node = int_n_trees[i].signed_type;
10254 if (ptrdiff_type_node == NULL_TREE)
10255 gcc_unreachable ();
10258 /* Fill in the rest of the sized types. Reuse existing type nodes
10259 when possible. */
10260 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10261 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10262 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10263 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10264 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10266 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10267 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10268 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10269 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10270 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10272 /* Don't call build_qualified_type for atomics. That routine does
10273 special processing for atomics, and until they are initialized
10274 it's better not to make that call.
10276 Check to see if there is a target override for atomic types. */
10278 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10279 targetm.atomic_align_for_mode (QImode));
10280 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10281 targetm.atomic_align_for_mode (HImode));
10282 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10283 targetm.atomic_align_for_mode (SImode));
10284 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10285 targetm.atomic_align_for_mode (DImode));
10286 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10287 targetm.atomic_align_for_mode (TImode));
10289 access_public_node = get_identifier ("public");
10290 access_protected_node = get_identifier ("protected");
10291 access_private_node = get_identifier ("private");
10293 /* Define these next since types below may use them. */
10294 integer_zero_node = build_int_cst (integer_type_node, 0);
10295 integer_one_node = build_int_cst (integer_type_node, 1);
10296 integer_three_node = build_int_cst (integer_type_node, 3);
10297 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10299 size_zero_node = size_int (0);
10300 size_one_node = size_int (1);
10301 bitsize_zero_node = bitsize_int (0);
10302 bitsize_one_node = bitsize_int (1);
10303 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10305 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10306 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10308 void_type_node = make_node (VOID_TYPE);
10309 layout_type (void_type_node);
10311 /* We are not going to have real types in C with less than byte alignment,
10312 so we might as well not have any types that claim to have it. */
10313 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10314 TYPE_USER_ALIGN (void_type_node) = 0;
10316 void_node = make_node (VOID_CST);
10317 TREE_TYPE (void_node) = void_type_node;
10319 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10320 layout_type (TREE_TYPE (null_pointer_node));
10322 ptr_type_node = build_pointer_type (void_type_node);
10323 const_ptr_type_node
10324 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10325 for (unsigned i = 0;
10326 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
10327 ++i)
10328 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
10330 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10332 float_type_node = make_node (REAL_TYPE);
10333 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10334 layout_type (float_type_node);
10336 double_type_node = make_node (REAL_TYPE);
10337 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10338 layout_type (double_type_node);
10340 long_double_type_node = make_node (REAL_TYPE);
10341 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10342 layout_type (long_double_type_node);
10344 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10346 int n = floatn_nx_types[i].n;
10347 bool extended = floatn_nx_types[i].extended;
10348 scalar_float_mode mode;
10349 if (!targetm.floatn_mode (n, extended).exists (&mode))
10350 continue;
10351 int precision = GET_MODE_PRECISION (mode);
10352 /* Work around the rs6000 KFmode having precision 113 not
10353 128. */
10354 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
10355 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
10356 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
10357 if (!extended)
10358 gcc_assert (min_precision == n);
10359 if (precision < min_precision)
10360 precision = min_precision;
10361 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
10362 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
10363 layout_type (FLOATN_NX_TYPE_NODE (i));
10364 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
10367 float_ptr_type_node = build_pointer_type (float_type_node);
10368 double_ptr_type_node = build_pointer_type (double_type_node);
10369 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10370 integer_ptr_type_node = build_pointer_type (integer_type_node);
10372 /* Fixed size integer types. */
10373 uint16_type_node = make_or_reuse_type (16, 1);
10374 uint32_type_node = make_or_reuse_type (32, 1);
10375 uint64_type_node = make_or_reuse_type (64, 1);
10376 if (targetm.scalar_mode_supported_p (TImode))
10377 uint128_type_node = make_or_reuse_type (128, 1);
10379 /* Decimal float types. */
10380 if (targetm.decimal_float_supported_p ())
10382 dfloat32_type_node = make_node (REAL_TYPE);
10383 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10384 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10385 layout_type (dfloat32_type_node);
10387 dfloat64_type_node = make_node (REAL_TYPE);
10388 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10389 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10390 layout_type (dfloat64_type_node);
10392 dfloat128_type_node = make_node (REAL_TYPE);
10393 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10394 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10395 layout_type (dfloat128_type_node);
10398 complex_integer_type_node = build_complex_type (integer_type_node, true);
10399 complex_float_type_node = build_complex_type (float_type_node, true);
10400 complex_double_type_node = build_complex_type (double_type_node, true);
10401 complex_long_double_type_node = build_complex_type (long_double_type_node,
10402 true);
10404 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10406 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
10407 COMPLEX_FLOATN_NX_TYPE_NODE (i)
10408 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
10411 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10412 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10413 sat_ ## KIND ## _type_node = \
10414 make_sat_signed_ ## KIND ## _type (SIZE); \
10415 sat_unsigned_ ## KIND ## _type_node = \
10416 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10417 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10418 unsigned_ ## KIND ## _type_node = \
10419 make_unsigned_ ## KIND ## _type (SIZE);
10421 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10422 sat_ ## WIDTH ## KIND ## _type_node = \
10423 make_sat_signed_ ## KIND ## _type (SIZE); \
10424 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10425 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10426 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10427 unsigned_ ## WIDTH ## KIND ## _type_node = \
10428 make_unsigned_ ## KIND ## _type (SIZE);
10430 /* Make fixed-point type nodes based on four different widths. */
10431 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10432 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10433 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10434 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10435 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10437 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10438 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10439 NAME ## _type_node = \
10440 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10441 u ## NAME ## _type_node = \
10442 make_or_reuse_unsigned_ ## KIND ## _type \
10443 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10444 sat_ ## NAME ## _type_node = \
10445 make_or_reuse_sat_signed_ ## KIND ## _type \
10446 (GET_MODE_BITSIZE (MODE ## mode)); \
10447 sat_u ## NAME ## _type_node = \
10448 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10449 (GET_MODE_BITSIZE (U ## MODE ## mode));
10451 /* Fixed-point type and mode nodes. */
10452 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10453 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10454 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10455 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10456 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10457 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10458 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10459 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10460 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10461 MAKE_FIXED_MODE_NODE (accum, da, DA)
10462 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10465 tree t = targetm.build_builtin_va_list ();
10467 /* Many back-ends define record types without setting TYPE_NAME.
10468 If we copied the record type here, we'd keep the original
10469 record type without a name. This breaks name mangling. So,
10470 don't copy record types and let c_common_nodes_and_builtins()
10471 declare the type to be __builtin_va_list. */
10472 if (TREE_CODE (t) != RECORD_TYPE)
10473 t = build_variant_type_copy (t);
10475 va_list_type_node = t;
10478 /* SCEV analyzer global shared trees. */
10479 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
10480 TREE_TYPE (chrec_dont_know) = void_type_node;
10481 chrec_known = make_node (SCEV_KNOWN);
10482 TREE_TYPE (chrec_known) = void_type_node;
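/* Minimal usage sketch (editorial, not in the original source): once
   build_common_tree_nodes has run, front ends and the middle end build on
   the global nodes initialized above, e.g.

     tree forty_two = build_int_cst (integer_type_node, 42);
     tree nbytes = build_int_cst (size_type_node, 16);

   The variable names are illustrative only; integer_type_node and
   size_type_node are the nodes set up in this function.  */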
10485 /* Modify DECL for given flags.
10486 TM_PURE attribute is set only on types, so the function will modify
10487 DECL's type when ECF_TM_PURE is used. */
10489 void
10490 set_call_expr_flags (tree decl, int flags)
10492 if (flags & ECF_NOTHROW)
10493 TREE_NOTHROW (decl) = 1;
10494 if (flags & ECF_CONST)
10495 TREE_READONLY (decl) = 1;
10496 if (flags & ECF_PURE)
10497 DECL_PURE_P (decl) = 1;
10498 if (flags & ECF_LOOPING_CONST_OR_PURE)
10499 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10500 if (flags & ECF_NOVOPS)
10501 DECL_IS_NOVOPS (decl) = 1;
10502 if (flags & ECF_NORETURN)
10503 TREE_THIS_VOLATILE (decl) = 1;
10504 if (flags & ECF_MALLOC)
10505 DECL_IS_MALLOC (decl) = 1;
10506 if (flags & ECF_RETURNS_TWICE)
10507 DECL_IS_RETURNS_TWICE (decl) = 1;
10508 if (flags & ECF_LEAF)
10509 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10510 NULL, DECL_ATTRIBUTES (decl));
10511 if (flags & ECF_COLD)
10512 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
10513 NULL, DECL_ATTRIBUTES (decl));
10514 if (flags & ECF_RET1)
10515 DECL_ATTRIBUTES (decl)
10516 = tree_cons (get_identifier ("fn spec"),
10517 build_tree_list (NULL_TREE, build_string (1, "1")),
10518 DECL_ATTRIBUTES (decl));
10519 if ((flags & ECF_TM_PURE) && flag_tm)
10520 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10521 /* Looping const or pure is implied by noreturn.
10522 There is currently no way to declare looping const or looping pure alone. */
10523 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10524 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10528 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10530 static void
10531 local_define_builtin (const char *name, tree type, enum built_in_function code,
10532 const char *library_name, int ecf_flags)
10534 tree decl;
10536 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10537 library_name, NULL_TREE);
10538 set_call_expr_flags (decl, ecf_flags);
10540 set_builtin_decl (code, decl, true);
10543 /* Call this function after instantiating all builtins that the language
10544 front end cares about. This will build the rest of the builtins
10545 and internal functions that are relied upon by the tree optimizers and
10546 the middle-end. */
10548 void
10549 build_common_builtin_nodes (void)
10551 tree tmp, ftype;
10552 int ecf_flags;
10554 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10555 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10557 ftype = build_function_type (void_type_node, void_list_node);
10558 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10559 local_define_builtin ("__builtin_unreachable", ftype,
10560 BUILT_IN_UNREACHABLE,
10561 "__builtin_unreachable",
10562 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10563 | ECF_CONST | ECF_COLD);
10564 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10565 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10566 "abort",
10567 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
10570 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10571 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10573 ftype = build_function_type_list (ptr_type_node,
10574 ptr_type_node, const_ptr_type_node,
10575 size_type_node, NULL_TREE);
10577 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10578 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10579 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10580 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10581 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10582 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10585 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10587 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10588 const_ptr_type_node, size_type_node,
10589 NULL_TREE);
10590 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10591 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10594 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10596 ftype = build_function_type_list (ptr_type_node,
10597 ptr_type_node, integer_type_node,
10598 size_type_node, NULL_TREE);
10599 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10600 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10603 /* If we're checking the stack, `alloca' can throw. */
10604 const int alloca_flags
10605 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10607 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10609 ftype = build_function_type_list (ptr_type_node,
10610 size_type_node, NULL_TREE);
10611 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10612 "alloca", alloca_flags);
10615 ftype = build_function_type_list (ptr_type_node, size_type_node,
10616 size_type_node, NULL_TREE);
10617 local_define_builtin ("__builtin_alloca_with_align", ftype,
10618 BUILT_IN_ALLOCA_WITH_ALIGN,
10619 "__builtin_alloca_with_align",
10620 alloca_flags);
10622 ftype = build_function_type_list (ptr_type_node, size_type_node,
10623 size_type_node, size_type_node, NULL_TREE);
10624 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
10625 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
10626 "__builtin_alloca_with_align_and_max",
10627 alloca_flags);
10629 ftype = build_function_type_list (void_type_node,
10630 ptr_type_node, ptr_type_node,
10631 ptr_type_node, NULL_TREE);
10632 local_define_builtin ("__builtin_init_trampoline", ftype,
10633 BUILT_IN_INIT_TRAMPOLINE,
10634 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10635 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10636 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10637 "__builtin_init_heap_trampoline",
10638 ECF_NOTHROW | ECF_LEAF);
10639 local_define_builtin ("__builtin_init_descriptor", ftype,
10640 BUILT_IN_INIT_DESCRIPTOR,
10641 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
10643 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10644 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10645 BUILT_IN_ADJUST_TRAMPOLINE,
10646 "__builtin_adjust_trampoline",
10647 ECF_CONST | ECF_NOTHROW);
10648 local_define_builtin ("__builtin_adjust_descriptor", ftype,
10649 BUILT_IN_ADJUST_DESCRIPTOR,
10650 "__builtin_adjust_descriptor",
10651 ECF_CONST | ECF_NOTHROW);
10653 ftype = build_function_type_list (void_type_node,
10654 ptr_type_node, ptr_type_node, NULL_TREE);
10655 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10656 BUILT_IN_NONLOCAL_GOTO,
10657 "__builtin_nonlocal_goto",
10658 ECF_NORETURN | ECF_NOTHROW);
10660 ftype = build_function_type_list (void_type_node,
10661 ptr_type_node, ptr_type_node, NULL_TREE);
10662 local_define_builtin ("__builtin_setjmp_setup", ftype,
10663 BUILT_IN_SETJMP_SETUP,
10664 "__builtin_setjmp_setup", ECF_NOTHROW);
10666 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10667 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10668 BUILT_IN_SETJMP_RECEIVER,
10669 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10671 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10672 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10673 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10675 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10676 local_define_builtin ("__builtin_stack_restore", ftype,
10677 BUILT_IN_STACK_RESTORE,
10678 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10680 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10681 const_ptr_type_node, size_type_node,
10682 NULL_TREE);
10683 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10684 "__builtin_memcmp_eq",
10685 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10687 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
10688 "__builtin_strncmp_eq",
10689 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10691 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
10692 "__builtin_strcmp_eq",
10693 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10695 /* If there's a possibility that we might use the ARM EABI, build the
10696 alternate __cxa_end_cleanup node used to resume from C++. */
10697 if (targetm.arm_eabi_unwinder)
10699 ftype = build_function_type_list (void_type_node, NULL_TREE);
10700 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10701 BUILT_IN_CXA_END_CLEANUP,
10702 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10705 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10706 local_define_builtin ("__builtin_unwind_resume", ftype,
10707 BUILT_IN_UNWIND_RESUME,
10708 ((targetm_common.except_unwind_info (&global_options)
10709 == UI_SJLJ)
10710 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10711 ECF_NORETURN);
10713 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10715 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10716 NULL_TREE);
10717 local_define_builtin ("__builtin_return_address", ftype,
10718 BUILT_IN_RETURN_ADDRESS,
10719 "__builtin_return_address",
10720 ECF_NOTHROW);
10723 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10724 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10726 ftype = build_function_type_list (void_type_node, ptr_type_node,
10727 ptr_type_node, NULL_TREE);
10728 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10729 local_define_builtin ("__cyg_profile_func_enter", ftype,
10730 BUILT_IN_PROFILE_FUNC_ENTER,
10731 "__cyg_profile_func_enter", 0);
10732 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10733 local_define_builtin ("__cyg_profile_func_exit", ftype,
10734 BUILT_IN_PROFILE_FUNC_EXIT,
10735 "__cyg_profile_func_exit", 0);
10738 /* The exception object and filter values from the runtime. The argument
10739 must be zero before exception lowering, i.e. from the front end. After
10740 exception lowering, it will be the region number for the exception
10741 landing pad. These functions are PURE instead of CONST to prevent
10742 them from being hoisted past the exception edge that will initialize
10743 its value in the landing pad. */
10744 ftype = build_function_type_list (ptr_type_node,
10745 integer_type_node, NULL_TREE);
10746 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10747 /* Only use TM_PURE if we have TM language support. */
10748 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10749 ecf_flags |= ECF_TM_PURE;
10750 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10751 "__builtin_eh_pointer", ecf_flags);
10753 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10754 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10755 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10756 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10758 ftype = build_function_type_list (void_type_node,
10759 integer_type_node, integer_type_node,
10760 NULL_TREE);
10761 local_define_builtin ("__builtin_eh_copy_values", ftype,
10762 BUILT_IN_EH_COPY_VALUES,
10763 "__builtin_eh_copy_values", ECF_NOTHROW);
10765 /* Complex multiplication and division. These are handled as builtins
10766 rather than optabs because emit_library_call_value doesn't support
10767 complex. Further, we can do slightly better with folding these
10768 beasties if the real and imaginary parts of the arguments are separate. */
10770 int mode;
10772 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10774 char mode_name_buf[4], *q;
10775 const char *p;
10776 enum built_in_function mcode, dcode;
10777 tree type, inner_type;
10778 const char *prefix = "__";
10780 if (targetm.libfunc_gnu_prefix)
10781 prefix = "__gnu_";
10783 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10784 if (type == NULL)
10785 continue;
10786 inner_type = TREE_TYPE (type);
10788 ftype = build_function_type_list (type, inner_type, inner_type,
10789 inner_type, inner_type, NULL_TREE);
10791 mcode = ((enum built_in_function)
10792 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10793 dcode = ((enum built_in_function)
10794 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10796 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10797 *q = TOLOWER (*p);
10798 *q = '\0';
10800 /* For -ftrapping-math these should throw from a former
10801 -fnon-call-exception stmt. */
10802 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10803 NULL);
10804 local_define_builtin (built_in_names[mcode], ftype, mcode,
10805 built_in_names[mcode],
10806 ECF_CONST | ECF_LEAF);
10808 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10809 NULL);
10810 local_define_builtin (built_in_names[dcode], ftype, dcode,
10811 built_in_names[dcode],
10812 ECF_CONST | ECF_LEAF);
10816 init_internal_fns ();
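/* Worked example (editorial note, not in the original source): for the
   complex float mode SC the loop above lowercases the mode name to "sc",
   so with the default "__" prefix it registers __mulsc3 and __divsc3
   (the libgcc complex-float support routines), and with
   targetm.libfunc_gnu_prefix it registers __gnu_mulsc3 and __gnu_divsc3
   instead.  */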
10819 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10820 better way.
10822 If we requested a pointer to a vector, build up the pointers that
10823 we stripped off while looking for the inner type. Similarly for
10824 return values from functions.
10826 The argument TYPE is the top of the chain, and BOTTOM is the
10827 new type which we will point to. */
10829 tree
10830 reconstruct_complex_type (tree type, tree bottom)
10832 tree inner, outer;
10834 if (TREE_CODE (type) == POINTER_TYPE)
10836 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10837 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10838 TYPE_REF_CAN_ALIAS_ALL (type));
10840 else if (TREE_CODE (type) == REFERENCE_TYPE)
10842 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10843 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10844 TYPE_REF_CAN_ALIAS_ALL (type));
10846 else if (TREE_CODE (type) == ARRAY_TYPE)
10848 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10849 outer = build_array_type (inner, TYPE_DOMAIN (type));
10851 else if (TREE_CODE (type) == FUNCTION_TYPE)
10853 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10854 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10856 else if (TREE_CODE (type) == METHOD_TYPE)
10858 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10859 /* The build_method_type_directly() routine prepends 'this' to the argument
10860 list, so we must compensate by getting rid of it. */
10861 outer
10862 = build_method_type_directly
10863 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10864 inner,
10865 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10867 else if (TREE_CODE (type) == OFFSET_TYPE)
10869 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10870 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10872 else
10873 return bottom;
10875 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10876 TYPE_QUALS (type));
10879 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10880 the inner type. */
10881 tree
10882 build_vector_type_for_mode (tree innertype, machine_mode mode)
10884 poly_int64 nunits;
10885 unsigned int bitsize;
10887 switch (GET_MODE_CLASS (mode))
10889 case MODE_VECTOR_BOOL:
10890 case MODE_VECTOR_INT:
10891 case MODE_VECTOR_FLOAT:
10892 case MODE_VECTOR_FRACT:
10893 case MODE_VECTOR_UFRACT:
10894 case MODE_VECTOR_ACCUM:
10895 case MODE_VECTOR_UACCUM:
10896 nunits = GET_MODE_NUNITS (mode);
10897 break;
10899 case MODE_INT:
10900 /* Check that there are no leftover bits. */
10901 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10902 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10903 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10904 break;
10906 default:
10907 gcc_unreachable ();
10910 return make_vector_type (innertype, nunits, mode);
10913 /* Similarly, but takes the inner type and number of units, which must be
10914 a power of two. */
10916 tree
10917 build_vector_type (tree innertype, poly_int64 nunits)
10919 return make_vector_type (innertype, nunits, VOIDmode);
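/* Editorial sketch (not in the original source): passing VOIDmode lets
   layout_type choose the mode, so e.g.

     tree v4sf = build_vector_type (float_type_node, 4);

   builds a four-element float vector type whose machine mode is whatever
   the target provides for that shape (a vector mode if one exists,
   BLKmode otherwise).  The name v4sf is purely illustrative.  */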
10922 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10924 tree
10925 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10927 gcc_assert (mask_mode != BLKmode);
10929 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
10930 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10931 tree bool_type = build_nonstandard_boolean_type (esize);
10933 return make_vector_type (bool_type, nunits, mask_mode);
10936 /* Build a vector type that holds one boolean result for each element of
10937 vector type VECTYPE. The public interface for this operation is
10938 truth_type_for. */
10940 static tree
10941 build_truth_vector_type_for (tree vectype)
10943 machine_mode vector_mode = TYPE_MODE (vectype);
10944 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10946 machine_mode mask_mode;
10947 if (VECTOR_MODE_P (vector_mode)
10948 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10949 return build_truth_vector_type_for_mode (nunits, mask_mode);
10951 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10952 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10953 tree bool_type = build_nonstandard_boolean_type (esize);
10955 return make_vector_type (bool_type, nunits, VOIDmode);
10958 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10959 set. */
10961 tree
10962 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10964 tree t = make_vector_type (innertype, nunits, VOIDmode);
10965 tree cand;
10966 /* We always build the non-opaque variant before the opaque one,
10967 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10968 cand = TYPE_NEXT_VARIANT (t);
10969 if (cand
10970 && TYPE_VECTOR_OPAQUE (cand)
10971 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10972 return cand;
10973 /* Otherwise build a variant type and make sure to queue it after
10974 the non-opaque type. */
10975 cand = build_distinct_type_copy (t);
10976 TYPE_VECTOR_OPAQUE (cand) = true;
10977 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10978 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10979 TYPE_NEXT_VARIANT (t) = cand;
10980 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10981 return cand;
10984 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10986 wide_int
10987 vector_cst_int_elt (const_tree t, unsigned int i)
10989 /* First handle elements that are directly encoded. */
10990 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10991 if (i < encoded_nelts)
10992 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, i));
10994 /* Identify the pattern that contains element I and work out the index of
10995 the last encoded element for that pattern. */
10996 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10997 unsigned int pattern = i % npatterns;
10998 unsigned int count = i / npatterns;
10999 unsigned int final_i = encoded_nelts - npatterns + pattern;
11001 /* If there are no steps, the final encoded value is the right one. */
11002 if (!VECTOR_CST_STEPPED_P (t))
11003 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
11005 /* Otherwise work out the value from the last two encoded elements. */
11006 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
11007 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
11008 wide_int diff = wi::to_wide (v2) - wi::to_wide (v1);
11009 return wi::to_wide (v2) + (count - 2) * diff;
11012 /* Return the value of element I of VECTOR_CST T. */
11014 tree
11015 vector_cst_elt (const_tree t, unsigned int i)
11017 /* First handle elements that are directly encoded. */
11018 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
11019 if (i < encoded_nelts)
11020 return VECTOR_CST_ENCODED_ELT (t, i);
11022 /* If there are no steps, the final encoded value is the right one. */
11023 if (!VECTOR_CST_STEPPED_P (t))
11025 /* Identify the pattern that contains element I and work out the index of
11026 the last encoded element for that pattern. */
11027 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
11028 unsigned int pattern = i % npatterns;
11029 unsigned int final_i = encoded_nelts - npatterns + pattern;
11030 return VECTOR_CST_ENCODED_ELT (t, final_i);
11033 /* Otherwise work out the value from the last two encoded elements. */
11034 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
11035 vector_cst_int_elt (t, i));
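/* Worked example (editorial note, not in the original source): a stepped
   VECTOR_CST such as { 1, 2, 3, 4, ... } is encoded with one pattern and
   three encoded elements { 1, 2, 3 }.  Asking for element 5 takes the
   stepped path above with count = 5, v1 = 2 and v2 = 3, giving
   3 + (5 - 2) * (3 - 2) = 6, the expected sixth element.  */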
11038 /* Given an initializer INIT, return TRUE if INIT is zero or some
11039 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
11040 null, set *NONZERO if and only if INIT is known not to be all
11041 zeros. The combination of return value of false and *NONZERO
11042 false implies that INIT may but need not be all zeros. Other
11043 combinations indicate definitive answers. */
11045 bool
11046 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
11048 bool dummy;
11049 if (!nonzero)
11050 nonzero = &dummy;
11052 /* Conservatively clear NONZERO and set it only if INIT is definitely
11053 not all zero. */
11054 *nonzero = false;
11056 STRIP_NOPS (init);
11058 unsigned HOST_WIDE_INT off = 0;
11060 switch (TREE_CODE (init))
11062 case INTEGER_CST:
11063 if (integer_zerop (init))
11064 return true;
11066 *nonzero = true;
11067 return false;
11069 case REAL_CST:
11070 /* ??? Note that this is not correct for C4X float formats. There,
11071 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
11072 negative exponent. */
11073 if (real_zerop (init)
11074 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
11075 return true;
11077 *nonzero = true;
11078 return false;
11080 case FIXED_CST:
11081 if (fixed_zerop (init))
11082 return true;
11084 *nonzero = true;
11085 return false;
11087 case COMPLEX_CST:
11088 if (integer_zerop (init)
11089 || (real_zerop (init)
11090 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
11091 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
11092 return true;
11094 *nonzero = true;
11095 return false;
11097 case VECTOR_CST:
11098 if (VECTOR_CST_NPATTERNS (init) == 1
11099 && VECTOR_CST_DUPLICATE_P (init)
11100 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
11101 return true;
11103 *nonzero = true;
11104 return false;
11106 case CONSTRUCTOR:
11108 if (TREE_CLOBBER_P (init))
11109 return false;
11111 unsigned HOST_WIDE_INT idx;
11112 tree elt;
11114 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
11115 if (!initializer_zerop (elt, nonzero))
11116 return false;
11118 return true;
11121 case MEM_REF:
11123 tree arg = TREE_OPERAND (init, 0);
11124 if (TREE_CODE (arg) != ADDR_EXPR)
11125 return false;
11126 tree offset = TREE_OPERAND (init, 1);
11127 if (TREE_CODE (offset) != INTEGER_CST
11128 || !tree_fits_uhwi_p (offset))
11129 return false;
11130 off = tree_to_uhwi (offset);
11131 if (INT_MAX < off)
11132 return false;
11133 arg = TREE_OPERAND (arg, 0);
11134 if (TREE_CODE (arg) != STRING_CST)
11135 return false;
11136 init = arg;
11138 /* Fall through. */
11140 case STRING_CST:
11142 gcc_assert (off <= INT_MAX);
11144 int i = off;
11145 int n = TREE_STRING_LENGTH (init);
11146 if (n <= i)
11147 return false;
11149 /* We need to loop through all elements to handle cases like
11150 "\0" and "\0foobar". */
11151 for (i = 0; i < n; ++i)
11152 if (TREE_STRING_POINTER (init)[i] != '\0')
11154 *nonzero = true;
11155 return false;
11158 return true;
11161 default:
11162 return false;
11166 /* Return true if EXPR is an initializer expression in which every element
11167 is a constant that is numerically equal to 0 or 1. The elements do not
11168 need to be equal to each other. */
11170 bool
11171 initializer_each_zero_or_onep (const_tree expr)
11173 STRIP_ANY_LOCATION_WRAPPER (expr);
11175 switch (TREE_CODE (expr))
11177 case INTEGER_CST:
11178 return integer_zerop (expr) || integer_onep (expr);
11180 case REAL_CST:
11181 return real_zerop (expr) || real_onep (expr);
11183 case VECTOR_CST:
11185 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
11186 if (VECTOR_CST_STEPPED_P (expr)
11187 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
11188 return false;
11190 for (unsigned int i = 0; i < nelts; ++i)
11192 tree elt = vector_cst_elt (expr, i);
11193 if (!initializer_each_zero_or_onep (elt))
11194 return false;
11197 return true;
11200 default:
11201 return false;
11205 /* Check whether vector VEC consists of equal elements and whether
11206 the number of elements corresponds to the type of VEC.
11207 The function returns the first element of the vector
11208 or NULL_TREE if the vector is not uniform. */
11209 tree
11210 uniform_vector_p (const_tree vec)
11212 tree first, t;
11213 unsigned HOST_WIDE_INT i, nelts;
11215 if (vec == NULL_TREE)
11216 return NULL_TREE;
11218 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
11220 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
11221 return TREE_OPERAND (vec, 0);
11223 else if (TREE_CODE (vec) == VECTOR_CST)
11225 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
11226 return VECTOR_CST_ENCODED_ELT (vec, 0);
11227 return NULL_TREE;
11230 else if (TREE_CODE (vec) == CONSTRUCTOR
11231 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
11233 first = error_mark_node;
11235 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
11237 if (i == 0)
11239 first = t;
11240 continue;
11242 if (!operand_equal_p (first, t, 0))
11243 return NULL_TREE;
11245 if (i != nelts)
11246 return NULL_TREE;
11248 return first;
11251 return NULL_TREE;
11254 /* If the argument is an INTEGER_CST, return it. If the argument is a vector
11255 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
11256 return NULL_TREE.
11257 Look through location wrappers. */
11259 tree
11260 uniform_integer_cst_p (tree t)
11262 STRIP_ANY_LOCATION_WRAPPER (t);
11264 if (TREE_CODE (t) == INTEGER_CST)
11265 return t;
11267 if (VECTOR_TYPE_P (TREE_TYPE (t)))
11269 t = uniform_vector_p (t);
11270 if (t && TREE_CODE (t) == INTEGER_CST)
11271 return t;
11274 return NULL_TREE;
11277 /* If VECTOR_CST T has a single nonzero element, return the index of that
11278 element, otherwise return -1. */
11280 int
11281 single_nonzero_element (const_tree t)
11283 unsigned HOST_WIDE_INT nelts;
11284 unsigned int repeat_nelts;
11285 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
11286 repeat_nelts = nelts;
11287 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
11289 nelts = vector_cst_encoded_nelts (t);
11290 repeat_nelts = VECTOR_CST_NPATTERNS (t);
11292 else
11293 return -1;
11295 int res = -1;
11296 for (unsigned int i = 0; i < nelts; ++i)
11298 tree elt = vector_cst_elt (t, i);
11299 if (!integer_zerop (elt) && !real_zerop (elt))
11301 if (res >= 0 || i >= repeat_nelts)
11302 return -1;
11303 res = i;
11306 return res;
11309 /* Build an empty statement at location LOC. */
11311 tree
11312 build_empty_stmt (location_t loc)
11314 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11315 SET_EXPR_LOCATION (t, loc);
11316 return t;
11320 /* Build an OpenMP clause with code CODE. LOC is the location of the
11321 clause. */
11323 tree
11324 build_omp_clause (location_t loc, enum omp_clause_code code)
11326 tree t;
11327 int size, length;
11329 length = omp_clause_num_ops[code];
11330 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11332 record_node_allocation_statistics (OMP_CLAUSE, size);
11334 t = (tree) ggc_internal_alloc (size);
11335 memset (t, 0, size);
11336 TREE_SET_CODE (t, OMP_CLAUSE);
11337 OMP_CLAUSE_SET_CODE (t, code);
11338 OMP_CLAUSE_LOCATION (t) = loc;
11340 return t;
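/* Minimal sketch (editorial, not in the original source): a front end
   typically creates a clause, fills in its operands through the
   OMP_CLAUSE_* accessors from tree.h, and chains it onto a clause list:

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = decl;
     OMP_CLAUSE_CHAIN (c) = clauses;
     clauses = c;

   Here loc, decl and clauses are assumed to be supplied by the caller.  */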
11343 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
11344 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11345 Except for the CODE and operand count field, other storage for the
11346 object is initialized to zeros. */
11348 tree
11349 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
11351 tree t;
11352 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11354 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11355 gcc_assert (len >= 1);
11357 record_node_allocation_statistics (code, length);
11359 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11361 TREE_SET_CODE (t, code);
11363 /* Can't use TREE_OPERAND to store the length because if checking is
11364 enabled, it will try to check the length before we store it. :-P */
11365 t->exp.operands[0] = build_int_cst (sizetype, len);
11367 return t;
11370 /* Helper function for build_call_* functions; build a CALL_EXPR with
11371 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11372 the argument slots. */
11374 static tree
11375 build_call_1 (tree return_type, tree fn, int nargs)
11377 tree t;
11379 t = build_vl_exp (CALL_EXPR, nargs + 3);
11380 TREE_TYPE (t) = return_type;
11381 CALL_EXPR_FN (t) = fn;
11382 CALL_EXPR_STATIC_CHAIN (t) = NULL;
11384 return t;
11387 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11388 FN and a null static chain slot. NARGS is the number of call arguments
11389 which are specified as "..." arguments. */
11391 tree
11392 build_call_nary (tree return_type, tree fn, int nargs, ...)
11394 tree ret;
11395 va_list args;
11396 va_start (args, nargs);
11397 ret = build_call_valist (return_type, fn, nargs, args);
11398 va_end (args);
11399 return ret;
11402 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11403 FN and a null static chain slot. NARGS is the number of call arguments
11404 which are specified as a va_list ARGS. */
11406 tree
11407 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11409 tree t;
11410 int i;
11412 t = build_call_1 (return_type, fn, nargs);
11413 for (i = 0; i < nargs; i++)
11414 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11415 process_call_operands (t);
11416 return t;
11419 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11420 FN and a null static chain slot. NARGS is the number of call arguments
11421 which are specified as a tree array ARGS. */
11423 tree
11424 build_call_array_loc (location_t loc, tree return_type, tree fn,
11425 int nargs, const tree *args)
11427 tree t;
11428 int i;
11430 t = build_call_1 (return_type, fn, nargs);
11431 for (i = 0; i < nargs; i++)
11432 CALL_EXPR_ARG (t, i) = args[i];
11433 process_call_operands (t);
11434 SET_EXPR_LOCATION (t, loc);
11435 return t;
11438 /* Like build_call_array, but takes a vec. */
11440 tree
11441 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11443 tree ret, t;
11444 unsigned int ix;
11446 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11447 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11448 CALL_EXPR_ARG (ret, ix) = t;
11449 process_call_operands (ret);
11450 return ret;
11453 /* Conveniently construct a function call expression. FNDECL names the
11454 function to be called and N arguments are passed in the array
11455 ARGARRAY. */
11457 tree
11458 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11460 tree fntype = TREE_TYPE (fndecl);
11461 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11463 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11466 /* Conveniently construct a function call expression. FNDECL names the
11467 function to be called and the arguments are passed in the vector
11468 VEC. */
11470 tree
11471 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11473 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11474 vec_safe_address (vec));
11478 /* Conveniently construct a function call expression. FNDECL names the
11479 function to be called, N is the number of arguments, and the "..."
11480 parameters are the argument expressions. */
11482 tree
11483 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11485 va_list ap;
11486 tree *argarray = XALLOCAVEC (tree, n);
11487 int i;
11489 va_start (ap, n);
11490 for (i = 0; i < n; i++)
11491 argarray[i] = va_arg (ap, tree);
11492 va_end (ap);
11493 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11496 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11497 varargs macros aren't supported by all bootstrap compilers. */
11499 tree
11500 build_call_expr (tree fndecl, int n, ...)
11502 va_list ap;
11503 tree *argarray = XALLOCAVEC (tree, n);
11504 int i;
11506 va_start (ap, n);
11507 for (i = 0; i < n; i++)
11508 argarray[i] = va_arg (ap, tree);
11509 va_end (ap);
11510 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
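/* Minimal sketch (editorial, not in the original source): combined with
   builtin_decl_explicit, the varargs interface above makes it easy to
   build a call to a standard builtin, e.g.

     tree call = build_call_expr (builtin_decl_explicit (BUILT_IN_MEMSET),
                                  3, ptr, integer_zero_node, size_int (32));

   where ptr is assumed to be a pointer-typed tree supplied by the caller.  */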
11513 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11514 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11515 It will get gimplified later into an ordinary internal function. */
11517 tree
11518 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11519 tree type, int n, const tree *args)
11521 tree t = build_call_1 (type, NULL_TREE, n);
11522 for (int i = 0; i < n; ++i)
11523 CALL_EXPR_ARG (t, i) = args[i];
11524 SET_EXPR_LOCATION (t, loc);
11525 CALL_EXPR_IFN (t) = ifn;
11526 process_call_operands (t);
11527 return t;
11530 /* Build an internal call expression. This is just like CALL_EXPR, except
11531 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
11532 internal function. */
11534 tree
11535 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11536 tree type, int n, ...)
11538 va_list ap;
11539 tree *argarray = XALLOCAVEC (tree, n);
11540 int i;
11542 va_start (ap, n);
11543 for (i = 0; i < n; i++)
11544 argarray[i] = va_arg (ap, tree);
11545 va_end (ap);
11546 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11549 /* Return a function call to FN, if the target is guaranteed to support it,
11550 or null otherwise.
11552 N is the number of arguments, passed in the "...", and TYPE is the
11553 type of the return value. */
11555 tree
11556 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11557 int n, ...)
11559 va_list ap;
11560 tree *argarray = XALLOCAVEC (tree, n);
11561 int i;
11563 va_start (ap, n);
11564 for (i = 0; i < n; i++)
11565 argarray[i] = va_arg (ap, tree);
11566 va_end (ap);
11567 if (internal_fn_p (fn))
11569 internal_fn ifn = as_internal_fn (fn);
11570 if (direct_internal_fn_p (ifn))
11572 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11573 if (!direct_internal_fn_supported_p (ifn, types,
11574 OPTIMIZE_FOR_BOTH))
11575 return NULL_TREE;
11577 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11579 else
11581 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11582 if (!fndecl)
11583 return NULL_TREE;
11584 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11588 /* Return a function call to the appropriate builtin alloca variant.
11590 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
11591 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
11592 bound for SIZE in case it is not a fixed value. */
11594 tree
11595 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
11597 if (max_size >= 0)
11599 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
11600 return
11601 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
11603 else if (align > 0)
11605 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11606 return build_call_expr (t, 2, size, size_int (align));
11608 else
11610 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
11611 return build_call_expr (t, 1, size);
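/* Editor's note: illustrative sketch (not from the original file) showing
   how the three variants are selected.  SIZE is a hypothetical size
   expression:

     tree a1 = build_alloca_call_expr (size, 0, -1);     // plain alloca
     tree a2 = build_alloca_call_expr (size, 16, -1);    // 16-byte aligned
     tree a3 = build_alloca_call_expr (size, 16, 1024);  // bounded maximum size

   i.e. the builtin variant is chosen purely from whether ALIGN is nonzero
   and whether MAX_SIZE is non-negative.  */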
11615 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
11616 if SIZE == -1) and return a tree node representing a char* pointer to
11617 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull,
11618 the STRING_CST value is the LEN bytes at STR (the representation
11619 of the string, which may be wide). Otherwise it's all zeros. */
11621 tree
11622 build_string_literal (unsigned len, const char *str /* = NULL */,
11623 tree eltype /* = char_type_node */,
11624 unsigned HOST_WIDE_INT size /* = -1 */)
11626 tree t = build_string (len, str);
11627 /* Set the maximum valid index based on the string length or SIZE. */
11628 unsigned HOST_WIDE_INT maxidx
11629 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
11631 tree index = build_index_type (size_int (maxidx));
11632 eltype = build_type_variant (eltype, 1, 0);
11633 tree type = build_array_type (eltype, index);
11634 TREE_TYPE (t) = type;
11635 TREE_CONSTANT (t) = 1;
11636 TREE_READONLY (t) = 1;
11637 TREE_STATIC (t) = 1;
11639 type = build_pointer_type (eltype);
11640 t = build1 (ADDR_EXPR, type,
11641 build4 (ARRAY_REF, eltype,
11642 t, integer_zero_node, NULL_TREE, NULL_TREE));
11643 return t;
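/* Editor's note: illustrative sketch only (not part of the original file).
   Building the address of a constant NUL-terminated string with the default
   element type and SIZE:

     tree addr = build_string_literal (sizeof "hello", "hello");

   ADDR is an ADDR_EXPR of type pointer-to-const-char whose operand is an
   ARRAY_REF into a read-only, static STRING_CST covering all six bytes,
   including the terminating NUL.  */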
11648 /* Return true if T (assumed to be a DECL) must be assigned a memory
11649 location. */
11651 bool
11652 needs_to_live_in_memory (const_tree t)
11654 return (TREE_ADDRESSABLE (t)
11655 || is_global_var (t)
11656 || (TREE_CODE (t) == RESULT_DECL
11657 && !DECL_BY_REFERENCE (t)
11658 && aggregate_value_p (t, current_function_decl)));
11661 /* Return the value of the constant X, sign-extended. */
11663 HOST_WIDE_INT
11664 int_cst_value (const_tree x)
11666 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11667 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11669 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11670 gcc_assert (cst_and_fits_in_hwi (x));
11672 if (bits < HOST_BITS_PER_WIDE_INT)
11674 bool negative = ((val >> (bits - 1)) & 1) != 0;
11675 if (negative)
11676 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11677 else
11678 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11681 return val;
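/* Editor's note: worked example, not part of the original file.  With an
   8-bit precision type, a constant whose low byte is 0xff has bit 7 set,
   so the function above returns -1; a low byte of 0x7f is returned as 127.
   The sign extension is performed from the type's precision regardless of
   the type's signedness.  */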
11684 /* If TYPE is an integral or pointer type, return an integer type with
11685 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11686 if TYPE is already an integer type of signedness UNSIGNEDP.
11687 If TYPE is a floating-point type, return an integer type with the same
11688 bitsize and with the signedness given by UNSIGNEDP; this is useful
11689 when doing bit-level operations on a floating-point value. */
11691 tree
11692 signed_or_unsigned_type_for (int unsignedp, tree type)
11694 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11695 return type;
11697 if (TREE_CODE (type) == VECTOR_TYPE)
11699 tree inner = TREE_TYPE (type);
11700 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11701 if (!inner2)
11702 return NULL_TREE;
11703 if (inner == inner2)
11704 return type;
11705 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11708 if (TREE_CODE (type) == COMPLEX_TYPE)
11710 tree inner = TREE_TYPE (type);
11711 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11712 if (!inner2)
11713 return NULL_TREE;
11714 if (inner == inner2)
11715 return type;
11716 return build_complex_type (inner2);
11719 unsigned int bits;
11720 if (INTEGRAL_TYPE_P (type)
11721 || POINTER_TYPE_P (type)
11722 || TREE_CODE (type) == OFFSET_TYPE)
11723 bits = TYPE_PRECISION (type);
11724 else if (TREE_CODE (type) == REAL_TYPE)
11725 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11726 else
11727 return NULL_TREE;
11729 return build_nonstandard_integer_type (bits, unsignedp);
11732 /* If TYPE is an integral or pointer type, return an integer type with
11733 the same precision which is unsigned, or itself if TYPE is already an
11734 unsigned integer type. If TYPE is a floating-point type, return an
11735 unsigned integer type with the same bitsize as TYPE. */
11737 tree
11738 unsigned_type_for (tree type)
11740 return signed_or_unsigned_type_for (1, type);
11743 /* If TYPE is an integral or pointer type, return an integer type with
11744 the same precision which is signed, or itself if TYPE is already a
11745 signed integer type. If TYPE is a floating-point type, return a
11746 signed integer type with the same bitsize as TYPE. */
11748 tree
11749 signed_type_for (tree type)
11751 return signed_or_unsigned_type_for (0, type);
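/* Editor's note: illustrative sketch only (not part of the original file)
   of the two wrappers above:

     tree u = unsigned_type_for (integer_type_node); // unsigned type of
                                                     // int's precision
     tree s = signed_type_for (size_type_node);      // signed type of the
                                                     // same precision

   Vector and complex types have their element type converted and the
   wrapper type rebuilt; pointer types yield an integer type of pointer
   precision.  */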
11754 /* If TYPE is a vector type, return a signed integer vector type with the
11755 same width and number of subparts. Otherwise return boolean_type_node. */
11757 tree
11758 truth_type_for (tree type)
11760 if (TREE_CODE (type) == VECTOR_TYPE)
11762 if (VECTOR_BOOLEAN_TYPE_P (type))
11763 return type;
11764 return build_truth_vector_type_for (type);
11766 else
11767 return boolean_type_node;
11770 /* Returns the largest value obtainable by casting something in INNER type to
11771 OUTER type. */
11773 tree
11774 upper_bound_in_type (tree outer, tree inner)
11776 unsigned int det = 0;
11777 unsigned oprec = TYPE_PRECISION (outer);
11778 unsigned iprec = TYPE_PRECISION (inner);
11779 unsigned prec;
11781 /* Compute a unique number for every combination. */
11782 det |= (oprec > iprec) ? 4 : 0;
11783 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11784 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11786 /* Determine the exponent to use. */
11787 switch (det)
11789 case 0:
11790 case 1:
11791 /* oprec <= iprec, outer: signed, inner: don't care. */
11792 prec = oprec - 1;
11793 break;
11794 case 2:
11795 case 3:
11796 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11797 prec = oprec;
11798 break;
11799 case 4:
11800 /* oprec > iprec, outer: signed, inner: signed. */
11801 prec = iprec - 1;
11802 break;
11803 case 5:
11804 /* oprec > iprec, outer: signed, inner: unsigned. */
11805 prec = iprec;
11806 break;
11807 case 6:
11808 /* oprec > iprec, outer: unsigned, inner: signed. */
11809 prec = oprec;
11810 break;
11811 case 7:
11812 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11813 prec = iprec;
11814 break;
11815 default:
11816 gcc_unreachable ();
11819 return wide_int_to_tree (outer,
11820 wi::mask (prec, false, TYPE_PRECISION (outer)));
11823 /* Returns the smallest value obtainable by casting something in INNER type to
11824 OUTER type. */
11826 tree
11827 lower_bound_in_type (tree outer, tree inner)
11829 unsigned oprec = TYPE_PRECISION (outer);
11830 unsigned iprec = TYPE_PRECISION (inner);
11832 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11833 and obtain 0. */
11834 if (TYPE_UNSIGNED (outer)
11835 /* If we are widening something of an unsigned type, OUTER type
11836 contains all values of INNER type. In particular, both INNER
11837 and OUTER types have zero in common. */
11838 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11839 return build_int_cst (outer, 0);
11840 else
11842 /* If we are widening a signed type to another signed type, we
11843 want to obtain -2^(iprec-1). If we are keeping the
11844 precision or narrowing to a signed type, we want to obtain
11845 -2^(oprec-1). */
11846 unsigned prec = oprec > iprec ? iprec : oprec;
11847 return wide_int_to_tree (outer,
11848 wi::mask (prec - 1, true,
11849 TYPE_PRECISION (outer)));
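/* Editor's note: worked example, not part of the original file.  Casting a
   16-bit unsigned value to a 32-bit signed type can produce at most 65535
   and at least 0, so upper_bound_in_type (int32, uint16) is 65535 (case 5
   above) and lower_bound_in_type (int32, uint16) is 0.  In the other
   direction, upper_bound_in_type (uint16, int32) is also 65535 (case 2:
   oprec <= iprec with an unsigned outer type) and lower_bound_in_type
   (uint16, int32) is 0 because the outer type is unsigned.  */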
11853 /* Return nonzero if two operands that are suitable for PHI nodes are
11854 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11855 SSA_NAME or invariant. Note that this is strictly an optimization.
11856 That is, callers of this function can directly call operand_equal_p
11857 and get the same result, only slower. */
11859 int
11860 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11862 if (arg0 == arg1)
11863 return 1;
11864 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11865 return 0;
11866 return operand_equal_p (arg0, arg1, 0);
11869 /* Returns the number of zeros at the end of the binary representation of X. */
11871 tree
11872 num_ending_zeros (const_tree x)
11874 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
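/* Editor's note: worked example, not part of the original file.
   num_ending_zeros on a constant 40 (binary 101000) yields an INTEGER_CST
   of value 3 in the type of X; any odd constant yields 0.  */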
11878 #define WALK_SUBTREE(NODE) \
11879 do \
11881 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11882 if (result) \
11883 return result; \
11885 while (0)
11887 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11888 to be walked whenever a type is seen in the tree. The rest of the operands
11889 and the return value are as for walk_tree. */
11891 static tree
11892 walk_type_fields (tree type, walk_tree_fn func, void *data,
11893 hash_set<tree> *pset, walk_tree_lh lh)
11895 tree result = NULL_TREE;
11897 switch (TREE_CODE (type))
11899 case POINTER_TYPE:
11900 case REFERENCE_TYPE:
11901 case VECTOR_TYPE:
11902 /* We have to worry about mutually recursive pointers. These can't
11903 be written in C. They can in Ada. It's pathological, but
11904 there's an ACATS test (c38102a) that checks it. Deal with this
11905 by checking if we're pointing to another pointer, that one
11906 points to another pointer, that one does too, and we have no htab.
11907 If so, get a hash table. We check three levels deep to avoid
11908 the cost of the hash table if we don't need one. */
11909 if (POINTER_TYPE_P (TREE_TYPE (type))
11910 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11911 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11912 && !pset)
11914 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11915 func, data);
11916 if (result)
11917 return result;
11919 break;
11922 /* fall through */
11924 case COMPLEX_TYPE:
11925 WALK_SUBTREE (TREE_TYPE (type));
11926 break;
11928 case METHOD_TYPE:
11929 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11931 /* Fall through. */
11933 case FUNCTION_TYPE:
11934 WALK_SUBTREE (TREE_TYPE (type));
11936 tree arg;
11938 /* We never want to walk into default arguments. */
11939 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11940 WALK_SUBTREE (TREE_VALUE (arg));
11942 break;
11944 case ARRAY_TYPE:
11945 /* Don't follow this node's type if it is a pointer, for fear that
11946 we'll have infinite recursion. If we have a PSET, then we
11947 need not fear. */
11948 if (pset
11949 || (!POINTER_TYPE_P (TREE_TYPE (type))
11950 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11951 WALK_SUBTREE (TREE_TYPE (type));
11952 WALK_SUBTREE (TYPE_DOMAIN (type));
11953 break;
11955 case OFFSET_TYPE:
11956 WALK_SUBTREE (TREE_TYPE (type));
11957 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11958 break;
11960 default:
11961 break;
11964 return NULL_TREE;
11967 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11968 called with the DATA and the address of each sub-tree. If FUNC returns a
11969 non-NULL value, the traversal is stopped, and the value returned by FUNC
11970 is returned. If PSET is non-NULL it is used to record the nodes visited,
11971 and to avoid visiting a node more than once. */
11973 tree
11974 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11975 hash_set<tree> *pset, walk_tree_lh lh)
11977 enum tree_code code;
11978 int walk_subtrees;
11979 tree result;
11981 #define WALK_SUBTREE_TAIL(NODE) \
11982 do \
11984 tp = & (NODE); \
11985 goto tail_recurse; \
11987 while (0)
11989 tail_recurse:
11990 /* Skip empty subtrees. */
11991 if (!*tp)
11992 return NULL_TREE;
11994 /* Don't walk the same tree twice, if the user has requested
11995 that we avoid doing so. */
11996 if (pset && pset->add (*tp))
11997 return NULL_TREE;
11999 /* Call the function. */
12000 walk_subtrees = 1;
12001 result = (*func) (tp, &walk_subtrees, data);
12003 /* If we found something, return it. */
12004 if (result)
12005 return result;
12007 code = TREE_CODE (*tp);
12009 /* Even if we didn't, FUNC may have decided that there was nothing
12010 interesting below this point in the tree. */
12011 if (!walk_subtrees)
12013 /* But we still need to check our siblings. */
12014 if (code == TREE_LIST)
12015 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12016 else if (code == OMP_CLAUSE)
12017 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12018 else
12019 return NULL_TREE;
12022 if (lh)
12024 result = (*lh) (tp, &walk_subtrees, func, data, pset);
12025 if (result || !walk_subtrees)
12026 return result;
12029 switch (code)
12031 case ERROR_MARK:
12032 case IDENTIFIER_NODE:
12033 case INTEGER_CST:
12034 case REAL_CST:
12035 case FIXED_CST:
12036 case VECTOR_CST:
12037 case STRING_CST:
12038 case BLOCK:
12039 case PLACEHOLDER_EXPR:
12040 case SSA_NAME:
12041 case FIELD_DECL:
12042 case RESULT_DECL:
12043 /* None of these have subtrees other than those already walked
12044 above. */
12045 break;
12047 case TREE_LIST:
12048 WALK_SUBTREE (TREE_VALUE (*tp));
12049 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
12050 break;
12052 case TREE_VEC:
12054 int len = TREE_VEC_LENGTH (*tp);
12056 if (len == 0)
12057 break;
12059 /* Walk all elements but the first. */
12060 while (--len)
12061 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
12063 /* Now walk the first one as a tail call. */
12064 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
12067 case COMPLEX_CST:
12068 WALK_SUBTREE (TREE_REALPART (*tp));
12069 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
12071 case CONSTRUCTOR:
12073 unsigned HOST_WIDE_INT idx;
12074 constructor_elt *ce;
12076 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
12077 idx++)
12078 WALK_SUBTREE (ce->value);
12080 break;
12082 case SAVE_EXPR:
12083 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
12085 case BIND_EXPR:
12087 tree decl;
12088 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
12090 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
12091 into declarations that are just mentioned, rather than
12092 declared; they don't really belong to this part of the tree.
12093 And, we can see cycles: the initializer for a declaration
12094 can refer to the declaration itself. */
12095 WALK_SUBTREE (DECL_INITIAL (decl));
12096 WALK_SUBTREE (DECL_SIZE (decl));
12097 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
12099 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
12102 case STATEMENT_LIST:
12104 tree_stmt_iterator i;
12105 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
12106 WALK_SUBTREE (*tsi_stmt_ptr (i));
12108 break;
12110 case OMP_CLAUSE:
12111 switch (OMP_CLAUSE_CODE (*tp))
12113 case OMP_CLAUSE_GANG:
12114 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12115 /* FALLTHRU */
12117 case OMP_CLAUSE_ASYNC:
12118 case OMP_CLAUSE_WAIT:
12119 case OMP_CLAUSE_WORKER:
12120 case OMP_CLAUSE_VECTOR:
12121 case OMP_CLAUSE_NUM_GANGS:
12122 case OMP_CLAUSE_NUM_WORKERS:
12123 case OMP_CLAUSE_VECTOR_LENGTH:
12124 case OMP_CLAUSE_PRIVATE:
12125 case OMP_CLAUSE_SHARED:
12126 case OMP_CLAUSE_FIRSTPRIVATE:
12127 case OMP_CLAUSE_COPYIN:
12128 case OMP_CLAUSE_COPYPRIVATE:
12129 case OMP_CLAUSE_FINAL:
12130 case OMP_CLAUSE_IF:
12131 case OMP_CLAUSE_NUM_THREADS:
12132 case OMP_CLAUSE_SCHEDULE:
12133 case OMP_CLAUSE_UNIFORM:
12134 case OMP_CLAUSE_DEPEND:
12135 case OMP_CLAUSE_NONTEMPORAL:
12136 case OMP_CLAUSE_NUM_TEAMS:
12137 case OMP_CLAUSE_THREAD_LIMIT:
12138 case OMP_CLAUSE_DEVICE:
12139 case OMP_CLAUSE_DIST_SCHEDULE:
12140 case OMP_CLAUSE_SAFELEN:
12141 case OMP_CLAUSE_SIMDLEN:
12142 case OMP_CLAUSE_ORDERED:
12143 case OMP_CLAUSE_PRIORITY:
12144 case OMP_CLAUSE_GRAINSIZE:
12145 case OMP_CLAUSE_NUM_TASKS:
12146 case OMP_CLAUSE_HINT:
12147 case OMP_CLAUSE_TO_DECLARE:
12148 case OMP_CLAUSE_LINK:
12149 case OMP_CLAUSE_USE_DEVICE_PTR:
12150 case OMP_CLAUSE_USE_DEVICE_ADDR:
12151 case OMP_CLAUSE_IS_DEVICE_PTR:
12152 case OMP_CLAUSE_INCLUSIVE:
12153 case OMP_CLAUSE_EXCLUSIVE:
12154 case OMP_CLAUSE__LOOPTEMP_:
12155 case OMP_CLAUSE__REDUCTEMP_:
12156 case OMP_CLAUSE__CONDTEMP_:
12157 case OMP_CLAUSE__SCANTEMP_:
12158 case OMP_CLAUSE__SIMDUID_:
12159 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
12160 /* FALLTHRU */
12162 case OMP_CLAUSE_INDEPENDENT:
12163 case OMP_CLAUSE_NOWAIT:
12164 case OMP_CLAUSE_DEFAULT:
12165 case OMP_CLAUSE_UNTIED:
12166 case OMP_CLAUSE_MERGEABLE:
12167 case OMP_CLAUSE_PROC_BIND:
12168 case OMP_CLAUSE_DEVICE_TYPE:
12169 case OMP_CLAUSE_INBRANCH:
12170 case OMP_CLAUSE_NOTINBRANCH:
12171 case OMP_CLAUSE_FOR:
12172 case OMP_CLAUSE_PARALLEL:
12173 case OMP_CLAUSE_SECTIONS:
12174 case OMP_CLAUSE_TASKGROUP:
12175 case OMP_CLAUSE_NOGROUP:
12176 case OMP_CLAUSE_THREADS:
12177 case OMP_CLAUSE_SIMD:
12178 case OMP_CLAUSE_DEFAULTMAP:
12179 case OMP_CLAUSE_ORDER:
12180 case OMP_CLAUSE_BIND:
12181 case OMP_CLAUSE_AUTO:
12182 case OMP_CLAUSE_SEQ:
12183 case OMP_CLAUSE_TILE:
12184 case OMP_CLAUSE__SIMT_:
12185 case OMP_CLAUSE_IF_PRESENT:
12186 case OMP_CLAUSE_FINALIZE:
12187 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12189 case OMP_CLAUSE_LASTPRIVATE:
12190 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12191 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
12192 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12194 case OMP_CLAUSE_COLLAPSE:
12196 int i;
12197 for (i = 0; i < 3; i++)
12198 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12199 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12202 case OMP_CLAUSE_LINEAR:
12203 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12204 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
12205 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
12206 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12208 case OMP_CLAUSE_ALIGNED:
12209 case OMP_CLAUSE_FROM:
12210 case OMP_CLAUSE_TO:
12211 case OMP_CLAUSE_MAP:
12212 case OMP_CLAUSE__CACHE_:
12213 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12214 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12215 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12217 case OMP_CLAUSE_REDUCTION:
12218 case OMP_CLAUSE_TASK_REDUCTION:
12219 case OMP_CLAUSE_IN_REDUCTION:
12221 int i;
12222 for (i = 0; i < 5; i++)
12223 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12224 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12227 default:
12228 gcc_unreachable ();
12230 break;
12232 case TARGET_EXPR:
12234 int i, len;
12236 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
12237 But, we only want to walk once. */
12238 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
12239 for (i = 0; i < len; ++i)
12240 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12241 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
12244 case DECL_EXPR:
12245 /* If this is a TYPE_DECL, walk into the fields of the type that it's
12246 defining. We only want to walk into these fields of a type in this
12247 case and not in the general case of a mere reference to the type.
12249 The criterion is as follows: if the field can be an expression, it
12250 must be walked only here. This should be in keeping with the fields
12251 that are directly gimplified in gimplify_type_sizes in order for the
12252 mark/copy-if-shared/unmark machinery of the gimplifier to work with
12253 variable-sized types.
12255 Note that DECLs get walked as part of processing the BIND_EXPR. */
12256 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
12258 /* Call the function for the decl so e.g. copy_tree_body_r can
12259 replace it with the remapped one. */
12260 result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
12261 if (result || !walk_subtrees)
12262 return result;
12264 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
12265 if (TREE_CODE (*type_p) == ERROR_MARK)
12266 return NULL_TREE;
12268 /* Call the function for the type. See if it returns anything or
12269 doesn't want us to continue. If we are to continue, walk both
12270 the normal fields and those for the declaration case. */
12271 result = (*func) (type_p, &walk_subtrees, data);
12272 if (result || !walk_subtrees)
12273 return result;
12275 /* But do not walk a pointed-to type since it may itself need to
12276 be walked in the declaration case if it isn't anonymous. */
12277 if (!POINTER_TYPE_P (*type_p))
12279 result = walk_type_fields (*type_p, func, data, pset, lh);
12280 if (result)
12281 return result;
12284 /* If this is a record type, also walk the fields. */
12285 if (RECORD_OR_UNION_TYPE_P (*type_p))
12287 tree field;
12289 for (field = TYPE_FIELDS (*type_p); field;
12290 field = DECL_CHAIN (field))
12292 /* We'd like to look at the type of the field, but we can
12293 easily get infinite recursion. So assume it's pointed
12294 to elsewhere in the tree. Also, ignore things that
12295 aren't fields. */
12296 if (TREE_CODE (field) != FIELD_DECL)
12297 continue;
12299 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
12300 WALK_SUBTREE (DECL_SIZE (field));
12301 WALK_SUBTREE (DECL_SIZE_UNIT (field));
12302 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
12303 WALK_SUBTREE (DECL_QUALIFIER (field));
12307 /* Same for scalar types. */
12308 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
12309 || TREE_CODE (*type_p) == ENUMERAL_TYPE
12310 || TREE_CODE (*type_p) == INTEGER_TYPE
12311 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
12312 || TREE_CODE (*type_p) == REAL_TYPE)
12314 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
12315 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
12318 WALK_SUBTREE (TYPE_SIZE (*type_p));
12319 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
12321 /* FALLTHRU */
12323 default:
12324 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
12326 int i, len;
12328 /* Walk over all the sub-trees of this operand. */
12329 len = TREE_OPERAND_LENGTH (*tp);
12331 /* Go through the subtrees. We need to do this in forward order so
12332 that the scope of a FOR_EXPR is handled properly. */
12333 if (len)
12335 for (i = 0; i < len - 1; ++i)
12336 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12337 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
12340 /* If this is a type, walk the needed fields in the type. */
12341 else if (TYPE_P (*tp))
12342 return walk_type_fields (*tp, func, data, pset, lh);
12343 break;
12346 /* We didn't find what we were looking for. */
12347 return NULL_TREE;
12349 #undef WALK_SUBTREE_TAIL
12351 #undef WALK_SUBTREE
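/* Editor's note: a minimal illustrative walk_tree_1 callback, not part of
   the original file.  EXPR is a hypothetical tree to be scanned; the
   callback counts every visited node:

     static tree
     count_nodes_r (tree *tp ATTRIBUTE_UNUSED,
                    int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       ++*(unsigned *) data;
       return NULL_TREE;   // NULL_TREE means "keep walking"
     }

     unsigned n = 0;
     walk_tree_1 (&expr, count_nodes_r, &n, NULL, NULL);

   Returning any non-NULL tree from the callback stops the whole walk and
   becomes the return value of walk_tree_1; setting *walk_subtrees to 0
   merely skips the subtrees of the current node.  */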
12353 /* Like walk_tree, but does not walk duplicate nodes more than once. */
12355 tree
12356 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
12357 walk_tree_lh lh)
12359 tree result;
12361 hash_set<tree> pset;
12362 result = walk_tree_1 (tp, func, data, &pset, lh);
12363 return result;
12367 tree
12368 tree_block (tree t)
12370 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12372 if (IS_EXPR_CODE_CLASS (c))
12373 return LOCATION_BLOCK (t->exp.locus);
12374 gcc_unreachable ();
12375 return NULL;
12378 void
12379 tree_set_block (tree t, tree b)
12381 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12383 if (IS_EXPR_CODE_CLASS (c))
12385 t->exp.locus = set_block (t->exp.locus, b);
12387 else
12388 gcc_unreachable ();
12391 /* Create a nameless artificial label and put it in the current
12392 function context. The label has a location of LOC. Returns the
12393 newly created label. */
12395 tree
12396 create_artificial_label (location_t loc)
12398 tree lab = build_decl (loc,
12399 LABEL_DECL, NULL_TREE, void_type_node);
12401 DECL_ARTIFICIAL (lab) = 1;
12402 DECL_IGNORED_P (lab) = 1;
12403 DECL_CONTEXT (lab) = current_function_decl;
12404 return lab;
12407 /* Given a tree, try to return a useful variable name that we can use
12408 to prefix a temporary that is being assigned the value of the tree.
12409 I.E. given <temp> = &A, return A. */
12411 const char *
12412 get_name (tree t)
12414 tree stripped_decl;
12416 stripped_decl = t;
12417 STRIP_NOPS (stripped_decl);
12418 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12419 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12420 else if (TREE_CODE (stripped_decl) == SSA_NAME)
12422 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12423 if (!name)
12424 return NULL;
12425 return IDENTIFIER_POINTER (name);
12427 else
12429 switch (TREE_CODE (stripped_decl))
12431 case ADDR_EXPR:
12432 return get_name (TREE_OPERAND (stripped_decl, 0));
12433 default:
12434 return NULL;
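/* Editor's note: worked examples, not part of the original file.  For a
   declaration "int foo;", get_name (decl) yields "foo"; for the address
   expression &foo it recurses through the ADDR_EXPR and still yields
   "foo"; for an anonymous SSA name or other unnamed temporary it returns
   NULL.  */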
12439 /* Return true if FNTYPE has a variable argument list. */
12441 bool
12442 stdarg_p (const_tree fntype)
12444 function_args_iterator args_iter;
12445 tree n = NULL_TREE, t;
12447 if (!fntype)
12448 return false;
12450 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12452 n = t;
12455 return n != NULL_TREE && n != void_type_node;
12458 /* Return true if FNTYPE has a prototype. */
12460 bool
12461 prototype_p (const_tree fntype)
12463 tree t;
12465 gcc_assert (fntype != NULL_TREE);
12467 t = TYPE_ARG_TYPES (fntype);
12468 return (t != NULL_TREE);
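/* Editor's note: worked examples, not part of the original file.  For the
   type of "int printf (const char *, ...)" both stdarg_p and prototype_p
   return true; for an old-style unprototyped "int f ()" both return false,
   since TYPE_ARG_TYPES is NULL; for "int g (void)" prototype_p is true but
   stdarg_p is false because the argument list ends in void_type_node.  */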
12471 /* If BLOCK is inlined from an __attribute__((__artificial__))
12472 routine, return pointer to location from where it has been
12473 called. */
12474 location_t *
12475 block_nonartificial_location (tree block)
12477 location_t *ret = NULL;
12479 while (block && TREE_CODE (block) == BLOCK
12480 && BLOCK_ABSTRACT_ORIGIN (block))
12482 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12483 if (TREE_CODE (ao) == FUNCTION_DECL)
12485 /* If AO is an artificial inline, point RET to the
12486 call site locus at which it has been inlined and continue
12487 the loop, in case AO's caller is also an artificial
12488 inline. */
12489 if (DECL_DECLARED_INLINE_P (ao)
12490 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12491 ret = &BLOCK_SOURCE_LOCATION (block);
12492 else
12493 break;
12495 else if (TREE_CODE (ao) != BLOCK)
12496 break;
12498 block = BLOCK_SUPERCONTEXT (block);
12500 return ret;
12504 /* If EXP is inlined from an __attribute__((__artificial__))
12505 function, return the location of the original call expression. */
12507 location_t
12508 tree_nonartificial_location (tree exp)
12510 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12512 if (loc)
12513 return *loc;
12514 else
12515 return EXPR_LOCATION (exp);
12519 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
12520 nodes. */
12522 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION node. */
12524 hashval_t
12525 cl_option_hasher::hash (tree x)
12527 const_tree const t = x;
12528 const char *p;
12529 size_t i;
12530 size_t len = 0;
12531 hashval_t hash = 0;
12533 if (TREE_CODE (t) == OPTIMIZATION_NODE)
12535 p = (const char *)TREE_OPTIMIZATION (t);
12536 len = sizeof (struct cl_optimization);
12539 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12540 return cl_target_option_hash (TREE_TARGET_OPTION (t));
12542 else
12543 gcc_unreachable ();
12545 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
12546 something else. */
12547 for (i = 0; i < len; i++)
12548 if (p[i])
12549 hash = (hash << 4) ^ ((i << 2) | p[i]);
12551 return hash;
12554 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12555 TARGET_OPTION tree node) is the same as that given by *Y, a tree node
12556 of the same kind. */
12558 bool
12559 cl_option_hasher::equal (tree x, tree y)
12561 const_tree const xt = x;
12562 const_tree const yt = y;
12564 if (TREE_CODE (xt) != TREE_CODE (yt))
12565 return 0;
12567 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12568 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
12569 TREE_OPTIMIZATION (yt));
12570 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12571 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12572 TREE_TARGET_OPTION (yt));
12573 else
12574 gcc_unreachable ();
12577 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
12579 tree
12580 build_optimization_node (struct gcc_options *opts,
12581 struct gcc_options *opts_set)
12583 tree t;
12585 /* Use the cache of optimization nodes. */
12587 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12588 opts, opts_set);
12590 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12591 t = *slot;
12592 if (!t)
12594 /* Insert this one into the hash table. */
12595 t = cl_optimization_node;
12596 *slot = t;
12598 /* Make a new node for next time round. */
12599 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12602 return t;
12605 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
12607 tree
12608 build_target_option_node (struct gcc_options *opts,
12609 struct gcc_options *opts_set)
12611 tree t;
12613 /* Use the cache of target option nodes. */
12615 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12616 opts, opts_set);
12618 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12619 t = *slot;
12620 if (!t)
12622 /* Insert this one into the hash table. */
12623 t = cl_target_option_node;
12624 *slot = t;
12626 /* Make a new node for next time round. */
12627 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12630 return t;
12633 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12634 so that they aren't saved during PCH writing. */
12636 void
12637 prepare_target_option_nodes_for_pch (void)
12639 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12640 for (; iter != cl_option_hash_table->end (); ++iter)
12641 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12642 TREE_TARGET_GLOBALS (*iter) = NULL;
12645 /* Determine the "ultimate origin" of a block. */
12647 tree
12648 block_ultimate_origin (const_tree block)
12650 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
12652 if (origin == NULL_TREE)
12653 return NULL_TREE;
12654 else
12656 gcc_checking_assert ((DECL_P (origin)
12657 && DECL_ORIGIN (origin) == origin)
12658 || BLOCK_ORIGIN (origin) == origin);
12659 return origin;
12663 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12664 no instruction. */
12666 bool
12667 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12669 /* Do not strip casts into or out of differing address spaces. */
12670 if (POINTER_TYPE_P (outer_type)
12671 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12673 if (!POINTER_TYPE_P (inner_type)
12674 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12675 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12676 return false;
12678 else if (POINTER_TYPE_P (inner_type)
12679 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12681 /* We already know that outer_type is not a pointer with
12682 a non-generic address space. */
12683 return false;
12686 /* Use precision rather than machine mode when we can, which gives
12687 the correct answer even for submode (bit-field) types. */
12688 if ((INTEGRAL_TYPE_P (outer_type)
12689 || POINTER_TYPE_P (outer_type)
12690 || TREE_CODE (outer_type) == OFFSET_TYPE)
12691 && (INTEGRAL_TYPE_P (inner_type)
12692 || POINTER_TYPE_P (inner_type)
12693 || TREE_CODE (inner_type) == OFFSET_TYPE))
12694 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12696 /* Otherwise fall back on comparing machine modes (e.g. for
12697 aggregate types, floats). */
12698 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
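/* Editor's note: worked examples, not part of the original file.  A cast
   between "int" and "unsigned int" is a nop conversion (same precision),
   as is a cast between two pointer types in the generic address space; a
   cast from "int" to "short" is not, because the precisions differ, and
   neither is a cast between pointers into different named address
   spaces.  */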
12701 /* Return true iff conversion in EXP generates no instruction. Mark
12702 it inline so that we fully inline into the stripping functions even
12703 though we have two uses of this function. */
12705 static inline bool
12706 tree_nop_conversion (const_tree exp)
12708 tree outer_type, inner_type;
12710 if (location_wrapper_p (exp))
12711 return true;
12712 if (!CONVERT_EXPR_P (exp)
12713 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12714 return false;
12716 outer_type = TREE_TYPE (exp);
12717 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12718 if (!inner_type || inner_type == error_mark_node)
12719 return false;
12721 return tree_nop_conversion_p (outer_type, inner_type);
12724 /* Return true iff conversion in EXP generates no instruction. Don't
12725 consider conversions changing the signedness. */
12727 static bool
12728 tree_sign_nop_conversion (const_tree exp)
12730 tree outer_type, inner_type;
12732 if (!tree_nop_conversion (exp))
12733 return false;
12735 outer_type = TREE_TYPE (exp);
12736 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12738 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12739 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12742 /* Strip conversions from EXP according to tree_nop_conversion and
12743 return the resulting expression. */
12745 tree
12746 tree_strip_nop_conversions (tree exp)
12748 while (tree_nop_conversion (exp))
12749 exp = TREE_OPERAND (exp, 0);
12750 return exp;
12753 /* Strip conversions from EXP according to tree_sign_nop_conversion
12754 and return the resulting expression. */
12756 tree
12757 tree_strip_sign_nop_conversions (tree exp)
12759 while (tree_sign_nop_conversion (exp))
12760 exp = TREE_OPERAND (exp, 0);
12761 return exp;
12764 /* Avoid any floating point extensions from EXP. */
12765 tree
12766 strip_float_extensions (tree exp)
12768 tree sub, expt, subt;
12770 /* For a floating-point constant, look up the narrowest type that can hold
12771 it properly and handle it like (type)(narrowest_type)constant.
12772 This way we can optimize for instance a=a*2.0 where "a" is float
12773 but 2.0 is a double constant. */
12774 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12776 REAL_VALUE_TYPE orig;
12777 tree type = NULL;
12779 orig = TREE_REAL_CST (exp);
12780 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12781 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12782 type = float_type_node;
12783 else if (TYPE_PRECISION (TREE_TYPE (exp))
12784 > TYPE_PRECISION (double_type_node)
12785 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12786 type = double_type_node;
12787 if (type)
12788 return build_real_truncate (type, orig);
12791 if (!CONVERT_EXPR_P (exp))
12792 return exp;
12794 sub = TREE_OPERAND (exp, 0);
12795 subt = TREE_TYPE (sub);
12796 expt = TREE_TYPE (exp);
12798 if (!FLOAT_TYPE_P (subt))
12799 return exp;
12801 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12802 return exp;
12804 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12805 return exp;
12807 return strip_float_extensions (sub);
12810 /* Strip out all handled components that produce invariant
12811 offsets. */
12813 const_tree
12814 strip_invariant_refs (const_tree op)
12816 while (handled_component_p (op))
12818 switch (TREE_CODE (op))
12820 case ARRAY_REF:
12821 case ARRAY_RANGE_REF:
12822 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12823 || TREE_OPERAND (op, 2) != NULL_TREE
12824 || TREE_OPERAND (op, 3) != NULL_TREE)
12825 return NULL;
12826 break;
12828 case COMPONENT_REF:
12829 if (TREE_OPERAND (op, 2) != NULL_TREE)
12830 return NULL;
12831 break;
12833 default:;
12835 op = TREE_OPERAND (op, 0);
12838 return op;
12841 static GTY(()) tree gcc_eh_personality_decl;
12843 /* Return the GCC personality function decl. */
12845 tree
12846 lhd_gcc_personality (void)
12848 if (!gcc_eh_personality_decl)
12849 gcc_eh_personality_decl = build_personality_function ("gcc");
12850 return gcc_eh_personality_decl;
12853 /* TARGET is a call target of GIMPLE call statement
12854 (obtained by gimple_call_fn). Return true if it is
12855 OBJ_TYPE_REF representing a virtual call of a C++ method.
12856 (As opposed to OBJ_TYPE_REF representing objc calls
12857 through a cast where middle-end devirtualization machinery
12858 can't apply.) FOR_DUMP_P is true when being called from
12859 the dump routines. */
12861 bool
12862 virtual_method_call_p (const_tree target, bool for_dump_p)
12864 if (TREE_CODE (target) != OBJ_TYPE_REF)
12865 return false;
12866 tree t = TREE_TYPE (target);
12867 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12868 t = TREE_TYPE (t);
12869 if (TREE_CODE (t) == FUNCTION_TYPE)
12870 return false;
12871 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12872 /* If we do not have BINFO associated, it means that type was built
12873 without devirtualization enabled. Do not consider this a virtual
12874 call. */
12875 if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
12876 return false;
12877 return true;
12880 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12882 static tree
12883 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12885 unsigned int i;
12886 tree base_binfo, b;
12888 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12889 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12890 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12891 return base_binfo;
12892 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12893 return b;
12894 return NULL;
12897 /* Try to find a base info of BINFO that would have its field decl at offset
12898 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12899 found, return it; otherwise return NULL_TREE. */
12901 tree
12902 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12904 tree type = BINFO_TYPE (binfo);
12906 while (true)
12908 HOST_WIDE_INT pos, size;
12909 tree fld;
12910 int i;
12912 if (types_same_for_odr (type, expected_type))
12913 return binfo;
12914 if (maybe_lt (offset, 0))
12915 return NULL_TREE;
12917 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12919 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12920 continue;
12922 pos = int_bit_position (fld);
12923 size = tree_to_uhwi (DECL_SIZE (fld));
12924 if (known_in_range_p (offset, pos, size))
12925 break;
12927 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12928 return NULL_TREE;
12930 /* Offset 0 indicates the primary base, whose vtable contents are
12931 represented in the binfo for the derived class. */
12932 else if (maybe_ne (offset, 0))
12934 tree found_binfo = NULL, base_binfo;
12935 /* Offsets in BINFO are in bytes relative to the whole structure
12936 while POS is in bits relative to the containing field. */
12937 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12938 / BITS_PER_UNIT);
12940 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12941 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12942 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12944 found_binfo = base_binfo;
12945 break;
12947 if (found_binfo)
12948 binfo = found_binfo;
12949 else
12950 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12951 binfo_offset);
12954 type = TREE_TYPE (fld);
12955 offset -= pos;
12959 /* Returns true if X is a typedef decl. */
12961 bool
12962 is_typedef_decl (const_tree x)
12964 return (x && TREE_CODE (x) == TYPE_DECL
12965 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12968 /* Returns true iff TYPE is a type variant created for a typedef. */
12970 bool
12971 typedef_variant_p (const_tree type)
12973 return is_typedef_decl (TYPE_NAME (type));
12976 /* PR 84195: Replace control characters in "unescaped" with their
12977 escaped equivalents. Allow newlines if -fmessage-length has
12978 been set to a non-zero value. This is done here, rather than
12979 where the attribute is recorded, as the message length can
12980 change between these two locations. */
12982 void
12983 escaped_string::escape (const char *unescaped)
12985 char *escaped;
12986 size_t i, new_i, len;
12988 if (m_owned)
12989 free (m_str);
12991 m_str = const_cast<char *> (unescaped);
12992 m_owned = false;
12994 if (unescaped == NULL || *unescaped == 0)
12995 return;
12997 len = strlen (unescaped);
12998 escaped = NULL;
12999 new_i = 0;
13001 for (i = 0; i < len; i++)
13003 char c = unescaped[i];
13005 if (!ISCNTRL (c))
13007 if (escaped)
13008 escaped[new_i++] = c;
13009 continue;
13012 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
13014 if (escaped == NULL)
13016 /* We only allocate space for a new string if we
13017 actually encounter a control character that
13018 needs replacing. */
13019 escaped = (char *) xmalloc (len * 2 + 1);
13020 strncpy (escaped, unescaped, i);
13021 new_i = i;
13024 escaped[new_i++] = '\\';
13026 switch (c)
13028 case '\a': escaped[new_i++] = 'a'; break;
13029 case '\b': escaped[new_i++] = 'b'; break;
13030 case '\f': escaped[new_i++] = 'f'; break;
13031 case '\n': escaped[new_i++] = 'n'; break;
13032 case '\r': escaped[new_i++] = 'r'; break;
13033 case '\t': escaped[new_i++] = 't'; break;
13034 case '\v': escaped[new_i++] = 'v'; break;
13035 default: escaped[new_i++] = '?'; break;
13038 else if (escaped)
13039 escaped[new_i++] = c;
13042 if (escaped)
13044 escaped[new_i] = 0;
13045 m_str = escaped;
13046 m_owned = true;
13050 /* Warn about a use of an identifier which was marked deprecated. Returns
13051 whether a warning was given. */
13053 bool
13054 warn_deprecated_use (tree node, tree attr)
13056 escaped_string msg;
13058 if (node == 0 || !warn_deprecated_decl)
13059 return false;
13061 if (!attr)
13063 if (DECL_P (node))
13064 attr = DECL_ATTRIBUTES (node);
13065 else if (TYPE_P (node))
13067 tree decl = TYPE_STUB_DECL (node);
13068 if (decl)
13069 attr = lookup_attribute ("deprecated",
13070 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
13074 if (attr)
13075 attr = lookup_attribute ("deprecated", attr);
13077 if (attr)
13078 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
13080 bool w = false;
13081 if (DECL_P (node))
13083 auto_diagnostic_group d;
13084 if (msg)
13085 w = warning (OPT_Wdeprecated_declarations,
13086 "%qD is deprecated: %s", node, (const char *) msg);
13087 else
13088 w = warning (OPT_Wdeprecated_declarations,
13089 "%qD is deprecated", node);
13090 if (w)
13091 inform (DECL_SOURCE_LOCATION (node), "declared here");
13093 else if (TYPE_P (node))
13095 tree what = NULL_TREE;
13096 tree decl = TYPE_STUB_DECL (node);
13098 if (TYPE_NAME (node))
13100 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
13101 what = TYPE_NAME (node);
13102 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
13103 && DECL_NAME (TYPE_NAME (node)))
13104 what = DECL_NAME (TYPE_NAME (node));
13107 auto_diagnostic_group d;
13108 if (what)
13110 if (msg)
13111 w = warning (OPT_Wdeprecated_declarations,
13112 "%qE is deprecated: %s", what, (const char *) msg);
13113 else
13114 w = warning (OPT_Wdeprecated_declarations,
13115 "%qE is deprecated", what);
13117 else
13119 if (msg)
13120 w = warning (OPT_Wdeprecated_declarations,
13121 "type is deprecated: %s", (const char *) msg);
13122 else
13123 w = warning (OPT_Wdeprecated_declarations,
13124 "type is deprecated");
13127 if (w && decl)
13128 inform (DECL_SOURCE_LOCATION (decl), "declared here");
13131 return w;
13134 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
13135 somewhere in it. */
13137 bool
13138 contains_bitfld_component_ref_p (const_tree ref)
13140 while (handled_component_p (ref))
13142 if (TREE_CODE (ref) == COMPONENT_REF
13143 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
13144 return true;
13145 ref = TREE_OPERAND (ref, 0);
13148 return false;
13151 /* Try to determine whether a TRY_CATCH expression can fall through.
13152 This is a subroutine of block_may_fallthru. */
13154 static bool
13155 try_catch_may_fallthru (const_tree stmt)
13157 tree_stmt_iterator i;
13159 /* If the TRY block can fall through, the whole TRY_CATCH can
13160 fall through. */
13161 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
13162 return true;
13164 i = tsi_start (TREE_OPERAND (stmt, 1));
13165 switch (TREE_CODE (tsi_stmt (i)))
13167 case CATCH_EXPR:
13168 /* We expect to see a sequence of CATCH_EXPR trees, each with a
13169 catch expression and a body. The whole TRY_CATCH may fall
13170 through iff any of the catch bodies falls through. */
13171 for (; !tsi_end_p (i); tsi_next (&i))
13173 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
13174 return true;
13176 return false;
13178 case EH_FILTER_EXPR:
13179 /* The exception filter expression only matters if there is an
13180 exception. If the exception does not match EH_FILTER_TYPES,
13181 we will execute EH_FILTER_FAILURE, and we will fall through
13182 if that falls through. If the exception does match
13183 EH_FILTER_TYPES, the stack unwinder will continue up the
13184 stack, so we will not fall through. We don't know whether we
13185 will throw an exception which matches EH_FILTER_TYPES or not,
13186 so we just ignore EH_FILTER_TYPES and assume that we might
13187 throw an exception which doesn't match. */
13188 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
13190 default:
13191 /* This case represents statements to be executed when an
13192 exception occurs. Those statements are implicitly followed
13193 by a RESX statement to resume execution after the exception.
13194 So in this case the TRY_CATCH never falls through. */
13195 return false;
13199 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
13200 need not be 100% accurate; simply be conservative and return true if we
13201 don't know. This is used only to avoid stupidly generating extra code.
13202 If we're wrong, we'll just delete the extra code later. */
13204 bool
13205 block_may_fallthru (const_tree block)
13207 /* This CONST_CAST is okay because expr_last returns its argument
13208 unmodified and we assign it to a const_tree. */
13209 const_tree stmt = expr_last (CONST_CAST_TREE (block));
13211 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
13213 case GOTO_EXPR:
13214 case RETURN_EXPR:
13215 /* Easy cases. If the last statement of the block implies
13216 control transfer, then we can't fall through. */
13217 return false;
13219 case SWITCH_EXPR:
13220 /* If there is a default: label or case labels cover all possible
13221 SWITCH_COND values, then the SWITCH_EXPR will transfer control
13222 to some case label in all cases and all we care is whether the
13223 SWITCH_BODY falls through. */
13224 if (SWITCH_ALL_CASES_P (stmt))
13225 return block_may_fallthru (SWITCH_BODY (stmt));
13226 return true;
13228 case COND_EXPR:
13229 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
13230 return true;
13231 return block_may_fallthru (COND_EXPR_ELSE (stmt));
13233 case BIND_EXPR:
13234 return block_may_fallthru (BIND_EXPR_BODY (stmt));
13236 case TRY_CATCH_EXPR:
13237 return try_catch_may_fallthru (stmt);
13239 case TRY_FINALLY_EXPR:
13240 /* The finally clause is always executed after the try clause,
13241 so if it does not fall through, then the try-finally will not
13242 fall through. Otherwise, if the try clause does not fall
13243 through, then when the finally clause falls through it will
13244 resume execution wherever the try clause was going. So the
13245 whole try-finally will only fall through if both the try
13246 clause and the finally clause fall through. */
13247 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
13248 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
13250 case EH_ELSE_EXPR:
13251 return block_may_fallthru (TREE_OPERAND (stmt, 0));
13253 case MODIFY_EXPR:
13254 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
13255 stmt = TREE_OPERAND (stmt, 1);
13256 else
13257 return true;
13258 /* FALLTHRU */
13260 case CALL_EXPR:
13261 /* Functions that do not return do not fall through. */
13262 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
13264 case CLEANUP_POINT_EXPR:
13265 return block_may_fallthru (TREE_OPERAND (stmt, 0));
13267 case TARGET_EXPR:
13268 return block_may_fallthru (TREE_OPERAND (stmt, 1));
13270 case ERROR_MARK:
13271 return true;
13273 default:
13274 return lang_hooks.block_may_fallthru (stmt);
13278 /* True if we are using EH to handle cleanups. */
13279 static bool using_eh_for_cleanups_flag = false;
13281 /* This routine is called from front ends to indicate eh should be used for
13282 cleanups. */
13283 void
13284 using_eh_for_cleanups (void)
13286 using_eh_for_cleanups_flag = true;
13289 /* Query whether EH is used for cleanups. */
13290 bool
13291 using_eh_for_cleanups_p (void)
13293 return using_eh_for_cleanups_flag;
13296 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
13297 const char *
13298 get_tree_code_name (enum tree_code code)
13300 const char *invalid = "<invalid tree code>";
13302 /* The tree_code enum promotes to signed, but we could be getting
13303 invalid values, so force an unsigned comparison. */
13304 if (unsigned (code) >= MAX_TREE_CODES)
13306 if (code == 0xa5a5)
13307 return "ggc_freed";
13308 return invalid;
13311 return tree_code_name[code];
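/* Editor's note: worked example, not part of the original file.
   get_tree_code_name (INTEGER_CST) returns "integer_cst"; a code equal to
   the GGC free poison value 0xa5a5 is reported as "ggc_freed", and any
   other out-of-range value as "<invalid tree code>".  */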
13314 /* Drops the TREE_OVERFLOW flag from T. */
13316 tree
13317 drop_tree_overflow (tree t)
13319 gcc_checking_assert (TREE_OVERFLOW (t));
13321 /* For tree codes with a sharing machinery re-build the result. */
13322 if (poly_int_tree_p (t))
13323 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
13325 /* For VECTOR_CST, remove the overflow bits from the encoded elements
13326 and canonicalize the result. */
13327 if (TREE_CODE (t) == VECTOR_CST)
13329 tree_vector_builder builder;
13330 builder.new_unary_operation (TREE_TYPE (t), t, true);
13331 unsigned int count = builder.encoded_nelts ();
13332 for (unsigned int i = 0; i < count; ++i)
13334 tree elt = VECTOR_CST_ELT (t, i);
13335 if (TREE_OVERFLOW (elt))
13336 elt = drop_tree_overflow (elt);
13337 builder.quick_push (elt);
13339 return builder.build ();
13342 /* Otherwise, as all tcc_constants are possibly shared, copy the node
13343 and drop the flag. */
13344 t = copy_node (t);
13345 TREE_OVERFLOW (t) = 0;
13347 /* For constants that contain nested constants, drop the flag
13348 from those as well. */
13349 if (TREE_CODE (t) == COMPLEX_CST)
13351 if (TREE_OVERFLOW (TREE_REALPART (t)))
13352 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
13353 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
13354 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
13357 return t;
13360 /* Given a memory reference expression T, return its base address.
13361 The base address of a memory reference expression is the main
13362 object being referenced. For instance, the base address for
13363 'array[i].fld[j]' is 'array'. You can think of this as stripping
13364 away the offset part from a memory address.
13366 This function calls handled_component_p to strip away all the inner
13367 parts of the memory reference until it reaches the base object. */
13369 tree
13370 get_base_address (tree t)
13372 while (handled_component_p (t))
13373 t = TREE_OPERAND (t, 0);
13375 if ((TREE_CODE (t) == MEM_REF
13376 || TREE_CODE (t) == TARGET_MEM_REF)
13377 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
13378 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
13380 /* ??? Either the alias oracle or all callers need to properly deal
13381 with WITH_SIZE_EXPRs before we can look through those. */
13382 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13383 return NULL_TREE;
13385 return t;
13388 /* Return a tree of sizetype representing the size, in bytes, of the element
13389 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13391 tree
13392 array_ref_element_size (tree exp)
13394 tree aligned_size = TREE_OPERAND (exp, 3);
13395 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
13396 location_t loc = EXPR_LOCATION (exp);
13398 /* If a size was specified in the ARRAY_REF, it's the size measured
13399 in alignment units of the element type. So multiply by that value. */
13400 if (aligned_size)
13402 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13403 sizetype from another type of the same width and signedness. */
13404 if (TREE_TYPE (aligned_size) != sizetype)
13405 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
13406 return size_binop_loc (loc, MULT_EXPR, aligned_size,
13407 size_int (TYPE_ALIGN_UNIT (elmt_type)));
13410 /* Otherwise, take the size from that of the element type. Substitute
13411 any PLACEHOLDER_EXPR that we have. */
13412 else
13413 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
13416 /* Return a tree representing the lower bound of the array mentioned in
13417 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13419 tree
13420 array_ref_low_bound (tree exp)
13422 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13424 /* If a lower bound is specified in EXP, use it. */
13425 if (TREE_OPERAND (exp, 2))
13426 return TREE_OPERAND (exp, 2);
13428 /* Otherwise, if there is a domain type and it has a lower bound, use it,
13429 substituting for a PLACEHOLDER_EXPR as needed. */
13430 if (domain_type && TYPE_MIN_VALUE (domain_type))
13431 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13433 /* Otherwise, return a zero of the appropriate type. */
13434 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
13435 return (idxtype == error_mark_node
13436 ? integer_zero_node : build_int_cst (idxtype, 0));
13439 /* Return a tree representing the upper bound of the array mentioned in
13440 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13442 tree
13443 array_ref_up_bound (tree exp)
13445 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13447 /* If there is a domain type and it has an upper bound, use it, substituting
13448 for a PLACEHOLDER_EXPR as needed. */
13449 if (domain_type && TYPE_MAX_VALUE (domain_type))
13450 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13452 /* Otherwise fail. */
13453 return NULL_TREE;
13456 /* Returns true if REF is an array reference, component reference,
13457 or memory reference to an array at the end of a structure.
13458 If this is the case, the array may be allocated larger
13459 than its upper bound implies. */
13461 bool
13462 array_at_struct_end_p (tree ref)
13464 tree atype;
13466 if (TREE_CODE (ref) == ARRAY_REF
13467 || TREE_CODE (ref) == ARRAY_RANGE_REF)
13469 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
13470 ref = TREE_OPERAND (ref, 0);
13472 else if (TREE_CODE (ref) == COMPONENT_REF
13473 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
13474 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
13475 else if (TREE_CODE (ref) == MEM_REF)
13477 tree arg = TREE_OPERAND (ref, 0);
13478 if (TREE_CODE (arg) == ADDR_EXPR)
13479 arg = TREE_OPERAND (arg, 0);
13480 tree argtype = TREE_TYPE (arg);
13481 if (TREE_CODE (argtype) == RECORD_TYPE)
13483 if (tree fld = last_field (argtype))
13485 atype = TREE_TYPE (fld);
13486 if (TREE_CODE (atype) != ARRAY_TYPE)
13487 return false;
13488 if (VAR_P (arg) && DECL_SIZE (fld))
13489 return false;
13491 else
13492 return false;
13494 else
13495 return false;
13497 else
13498 return false;
13500 if (TREE_CODE (ref) == STRING_CST)
13501 return false;
13503 tree ref_to_array = ref;
13504 while (handled_component_p (ref))
13506 /* If the reference chain contains a component reference to a
13507 non-union type and another field follows, the reference
13508 is not at the end of a structure. */
13509 if (TREE_CODE (ref) == COMPONENT_REF)
13511 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
13513 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
13514 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
13515 nextf = DECL_CHAIN (nextf);
13516 if (nextf)
13517 return false;
13520 /* If we have a multi-dimensional array we do not consider
13521 a non-innermost dimension as flex array if the whole
13522 multi-dimensional array is at struct end.
13523 Same for an array of aggregates with a trailing array
13524 member. */
13525 else if (TREE_CODE (ref) == ARRAY_REF)
13526 return false;
13527 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
13529 /* If we view an underlying object as something else, then what we
13530 have gathered up to now is what we have to rely on. */
13531 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
13532 break;
13533 else
13534 gcc_unreachable ();
13536 ref = TREE_OPERAND (ref, 0);
13539 /* The array is now at struct end. Treat flexible arrays as
13540 always subject to extension, even into mere padding constrained by
13541 an underlying decl. */
13542 if (! TYPE_SIZE (atype)
13543 || ! TYPE_DOMAIN (atype)
13544 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13545 return true;
13547 if (TREE_CODE (ref) == MEM_REF
13548 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
13549 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
13551 /* If the reference is based on a declared entity, the size of the array
13552 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
13553 if (DECL_P (ref)
13554 && !(flag_unconstrained_commons
13555 && VAR_P (ref) && DECL_COMMON (ref))
13556 && DECL_SIZE_UNIT (ref)
13557 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
13559 /* Check whether the array domain covers all of the available
13560 padding. */
13561 poly_int64 offset;
13562 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
13563 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
13564 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
13565 return true;
13566 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
13567 return true;
13569 /* If at least one extra element fits it is a flexarray. */
13570 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
13571 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
13572 + 2)
13573 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
13574 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
13575 return true;
13577 return false;
13580 return true;
13583 /* Return a tree representing the offset, in bytes, of the field referenced
13584 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13586 tree
13587 component_ref_field_offset (tree exp)
13589 tree aligned_offset = TREE_OPERAND (exp, 2);
13590 tree field = TREE_OPERAND (exp, 1);
13591 location_t loc = EXPR_LOCATION (exp);
13593 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13594 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13595 value. */
13596 if (aligned_offset)
13598 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13599 sizetype from another type of the same width and signedness. */
13600 if (TREE_TYPE (aligned_offset) != sizetype)
13601 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13602 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13603 size_int (DECL_OFFSET_ALIGN (field)
13604 / BITS_PER_UNIT));
13607 /* Otherwise, take the offset from that of the field. Substitute
13608 any PLACEHOLDER_EXPR that we have. */
13609 else
13610 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
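/* A minimal usage sketch (illustrative; EXP is assumed to be a
   COMPONENT_REF such as one built for "s.f"):

     tree field    = TREE_OPERAND (exp, 1);
     tree byte_off = component_ref_field_offset (exp);
     tree bit_off  = DECL_FIELD_BIT_OFFSET (field);

   The full bit position of the referenced field is then
   BYTE_OFF * BITS_PER_UNIT + BIT_OFF, mirroring how DECL_FIELD_OFFSET and
   DECL_FIELD_BIT_OFFSET together encode a field's position.  */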
13613 /* Given the initializer INIT, return the initializer for the field
13614 DECL if it exists, otherwise null. Used to obtain the initializer
13615 for a flexible array member and determine its size. */
13617 static tree
13618 get_initializer_for (tree init, tree decl)
13620 STRIP_NOPS (init);
13622 tree fld, fld_init;
13623 unsigned HOST_WIDE_INT i;
13624 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
13626 if (decl == fld)
13627 return fld_init;
13629 if (TREE_CODE (fld) == CONSTRUCTOR)
13631 fld_init = get_initializer_for (fld_init, decl);
13632 if (fld_init)
13633 return fld_init;
13637 return NULL_TREE;
13640 /* Determines the size of the member referenced by the COMPONENT_REF
13641 REF, using its initializer expression if necessary in order to
13642 determine the size of an initialized flexible array member.
13643 If non-null, *INTERIOR_ZERO_LENGTH is set when REF refers to
13644 an interior zero-length array.
13645 Returns the size as sizetype (which might be zero for an object
13646 with an uninitialized flexible array member) or null if the size
13647 cannot be determined. */
13649 tree
13650 component_ref_size (tree ref, bool *interior_zero_length /* = NULL */)
13652 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13654 bool int_0_len = false;
13655 if (!interior_zero_length)
13656 interior_zero_length = &int_0_len;
13658 /* The object/argument referenced by the COMPONENT_REF and its type. */
13659 tree arg = TREE_OPERAND (ref, 0);
13660 tree argtype = TREE_TYPE (arg);
13661 /* The referenced member. */
13662 tree member = TREE_OPERAND (ref, 1);
13664 tree memsize = DECL_SIZE_UNIT (member);
13665 if (memsize)
13667 tree memtype = TREE_TYPE (member);
13668 if (TREE_CODE (memtype) != ARRAY_TYPE)
13669 return memsize;
13671 bool trailing = array_at_struct_end_p (ref);
13672 bool zero_length = integer_zerop (memsize);
13673 if (!trailing && !zero_length)
13674 /* MEMBER is either an interior array or an array with
13675 more than one element. */
13676 return memsize;
13678 *interior_zero_length = zero_length && !trailing;
13679 if (*interior_zero_length)
13680 memsize = NULL_TREE;
13682 if (!zero_length)
13683 if (tree dom = TYPE_DOMAIN (memtype))
13684 if (tree min = TYPE_MIN_VALUE (dom))
13685 if (tree max = TYPE_MAX_VALUE (dom))
13686 if (TREE_CODE (min) == INTEGER_CST
13687 && TREE_CODE (max) == INTEGER_CST)
13689 offset_int minidx = wi::to_offset (min);
13690 offset_int maxidx = wi::to_offset (max);
13691 if (maxidx - minidx > 0)
13692 /* MEMBER is an array with more than one element. */
13693 return memsize;
13696 /* For a reference to a zero- or one-element array member of a union,
13697 use the size of the union instead of the size of the member. */
13698 if (TREE_CODE (argtype) == UNION_TYPE)
13699 memsize = TYPE_SIZE_UNIT (argtype);
13702 /* MEMBER is either a bona fide flexible array member, or a zero-length
13703 array member, or an array of length one treated as such. */
13705 /* If the reference is to a declared object and the member a true
13706 flexible array, try to determine its size from its initializer. */
13707 poly_int64 baseoff = 0;
13708 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
13709 if (!base || !VAR_P (base))
13711 if (!*interior_zero_length)
13712 return NULL_TREE;
13714 if (TREE_CODE (arg) != COMPONENT_REF)
13715 return NULL_TREE;
13717 base = arg;
13718 while (TREE_CODE (base) == COMPONENT_REF)
13719 base = TREE_OPERAND (base, 0);
13720 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
13723 /* BASE is the declared object of which MEMBER is a member, or an
13724 object that is cast to ARGTYPE (e.g., a char buffer used to store
13725 an ARGTYPE object). */
13726 tree basetype = TREE_TYPE (base);
13728 /* Determine the base type of the referenced object. If it's
13729 the same as ARGTYPE and MEMBER has a known size, return it. */
13730 tree bt = basetype;
13731 if (!*interior_zero_length)
13732 while (TREE_CODE (bt) == ARRAY_TYPE)
13733 bt = TREE_TYPE (bt);
13734 bool typematch = useless_type_conversion_p (argtype, bt);
13735 if (memsize && typematch)
13736 return memsize;
13738 memsize = NULL_TREE;
13740 if (typematch)
13741 /* MEMBER is a true flexible array member. Compute its size from
13742 the initializer of the BASE object if it has one. */
13743 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
13744 if (init != error_mark_node)
13746 init = get_initializer_for (init, member);
13747 if (init)
13749 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13750 if (tree refsize = TYPE_SIZE_UNIT (argtype))
13752 /* Use the larger of the initializer size and the tail
13753 padding in the enclosing struct. */
13754 poly_int64 rsz = tree_to_poly_int64 (refsize);
13755 rsz -= baseoff;
13756 if (known_lt (tree_to_poly_int64 (memsize), rsz))
13757 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13760 baseoff = 0;
13764 if (!memsize)
13766 if (typematch)
13768 if (DECL_P (base)
13769 && DECL_EXTERNAL (base)
13770 && bt == basetype
13771 && !*interior_zero_length)
13772 /* The size of a flexible array member of an extern struct
13773 with no initializer cannot be determined (it's defined
13774 in another translation unit and can have an initializer
13775 with an arbitrary number of elements). */
13776 return NULL_TREE;
13778 /* Use the size of the base struct or, for interior zero-length
13779 arrays, the size of the enclosing type. */
13780 memsize = TYPE_SIZE_UNIT (bt);
13782 else if (DECL_P (base))
13783 /* Use the size of the BASE object (possibly an array of some
13784 other type such as char used to store the struct). */
13785 memsize = DECL_SIZE_UNIT (base);
13786 else
13787 return NULL_TREE;
13790 /* If the flexible array member has a known size, use the greater
13791 of it and the tail padding in the enclosing struct.
13792 Otherwise, when the size of the flexible array member is unknown
13793 and the referenced object is not a struct, use the size of its
13794 type when known. This detects sizes of array buffers when cast
13795 to struct types with flexible array members. */
13796 if (memsize)
13798 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13799 if (known_lt (baseoff, memsz64))
13801 memsz64 -= baseoff;
13802 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13804 return size_zero_node;
13807 /* Return "don't know" for an external non-array object since its
13808 flexible array member can be initialized to have any number of
13809 elements. Otherwise, return zero because the flexible array
13810 member has no elements. */
13811 return (DECL_P (base)
13812 && DECL_EXTERNAL (base)
13813 && (!typematch
13814 || TREE_CODE (basetype) != ARRAY_TYPE)
13815 ? NULL_TREE : size_zero_node);
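/* A usage sketch (illustrative, assuming REF is the COMPONENT_REF built for
   p->data given "struct S { int n; char data[]; } *p"):

     bool interior = false;
     tree size = component_ref_size (ref, &interior);

   SIZE is then the accessible size of the member in bytes as a sizetype
   tree, possibly derived from the initializer of the object P points to,
   or NULL_TREE when it cannot be determined; INTERIOR is only set when the
   member is an interior zero-length array rather than a trailing flexible
   array member.  */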
13818 /* Return the machine mode of T. For vectors, returns the mode of the
13819 inner type. The main use case is to feed the result to HONOR_NANS,
13820 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13822 machine_mode
13823 element_mode (const_tree t)
13825 if (!TYPE_P (t))
13826 t = TREE_TYPE (t);
13827 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13828 t = TREE_TYPE (t);
13829 return TYPE_MODE (t);
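/* For example (a sketch, with EXPR standing for any expression or type of
   interest): code that asks about NaN semantics typically goes through

     if (HONOR_NANS (element_mode (expr)))

   so that COMPLEX_TYPE and VECTOR_TYPE operands are tested via their
   element mode instead of a composite (possibly BLKmode) TYPE_MODE.  */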
13832 /* Vector types need to re-check the target flags each time we report
13833 the machine mode. We need to do this because attribute target can
13834 change the result of vector_mode_supported_p and have_regs_of_mode
13835 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13836 change on a per-function basis. */
13837 /* ??? Possibly a better solution is to run through all the types
13838 referenced by a function and re-compute the TYPE_MODE once, rather
13839 than make the TYPE_MODE macro call a function. */
13841 machine_mode
13842 vector_type_mode (const_tree t)
13844 machine_mode mode;
13846 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13848 mode = t->type_common.mode;
13849 if (VECTOR_MODE_P (mode)
13850 && (!targetm.vector_mode_supported_p (mode)
13851 || !have_regs_of_mode[mode]))
13853 scalar_int_mode innermode;
13855 /* For integers, try mapping it to a same-sized scalar mode. */
13856 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13858 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13859 * GET_MODE_BITSIZE (innermode));
13860 scalar_int_mode mode;
13861 if (int_mode_for_size (size, 0).exists (&mode)
13862 && have_regs_of_mode[mode])
13863 return mode;
13866 return BLKmode;
13869 return mode;
13872 /* Return the size in bits of each element of vector type TYPE. */
13874 unsigned int
13875 vector_element_bits (const_tree type)
13877 gcc_checking_assert (VECTOR_TYPE_P (type));
13878 if (VECTOR_BOOLEAN_TYPE_P (type))
13879 return vector_element_size (tree_to_poly_uint64 (TYPE_SIZE (type)),
13880 TYPE_VECTOR_SUBPARTS (type));
13881 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
13884 /* Calculate the size in bits of each element of vector type TYPE
13885 and return the result as a tree of type bitsizetype. */
13887 tree
13888 vector_element_bits_tree (const_tree type)
13890 gcc_checking_assert (VECTOR_TYPE_P (type));
13891 if (VECTOR_BOOLEAN_TYPE_P (type))
13892 return bitsize_int (vector_element_bits (type));
13893 return TYPE_SIZE (TREE_TYPE (type));
13896 /* Verify that basic properties of T match TV and thus T can be a variant of
13897 TV. TV should be the more specified variant (i.e. the main variant). */
13899 static bool
13900 verify_type_variant (const_tree t, tree tv)
13902 /* A type variant can differ by:
13904 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13905 ENCODE_QUAL_ADDR_SPACE.
13906 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13907 in this case some values may not be set in the variant types
13908 (see TYPE_COMPLETE_P checks).
13909 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13910 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13911 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13912 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13913 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13914 this is necessary to make it possible to merge types from different TUs
13915 - arrays, pointers and references may have TREE_TYPE that is a variant
13916 of TREE_TYPE of their main variants.
13917 - aggregates may have a new TYPE_FIELDS list that lists variants of
13918 the main variant's TYPE_FIELDS.
13919 - vector types may differ by TYPE_VECTOR_OPAQUE
13922 /* Convenience macro for matching individual fields. */
13923 #define verify_variant_match(flag) \
13924 do { \
13925 if (flag (tv) != flag (t)) \
13927 error ("type variant differs by %s", #flag); \
13928 debug_tree (tv); \
13929 return false; \
13931 } while (false)
13933 /* tree_base checks. */
13935 verify_variant_match (TREE_CODE);
13936 /* FIXME: Ada builds non-artificial variants of artificial types. */
13937 if (TYPE_ARTIFICIAL (tv) && 0)
13938 verify_variant_match (TYPE_ARTIFICIAL);
13939 if (POINTER_TYPE_P (tv))
13940 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13941 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13942 verify_variant_match (TYPE_UNSIGNED);
13943 verify_variant_match (TYPE_PACKED);
13944 if (TREE_CODE (t) == REFERENCE_TYPE)
13945 verify_variant_match (TYPE_REF_IS_RVALUE);
13946 if (AGGREGATE_TYPE_P (t))
13947 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13948 else
13949 verify_variant_match (TYPE_SATURATING);
13950 /* FIXME: This check triggers during the libstdc++ build. */
13951 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13952 verify_variant_match (TYPE_FINAL_P);
13954 /* tree_type_common checks. */
13956 if (COMPLETE_TYPE_P (t))
13958 verify_variant_match (TYPE_MODE);
13959 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13960 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13961 verify_variant_match (TYPE_SIZE);
13962 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13963 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13964 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13966 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13967 TYPE_SIZE_UNIT (tv), 0));
13968 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13969 debug_tree (tv);
13970 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13971 debug_tree (TYPE_SIZE_UNIT (tv));
13972 error ("type%'s %<TYPE_SIZE_UNIT%>");
13973 debug_tree (TYPE_SIZE_UNIT (t));
13974 return false;
13976 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13978 verify_variant_match (TYPE_PRECISION);
13979 if (RECORD_OR_UNION_TYPE_P (t))
13980 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13981 else if (TREE_CODE (t) == ARRAY_TYPE)
13982 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13983 /* During LTO we merge variant lists from different translation units
13984 that may differ by TYPE_CONTEXT, which in turn may point
13985 to a TRANSLATION_UNIT_DECL.
13986 Ada also builds variants of types with different TYPE_CONTEXT. */
13987 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13988 verify_variant_match (TYPE_CONTEXT);
13989 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13990 verify_variant_match (TYPE_STRING_FLAG);
13991 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13992 verify_variant_match (TYPE_CXX_ODR_P);
13993 if (TYPE_ALIAS_SET_KNOWN_P (t))
13995 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13996 debug_tree (tv);
13997 return false;
14000 /* tree_type_non_common checks. */
14002 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14003 and dangles the pointer from time to time. */
14004 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
14005 && (in_lto_p || !TYPE_VFIELD (tv)
14006 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
14008 error ("type variant has different %<TYPE_VFIELD%>");
14009 debug_tree (tv);
14010 return false;
14012 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
14013 || TREE_CODE (t) == INTEGER_TYPE
14014 || TREE_CODE (t) == BOOLEAN_TYPE
14015 || TREE_CODE (t) == REAL_TYPE
14016 || TREE_CODE (t) == FIXED_POINT_TYPE)
14018 verify_variant_match (TYPE_MAX_VALUE);
14019 verify_variant_match (TYPE_MIN_VALUE);
14021 if (TREE_CODE (t) == METHOD_TYPE)
14022 verify_variant_match (TYPE_METHOD_BASETYPE);
14023 if (TREE_CODE (t) == OFFSET_TYPE)
14024 verify_variant_match (TYPE_OFFSET_BASETYPE);
14025 if (TREE_CODE (t) == ARRAY_TYPE)
14026 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
14027 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
14028 or even in the type's main variant. This is needed to make the bootstrap
14029 pass; the bug seems new in GCC 5.
14030 The C++ FE should be updated to make this consistent and we should check
14031 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and that otherwise
14032 there is a match with the main variant.
14034 Also disable the check for Java for now because of a parser hack that
14035 builds a dummy BINFO first and then sometimes replaces it with the real
14036 BINFO in some of the copies. */
14037 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
14038 && TYPE_BINFO (t) != TYPE_BINFO (tv)
14039 /* FIXME: Java sometimes keeps such dummy TYPE_BINFOs on variant types.
14040 Since there is no cheap way to tell a C++ type from a Java type without
14041 LTO, do the checking at LTO time only. */
14042 && (in_lto_p && odr_type_p (t)))
14044 error ("type variant has different %<TYPE_BINFO%>");
14045 debug_tree (tv);
14046 error ("type variant%'s %<TYPE_BINFO%>");
14047 debug_tree (TYPE_BINFO (tv));
14048 error ("type%'s %<TYPE_BINFO%>");
14049 debug_tree (TYPE_BINFO (t));
14050 return false;
14053 /* Check various uses of TYPE_VALUES_RAW. */
14054 if (TREE_CODE (t) == ENUMERAL_TYPE
14055 && TYPE_VALUES (t))
14056 verify_variant_match (TYPE_VALUES);
14057 else if (TREE_CODE (t) == ARRAY_TYPE)
14058 verify_variant_match (TYPE_DOMAIN);
14059 /* Permit incomplete variants of a complete type. While FEs may complete
14060 all variants, this does not happen for C++ templates in all cases. */
14061 else if (RECORD_OR_UNION_TYPE_P (t)
14062 && COMPLETE_TYPE_P (t)
14063 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
14065 tree f1, f2;
14067 /* Fortran builds qualified variants as new records with items of
14068 qualified type. Verify that they look the same. */
14069 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
14070 f1 && f2;
14071 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14072 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
14073 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
14074 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
14075 /* FIXME: gfc_nonrestricted_type builds all types as variants
14076 with the exception of pointer types. It deeply copies the type,
14077 which means that we may end up with a variant type
14078 referring to a non-variant pointer. We may change it to
14079 produce types as variants, too, like
14080 objc_get_protocol_qualified_type does. */
14081 && !POINTER_TYPE_P (TREE_TYPE (f1)))
14082 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
14083 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
14084 break;
14085 if (f1 || f2)
14087 error ("type variant has different %<TYPE_FIELDS%>");
14088 debug_tree (tv);
14089 error ("first mismatch is field");
14090 debug_tree (f1);
14091 error ("and field");
14092 debug_tree (f2);
14093 return false;
14096 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
14097 verify_variant_match (TYPE_ARG_TYPES);
14098 /* For C++ the qualified variant of an array type is really an array type
14099 of the qualified TREE_TYPE.
14100 ObjC builds variants of pointer types where the pointed-to type is a
14101 variant, too, in objc_get_protocol_qualified_type. */
14102 if (TREE_TYPE (t) != TREE_TYPE (tv)
14103 && ((TREE_CODE (t) != ARRAY_TYPE
14104 && !POINTER_TYPE_P (t))
14105 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
14106 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
14108 error ("type variant has different %<TREE_TYPE%>");
14109 debug_tree (tv);
14110 error ("type variant%'s %<TREE_TYPE%>");
14111 debug_tree (TREE_TYPE (tv));
14112 error ("type%'s %<TREE_TYPE%>");
14113 debug_tree (TREE_TYPE (t));
14114 return false;
14116 if (type_with_alias_set_p (t)
14117 && !gimple_canonical_types_compatible_p (t, tv, false))
14119 error ("type is not compatible with its variant");
14120 debug_tree (tv);
14121 error ("type variant%'s %<TREE_TYPE%>");
14122 debug_tree (TREE_TYPE (tv));
14123 error ("type%'s %<TREE_TYPE%>");
14124 debug_tree (TREE_TYPE (t));
14125 return false;
14127 return true;
14128 #undef verify_variant_match
14132 /* The TYPE_CANONICAL merging machinery. It should closely resemble
14133 the middle-end types_compatible_p function. It needs to avoid
14134 claiming types are different for types that should be treated
14135 the same with respect to TBAA. Canonical types are also used
14136 for IL consistency checks via the useless_type_conversion_p
14137 predicate, which does not handle all type kinds itself but falls
14138 back to pointer comparison of TYPE_CANONICAL for aggregates,
14139 for example. */
14141 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
14142 type calculation because we need to allow inter-operability between signed
14143 and unsigned variants. */
14145 bool
14146 type_with_interoperable_signedness (const_tree type)
14148 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
14149 signed char and unsigned char. Similarly, the Fortran FE builds
14150 C_SIZE_T as a signed type, while C defines it as unsigned. */
14152 return tree_code_for_canonical_type_merging (TREE_CODE (type))
14153 == INTEGER_TYPE
14154 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
14155 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
14158 /* Return true iff T1 and T2 are structurally identical as far as
14159 TBAA is concerned.
14160 This function is used both by lto.c canonical type merging and by the
14161 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of
14162 types that have TYPE_CANONICAL defined and assume them equivalent. This is
14163 useful only for LTO because only in that case TYPE_CANONICAL equivalence
14164 corresponds to the one defined by gimple_canonical_types_compatible_p. */
14166 bool
14167 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
14168 bool trust_type_canonical)
14170 /* Type variants should be same as the main variant. When not doing sanity
14171 checking to verify this fact, go to main variants and save some work. */
14172 if (trust_type_canonical)
14174 t1 = TYPE_MAIN_VARIANT (t1);
14175 t2 = TYPE_MAIN_VARIANT (t2);
14178 /* Check first for the obvious case of pointer identity. */
14179 if (t1 == t2)
14180 return true;
14182 /* Check that we have two types to compare. */
14183 if (t1 == NULL_TREE || t2 == NULL_TREE)
14184 return false;
14186 /* We consider complete types always compatible with incomplete types.
14187 This does not make sense for canonical type calculation and thus we
14188 need to ensure that we are never called in that situation.
14190 FIXME: For more correctness the function probably should have three modes
14191 1) a mode assuming that types are complete and matching their structure
14192 2) a mode allowing incomplete types but producing equivalence classes
14193 and thus ignoring all info from complete types
14194 3) a mode allowing incomplete types to match complete ones but checking
14195 compatibility between complete types.
14197 1 and 2 can be used for canonical type calculation. 3 is the real
14198 definition of type compatibility that can be used e.g. for warnings during
14199 declaration merging. */
14201 gcc_assert (!trust_type_canonical
14202 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
14204 /* If the types have been previously registered and found equal
14205 they still are. */
14207 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
14208 && trust_type_canonical)
14210 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
14211 they are always NULL, but they are set to non-NULL for types
14212 constructed by build_pointer_type and variants. In this case the
14213 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
14214 all pointers are considered equal). Be sure not to return false
14215 negatives. */
14216 gcc_checking_assert (canonical_type_used_p (t1)
14217 && canonical_type_used_p (t2));
14218 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
14221 /* For types where we do ODR-based TBAA the canonical type is always
14222 set correctly, so we know that types are different if their
14223 canonical types do not match. */
14224 if (trust_type_canonical
14225 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
14226 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
14227 return false;
14229 /* Can't be the same type if the types don't have the same code. */
14230 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
14231 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
14232 return false;
14234 /* Qualifiers do not matter for canonical type comparison purposes. */
14236 /* Void types and nullptr types are always the same. */
14237 if (TREE_CODE (t1) == VOID_TYPE
14238 || TREE_CODE (t1) == NULLPTR_TYPE)
14239 return true;
14241 /* Can't be the same type if they have different mode. */
14242 if (TYPE_MODE (t1) != TYPE_MODE (t2))
14243 return false;
14245 /* Non-aggregate types can be handled cheaply. */
14246 if (INTEGRAL_TYPE_P (t1)
14247 || SCALAR_FLOAT_TYPE_P (t1)
14248 || FIXED_POINT_TYPE_P (t1)
14249 || TREE_CODE (t1) == VECTOR_TYPE
14250 || TREE_CODE (t1) == COMPLEX_TYPE
14251 || TREE_CODE (t1) == OFFSET_TYPE
14252 || POINTER_TYPE_P (t1))
14254 /* Can't be the same type if they have different precision. */
14255 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
14256 return false;
14258 /* In some cases the signed and unsigned types are required to be
14259 inter-operable. */
14260 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
14261 && !type_with_interoperable_signedness (t1))
14262 return false;
14264 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
14265 interoperable with "signed char". Unless all frontends are revisited
14266 to agree on these types, we must ignore the flag completely. */
14268 /* The Fortran standard defines a C_PTR type that is compatible with every
14269 C pointer. For this reason we need to glob all pointers into one.
14270 Still, pointers in different address spaces are not compatible. */
14271 if (POINTER_TYPE_P (t1))
14273 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
14274 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
14275 return false;
14278 /* Tail-recurse to components. */
14279 if (TREE_CODE (t1) == VECTOR_TYPE
14280 || TREE_CODE (t1) == COMPLEX_TYPE)
14281 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
14282 TREE_TYPE (t2),
14283 trust_type_canonical);
14285 return true;
14288 /* Do type-specific comparisons. */
14289 switch (TREE_CODE (t1))
14291 case ARRAY_TYPE:
14292 /* Array types are the same if the element types are the same and
14293 the number of elements is the same. */
14294 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14295 trust_type_canonical)
14296 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
14297 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
14298 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
14299 return false;
14300 else
14302 tree i1 = TYPE_DOMAIN (t1);
14303 tree i2 = TYPE_DOMAIN (t2);
14305 /* For an incomplete external array, the type domain can be
14306 NULL_TREE. Check this condition also. */
14307 if (i1 == NULL_TREE && i2 == NULL_TREE)
14308 return true;
14309 else if (i1 == NULL_TREE || i2 == NULL_TREE)
14310 return false;
14311 else
14313 tree min1 = TYPE_MIN_VALUE (i1);
14314 tree min2 = TYPE_MIN_VALUE (i2);
14315 tree max1 = TYPE_MAX_VALUE (i1);
14316 tree max2 = TYPE_MAX_VALUE (i2);
14318 /* The minimum/maximum values have to be the same. */
14319 if ((min1 == min2
14320 || (min1 && min2
14321 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
14322 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
14323 || operand_equal_p (min1, min2, 0))))
14324 && (max1 == max2
14325 || (max1 && max2
14326 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
14327 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
14328 || operand_equal_p (max1, max2, 0)))))
14329 return true;
14330 else
14331 return false;
14335 case METHOD_TYPE:
14336 case FUNCTION_TYPE:
14337 /* Function types are the same if the return type and argument types
14338 are the same. */
14339 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
14340 trust_type_canonical))
14341 return false;
14343 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
14344 return true;
14345 else
14347 tree parms1, parms2;
14349 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
14350 parms1 && parms2;
14351 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
14353 if (!gimple_canonical_types_compatible_p
14354 (TREE_VALUE (parms1), TREE_VALUE (parms2),
14355 trust_type_canonical))
14356 return false;
14359 if (parms1 || parms2)
14360 return false;
14362 return true;
14365 case RECORD_TYPE:
14366 case UNION_TYPE:
14367 case QUAL_UNION_TYPE:
14369 tree f1, f2;
14371 /* Don't try to compare variants of an incomplete type, before
14372 TYPE_FIELDS has been copied around. */
14373 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
14374 return true;
14377 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
14378 return false;
14380 /* For aggregate types, all the fields must be the same. */
14381 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
14382 f1 || f2;
14383 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
14385 /* Skip non-fields and zero-sized fields. */
14386 while (f1 && (TREE_CODE (f1) != FIELD_DECL
14387 || (DECL_SIZE (f1)
14388 && integer_zerop (DECL_SIZE (f1)))))
14389 f1 = TREE_CHAIN (f1);
14390 while (f2 && (TREE_CODE (f2) != FIELD_DECL
14391 || (DECL_SIZE (f2)
14392 && integer_zerop (DECL_SIZE (f2)))))
14393 f2 = TREE_CHAIN (f2);
14394 if (!f1 || !f2)
14395 break;
14396 /* The fields must have the same name, offset and type. */
14397 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
14398 || !gimple_compare_field_offset (f1, f2)
14399 || !gimple_canonical_types_compatible_p
14400 (TREE_TYPE (f1), TREE_TYPE (f2),
14401 trust_type_canonical))
14402 return false;
14405 /* If one aggregate has more fields than the other, they
14406 are not the same. */
14407 if (f1 || f2)
14408 return false;
14410 return true;
14413 default:
14414 /* Consider all types with language specific trees in them mutually
14415 compatible. This is executed only from verify_type and false
14416 positives can be tolerated. */
14417 gcc_assert (!in_lto_p);
14418 return true;
14422 /* Verify type T. */
14424 void
14425 verify_type (const_tree t)
14427 bool error_found = false;
14428 tree mv = TYPE_MAIN_VARIANT (t);
14429 if (!mv)
14431 error ("main variant is not defined");
14432 error_found = true;
14434 else if (mv != TYPE_MAIN_VARIANT (mv))
14436 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14437 debug_tree (mv);
14438 error_found = true;
14440 else if (t != mv && !verify_type_variant (t, mv))
14441 error_found = true;
14443 tree ct = TYPE_CANONICAL (t);
14444 if (!ct)
14446 else if (TYPE_CANONICAL (t) != ct)
14448 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14449 debug_tree (ct);
14450 error_found = true;
14452 /* Method and function types cannot be used to address memory and thus
14453 TYPE_CANONICAL really matters only for determining useless conversions.
14455 FIXME: The C++ FE produces declarations of builtin functions that are not
14456 compatible with main variants. */
14457 else if (TREE_CODE (t) == FUNCTION_TYPE)
14459 else if (t != ct
14460 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14461 with variably sized arrays because their sizes are possibly
14462 gimplified to different variables. */
14463 && !variably_modified_type_p (ct, NULL)
14464 && !gimple_canonical_types_compatible_p (t, ct, false)
14465 && COMPLETE_TYPE_P (t))
14467 error ("%<TYPE_CANONICAL%> is not compatible");
14468 debug_tree (ct);
14469 error_found = true;
14472 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14473 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14475 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14476 debug_tree (ct);
14477 error_found = true;
14479 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14481 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14482 debug_tree (ct);
14483 debug_tree (TYPE_MAIN_VARIANT (ct));
14484 error_found = true;
14488 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14489 if (RECORD_OR_UNION_TYPE_P (t))
14491 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14492 and dangles the pointer from time to time. */
14493 if (TYPE_VFIELD (t)
14494 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14495 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14497 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14498 debug_tree (TYPE_VFIELD (t));
14499 error_found = true;
14502 else if (TREE_CODE (t) == POINTER_TYPE)
14504 if (TYPE_NEXT_PTR_TO (t)
14505 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14507 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14508 debug_tree (TYPE_NEXT_PTR_TO (t));
14509 error_found = true;
14512 else if (TREE_CODE (t) == REFERENCE_TYPE)
14514 if (TYPE_NEXT_REF_TO (t)
14515 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14517 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14518 debug_tree (TYPE_NEXT_REF_TO (t));
14519 error_found = true;
14522 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14523 || TREE_CODE (t) == FIXED_POINT_TYPE)
14525 /* FIXME: The following check should pass:
14526 useless_type_conversion_p (const_cast <tree> (t),
14527 TREE_TYPE (TYPE_MIN_VALUE (t)))
14528 but does not for C sizetypes in LTO. */
14531 /* Check various uses of TYPE_MAXVAL_RAW. */
14532 if (RECORD_OR_UNION_TYPE_P (t))
14534 if (!TYPE_BINFO (t))
14536 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14538 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14539 debug_tree (TYPE_BINFO (t));
14540 error_found = true;
14542 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14544 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14545 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14546 error_found = true;
14549 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14551 if (TYPE_METHOD_BASETYPE (t)
14552 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14553 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14555 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14556 debug_tree (TYPE_METHOD_BASETYPE (t));
14557 error_found = true;
14560 else if (TREE_CODE (t) == OFFSET_TYPE)
14562 if (TYPE_OFFSET_BASETYPE (t)
14563 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14564 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14566 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14567 debug_tree (TYPE_OFFSET_BASETYPE (t));
14568 error_found = true;
14571 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14572 || TREE_CODE (t) == FIXED_POINT_TYPE)
14574 /* FIXME: The following check should pass:
14575 useless_type_conversion_p (const_cast <tree> (t),
14576 TREE_TYPE (TYPE_MAX_VALUE (t)))
14577 but does not for C sizetypes in LTO. */
14579 else if (TREE_CODE (t) == ARRAY_TYPE)
14581 if (TYPE_ARRAY_MAX_SIZE (t)
14582 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14584 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14585 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14586 error_found = true;
14589 else if (TYPE_MAX_VALUE_RAW (t))
14591 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14592 debug_tree (TYPE_MAX_VALUE_RAW (t));
14593 error_found = true;
14596 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14598 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14599 debug_tree (TYPE_LANG_SLOT_1 (t));
14600 error_found = true;
14603 /* Check various uses of TYPE_VALUES_RAW. */
14604 if (TREE_CODE (t) == ENUMERAL_TYPE)
14605 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14607 tree value = TREE_VALUE (l);
14608 tree name = TREE_PURPOSE (l);
14610 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
14611 a CONST_DECL of ENUMERAL_TYPE. */
14612 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14614 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14615 debug_tree (value);
14616 debug_tree (name);
14617 error_found = true;
14619 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14620 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14622 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14623 "to the enum");
14624 debug_tree (value);
14625 debug_tree (name);
14626 error_found = true;
14628 if (TREE_CODE (name) != IDENTIFIER_NODE)
14630 error ("enum value name is not %<IDENTIFIER_NODE%>");
14631 debug_tree (value);
14632 debug_tree (name);
14633 error_found = true;
14636 else if (TREE_CODE (t) == ARRAY_TYPE)
14638 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14640 error ("array %<TYPE_DOMAIN%> is not integer type");
14641 debug_tree (TYPE_DOMAIN (t));
14642 error_found = true;
14645 else if (RECORD_OR_UNION_TYPE_P (t))
14647 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14649 error ("%<TYPE_FIELDS%> defined in incomplete type");
14650 error_found = true;
14652 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14654 /* TODO: verify properties of decls. */
14655 if (TREE_CODE (fld) == FIELD_DECL)
14657 else if (TREE_CODE (fld) == TYPE_DECL)
14659 else if (TREE_CODE (fld) == CONST_DECL)
14661 else if (VAR_P (fld))
14663 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14665 else if (TREE_CODE (fld) == USING_DECL)
14667 else if (TREE_CODE (fld) == FUNCTION_DECL)
14669 else
14671 error ("wrong tree in %<TYPE_FIELDS%> list");
14672 debug_tree (fld);
14673 error_found = true;
14677 else if (TREE_CODE (t) == INTEGER_TYPE
14678 || TREE_CODE (t) == BOOLEAN_TYPE
14679 || TREE_CODE (t) == OFFSET_TYPE
14680 || TREE_CODE (t) == REFERENCE_TYPE
14681 || TREE_CODE (t) == NULLPTR_TYPE
14682 || TREE_CODE (t) == POINTER_TYPE)
14684 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14686 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14687 "is %p",
14688 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14689 error_found = true;
14691 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14693 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14694 debug_tree (TYPE_CACHED_VALUES (t));
14695 error_found = true;
14697 /* Verify just enough of the cache to ensure that no one copied it to a new
14698 type. All copying should go through copy_node, which should clear it. */
14699 else if (TYPE_CACHED_VALUES_P (t))
14701 int i;
14702 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14703 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14704 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14706 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14707 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14708 error_found = true;
14709 break;
14713 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14714 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14716 /* C++ FE uses TREE_PURPOSE to store initial values. */
14717 if (TREE_PURPOSE (l) && in_lto_p)
14719 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14720 debug_tree (l);
14721 error_found = true;
14723 if (!TYPE_P (TREE_VALUE (l)))
14725 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14726 debug_tree (l);
14727 error_found = true;
14730 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14732 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14733 debug_tree (TYPE_VALUES_RAW (t));
14734 error_found = true;
14736 if (TREE_CODE (t) != INTEGER_TYPE
14737 && TREE_CODE (t) != BOOLEAN_TYPE
14738 && TREE_CODE (t) != OFFSET_TYPE
14739 && TREE_CODE (t) != REFERENCE_TYPE
14740 && TREE_CODE (t) != NULLPTR_TYPE
14741 && TREE_CODE (t) != POINTER_TYPE
14742 && TYPE_CACHED_VALUES_P (t))
14744 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14745 error_found = true;
14748 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14749 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
14750 of a type. */
14751 if (TREE_CODE (t) == METHOD_TYPE
14752 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14754 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14755 error_found = true;
14758 if (error_found)
14760 debug_tree (const_cast <tree> (t));
14761 internal_error ("%qs failed", __func__);
14766 /* Return 1 if ARG interpreted as signed in its precision is known to be
14767 always positive, or 2 if ARG is known to be always negative, or 3 if
14768 ARG may be positive or negative. */
14771 get_range_pos_neg (tree arg)
14773 if (arg == error_mark_node)
14774 return 3;
14776 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14777 int cnt = 0;
14778 if (TREE_CODE (arg) == INTEGER_CST)
14780 wide_int w = wi::sext (wi::to_wide (arg), prec);
14781 if (wi::neg_p (w))
14782 return 2;
14783 else
14784 return 1;
14786 while (CONVERT_EXPR_P (arg)
14787 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14788 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14790 arg = TREE_OPERAND (arg, 0);
14791 /* Narrower value zero extended into wider type
14792 will always result in positive values. */
14793 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14794 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14795 return 1;
14796 prec = TYPE_PRECISION (TREE_TYPE (arg));
14797 if (++cnt > 30)
14798 return 3;
14801 if (TREE_CODE (arg) != SSA_NAME)
14802 return 3;
14803 wide_int arg_min, arg_max;
14804 while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
14806 gimple *g = SSA_NAME_DEF_STMT (arg);
14807 if (is_gimple_assign (g)
14808 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14810 tree t = gimple_assign_rhs1 (g);
14811 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14812 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14814 if (TYPE_UNSIGNED (TREE_TYPE (t))
14815 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14816 return 1;
14817 prec = TYPE_PRECISION (TREE_TYPE (t));
14818 arg = t;
14819 if (++cnt > 30)
14820 return 3;
14821 continue;
14824 return 3;
14826 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14828 /* For unsigned values, the "positive" range comes
14829 below the "negative" range. */
14830 if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14831 return 1;
14832 if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14833 return 2;
14835 else
14837 if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14838 return 1;
14839 if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14840 return 2;
14842 return 3;
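/* A minimal usage sketch (illustrative): callers typically branch on the
   tristate result, e.g.

     int pos_neg = get_range_pos_neg (arg);
     if (pos_neg == 1)
       expand_nonnegative_case ();
     else if (pos_neg == 2)
       expand_negative_case ();
     else
       expand_both_cases ();

   where the three expand_* calls are placeholders for whatever the caller
   does when ARG is known non-negative, known negative, or of unknown
   sign.  */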
14848 /* Return true if ARG is marked with the nonnull attribute in the
14849 current function signature. */
14851 bool
14852 nonnull_arg_p (const_tree arg)
14854 tree t, attrs, fntype;
14855 unsigned HOST_WIDE_INT arg_num;
14857 gcc_assert (TREE_CODE (arg) == PARM_DECL
14858 && (POINTER_TYPE_P (TREE_TYPE (arg))
14859 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14861 /* The static chain decl is always non-null. */
14862 if (arg == cfun->static_chain_decl)
14863 return true;
14865 /* The THIS argument of a method is always non-NULL. */
14866 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14867 && arg == DECL_ARGUMENTS (cfun->decl)
14868 && flag_delete_null_pointer_checks)
14869 return true;
14871 /* Values passed by reference are always non-NULL. */
14872 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14873 && flag_delete_null_pointer_checks)
14874 return true;
14876 fntype = TREE_TYPE (cfun->decl);
14877 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14879 attrs = lookup_attribute ("nonnull", attrs);
14881 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14882 if (attrs == NULL_TREE)
14883 return false;
14885 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14886 if (TREE_VALUE (attrs) == NULL_TREE)
14887 return true;
14889 /* Get the position number for ARG in the function signature. */
14890 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14892 t = DECL_CHAIN (t), arg_num++)
14894 if (t == arg)
14895 break;
14898 gcc_assert (t == arg);
14900 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14901 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14903 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14904 return true;
14908 return false;
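/* For example (illustrative): inside a function compiled from

     void f (void *a, void *b) __attribute__ ((nonnull (2)));

   nonnull_arg_p returns false for the PARM_DECL of A and true for the
   PARM_DECL of B; with no argument list, as in plain
   __attribute__ ((nonnull)), it returns true for every pointer
   parameter.  */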
14911 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14912 information. */
14914 location_t
14915 set_block (location_t loc, tree block)
14917 location_t pure_loc = get_pure_location (loc);
14918 source_range src_range = get_range_from_loc (line_table, loc);
14919 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14922 location_t
14923 set_source_range (tree expr, location_t start, location_t finish)
14925 source_range src_range;
14926 src_range.m_start = start;
14927 src_range.m_finish = finish;
14928 return set_source_range (expr, src_range);
14931 location_t
14932 set_source_range (tree expr, source_range src_range)
14934 if (!EXPR_P (expr))
14935 return UNKNOWN_LOCATION;
14937 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14938 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14939 pure_loc,
14940 src_range,
14941 NULL);
14942 SET_EXPR_LOCATION (expr, adhoc);
14943 return adhoc;
14946 /* Return EXPR, potentially wrapped with a node carrying location LOC,
14947 if !CAN_HAVE_LOCATION_P (expr).
14949 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14950 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14952 Wrapper nodes can be identified using location_wrapper_p. */
14954 tree
14955 maybe_wrap_with_location (tree expr, location_t loc)
14957 if (expr == NULL)
14958 return NULL;
14959 if (loc == UNKNOWN_LOCATION)
14960 return expr;
14961 if (CAN_HAVE_LOCATION_P (expr))
14962 return expr;
14963 /* We should only be adding wrappers for constants and for decls,
14964 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14965 gcc_assert (CONSTANT_CLASS_P (expr)
14966 || DECL_P (expr)
14967 || EXCEPTIONAL_CLASS_P (expr));
14969 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14970 any impact of the wrapper nodes. */
14971 if (EXCEPTIONAL_CLASS_P (expr))
14972 return expr;
14974 /* If any auto_suppress_location_wrappers are active, don't create
14975 wrappers. */
14976 if (suppress_location_wrappers > 0)
14977 return expr;
14979 tree_code code
14980 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14981 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14982 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14983 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14984 /* Mark this node as being a wrapper. */
14985 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14986 return wrapper;
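/* A minimal sketch of the intended use (illustrative): a front end that has
   just parsed the literal 42 at location LOC can preserve that location even
   though INTEGER_CSTs are shared and carry no location of their own:

     tree cst = build_int_cst (integer_type_node, 42);
     tree wrapped = maybe_wrap_with_location (cst, loc);

   WRAPPED is then a NON_LVALUE_EXPR for which location_wrapper_p is true
   and EXPR_LOCATION yields LOC, while TREE_OPERAND (wrapped, 0) recovers
   the original constant.  */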
14989 int suppress_location_wrappers;
14991 /* Return the name of combined function FN, for debugging purposes. */
14993 const char *
14994 combined_fn_name (combined_fn fn)
14996 if (builtin_fn_p (fn))
14998 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14999 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
15001 else
15002 return internal_fn_name (as_internal_fn (fn));
15005 /* Return a bitmap with a bit set corresponding to each argument in
15006 a function call type FNTYPE declared with attribute nonnull,
15007 or null if none of the function's arguments are nonnull. The caller
15008 must free the bitmap. */
15010 bitmap
15011 get_nonnull_args (const_tree fntype)
15013 if (fntype == NULL_TREE)
15014 return NULL;
15016 bitmap argmap = NULL;
15017 if (TREE_CODE (fntype) == METHOD_TYPE)
15019 /* The this pointer in C++ non-static member functions is
15020 implicitly nonnull whether or not it's declared as such. */
15021 argmap = BITMAP_ALLOC (NULL);
15022 bitmap_set_bit (argmap, 0);
15025 tree attrs = TYPE_ATTRIBUTES (fntype);
15026 if (!attrs)
15027 return argmap;
15029 /* A function declaration can specify multiple nonnull attributes,
15030 each with zero or more arguments. The loop below creates a bitmap
15031 representing the union of all the arguments. An empty (but non-null)
15032 bitmap means that all arguments have been declared nonnull. */
15033 for ( ; attrs; attrs = TREE_CHAIN (attrs))
15035 attrs = lookup_attribute ("nonnull", attrs);
15036 if (!attrs)
15037 break;
15039 if (!argmap)
15040 argmap = BITMAP_ALLOC (NULL);
15042 if (!TREE_VALUE (attrs))
15044 /* Clear the bitmap in case a previous attribute nonnull
15045 set it and this one overrides it for all arguments. */
15046 bitmap_clear (argmap);
15047 return argmap;
15050 /* Iterate over the indices of the arguments declared nonnull
15051 and set a bit for each. */
15052 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
15054 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
15055 bitmap_set_bit (argmap, val);
15059 return argmap;
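/* A usage sketch (illustrative, assuming STMT is a GIMPLE call and I a
   zero-based argument index):

     bitmap nonnull = get_nonnull_args (gimple_call_fntype (stmt));
     bool arg_nonnull
       = (nonnull
          && (bitmap_empty_p (nonnull) || bitmap_bit_p (nonnull, i)));
     if (nonnull)
       BITMAP_FREE (nonnull);

   An empty but non-null bitmap means every argument was declared nonnull,
   and the caller is responsible for freeing the result.  */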
15062 /* Returns true if TYPE is a type where it and all of its subobjects
15063 (recursively) are of structure, union, or array type. */
15065 static bool
15066 default_is_empty_type (tree type)
15068 if (RECORD_OR_UNION_TYPE_P (type))
15070 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
15071 if (TREE_CODE (field) == FIELD_DECL
15072 && !DECL_PADDING_P (field)
15073 && !default_is_empty_type (TREE_TYPE (field)))
15074 return false;
15075 return true;
15077 else if (TREE_CODE (type) == ARRAY_TYPE)
15078 return (integer_minus_onep (array_type_nelts (type))
15079 || TYPE_DOMAIN (type) == NULL_TREE
15080 || default_is_empty_type (TREE_TYPE (type)));
15081 return false;
15084 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
15085 that shouldn't be passed via stack. */
15087 bool
15088 default_is_empty_record (const_tree type)
15090 if (!abi_version_at_least (12))
15091 return false;
15093 if (type == error_mark_node)
15094 return false;
15096 if (TREE_ADDRESSABLE (type))
15097 return false;
15099 return default_is_empty_type (TYPE_MAIN_VARIANT (type));
15102 /* Determine whether TYPE is a structure with a flexible array member,
15103 or a union containing such a structure (possibly recursively). */
15105 bool
15106 flexible_array_type_p (const_tree type)
15108 tree x, last;
15109 switch (TREE_CODE (type))
15111 case RECORD_TYPE:
15112 last = NULL_TREE;
15113 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
15114 if (TREE_CODE (x) == FIELD_DECL)
15115 last = x;
15116 if (last == NULL_TREE)
15117 return false;
15118 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
15119 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
15120 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
15121 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
15122 return true;
15123 return false;
15124 case UNION_TYPE:
15125 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
15127 if (TREE_CODE (x) == FIELD_DECL
15128 && flexible_array_type_p (TREE_TYPE (x)))
15129 return true;
15131 return false;
15132 default:
15133 return false;
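/* For example (illustrative):

     struct flex { int n; char data[]; };
     union u { struct flex f; int i; };
     struct fixed { int n; char data[4]; };

   flexible_array_type_p is true for struct flex and, via the recursive
   union case, for union u, but false for struct fixed, whose trailing
   array has a TYPE_MAX_VALUE.  */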
15137 /* Like int_size_in_bytes, but handle empty records specially. */
15139 HOST_WIDE_INT
15140 arg_int_size_in_bytes (const_tree type)
15142 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
15145 /* Like size_in_bytes, but handle empty records specially. */
15147 tree
15148 arg_size_in_bytes (const_tree type)
15150 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
15153 /* Return true if an expression with CODE has to have the same result type as
15154 its first operand. */
15156 bool
15157 expr_type_first_operand_type_p (tree_code code)
15159 switch (code)
15161 case NEGATE_EXPR:
15162 case ABS_EXPR:
15163 case BIT_NOT_EXPR:
15164 case PAREN_EXPR:
15165 case CONJ_EXPR:
15167 case PLUS_EXPR:
15168 case MINUS_EXPR:
15169 case MULT_EXPR:
15170 case TRUNC_DIV_EXPR:
15171 case CEIL_DIV_EXPR:
15172 case FLOOR_DIV_EXPR:
15173 case ROUND_DIV_EXPR:
15174 case TRUNC_MOD_EXPR:
15175 case CEIL_MOD_EXPR:
15176 case FLOOR_MOD_EXPR:
15177 case ROUND_MOD_EXPR:
15178 case RDIV_EXPR:
15179 case EXACT_DIV_EXPR:
15180 case MIN_EXPR:
15181 case MAX_EXPR:
15182 case BIT_IOR_EXPR:
15183 case BIT_XOR_EXPR:
15184 case BIT_AND_EXPR:
15186 case LSHIFT_EXPR:
15187 case RSHIFT_EXPR:
15188 case LROTATE_EXPR:
15189 case RROTATE_EXPR:
15190 return true;
15192 default:
15193 return false;
15197 /* Return a typenode for the "standard" C type with a given name. */
15198 tree
15199 get_typenode_from_name (const char *name)
15201 if (name == NULL || *name == '\0')
15202 return NULL_TREE;
15204 if (strcmp (name, "char") == 0)
15205 return char_type_node;
15206 if (strcmp (name, "unsigned char") == 0)
15207 return unsigned_char_type_node;
15208 if (strcmp (name, "signed char") == 0)
15209 return signed_char_type_node;
15211 if (strcmp (name, "short int") == 0)
15212 return short_integer_type_node;
15213 if (strcmp (name, "short unsigned int") == 0)
15214 return short_unsigned_type_node;
15216 if (strcmp (name, "int") == 0)
15217 return integer_type_node;
15218 if (strcmp (name, "unsigned int") == 0)
15219 return unsigned_type_node;
15221 if (strcmp (name, "long int") == 0)
15222 return long_integer_type_node;
15223 if (strcmp (name, "long unsigned int") == 0)
15224 return long_unsigned_type_node;
15226 if (strcmp (name, "long long int") == 0)
15227 return long_long_integer_type_node;
15228 if (strcmp (name, "long long unsigned int") == 0)
15229 return long_long_unsigned_type_node;
15231 gcc_unreachable ();
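/* A usage sketch (illustrative): code that only has the textual spelling of
   a standard type, such as the string the SIZE_TYPE or PTRDIFF_TYPE target
   macros expand to, can map it back to the corresponding node:

     tree t = get_typenode_from_name (SIZE_TYPE);

   Names outside the list above (other than NULL or "") are rejected with
   gcc_unreachable.  */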
15234 /* List of pointer types used to declare builtins before we have seen their
15235 real declarations.
15237 Keep the size up to date in tree.h! */
15238 const builtin_structptr_type builtin_structptr_types[6] =
15240 { fileptr_type_node, ptr_type_node, "FILE" },
15241 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
15242 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
15243 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
15244 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
15245 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
15248 /* Return the maximum object size. */
15250 tree
15251 max_object_size (void)
15253 /* To do: Make this a configurable parameter. */
15254 return TYPE_MAX_VALUE (ptrdiff_type_node);
15257 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
15258 parameter default to false and that weeds out error_mark_node. */
15260 bool
15261 verify_type_context (location_t loc, type_context_kind context,
15262 const_tree type, bool silent_p)
15264 if (type == error_mark_node)
15265 return true;
15267 gcc_assert (TYPE_P (type));
15268 return (!targetm.verify_type_context
15269 || targetm.verify_type_context (loc, context, type, silent_p));
15272 #if CHECKING_P
15274 namespace selftest {
15276 /* Selftests for tree. */
15278 /* Verify that integer constants are sane. */
15280 static void
15281 test_integer_constants ()
15283 ASSERT_TRUE (integer_type_node != NULL);
15284 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15286 tree type = integer_type_node;
15288 tree zero = build_zero_cst (type);
15289 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15290 ASSERT_EQ (type, TREE_TYPE (zero));
15292 tree one = build_int_cst (type, 1);
15293 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15294 ASSERT_EQ (type, TREE_TYPE (one));
15297 /* Verify identifiers. */
15299 static void
15300 test_identifiers ()
15302 tree identifier = get_identifier ("foo");
15303 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15304 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15307 /* Verify LABEL_DECL. */
15309 static void
15310 test_labels ()
15312 tree identifier = get_identifier ("err");
15313 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15314 identifier, void_type_node);
15315 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15316 ASSERT_FALSE (FORCED_LABEL (label_decl));
15319 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15320 are given by VALS. */
15322 static tree
15323 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
15325 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15326 tree_vector_builder builder (type, vals.length (), 1);
15327 builder.splice (vals);
15328 return builder.build ();
15331 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15333 static void
15334 check_vector_cst (vec<tree> expected, tree actual)
15336 ASSERT_KNOWN_EQ (expected.length (),
15337 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15338 for (unsigned int i = 0; i < expected.length (); ++i)
15339 ASSERT_EQ (wi::to_wide (expected[i]),
15340 wi::to_wide (vector_cst_elt (actual, i)));
15343 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15344 and that its elements match EXPECTED. */
15346 static void
15347 check_vector_cst_duplicate (vec<tree> expected, tree actual,
15348 unsigned int npatterns)
15350 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15351 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15352 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15353 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15354 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15355 check_vector_cst (expected, actual);
15358 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15359 and NPATTERNS background elements, and that its elements match
15360 EXPECTED. */
15362 static void
15363 check_vector_cst_fill (vec<tree> expected, tree actual,
15364 unsigned int npatterns)
15365 {
15366 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15367 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15368 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15369 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15370 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15371 check_vector_cst (expected, actual);
15372 }
15374 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15375 and that its elements match EXPECTED. */
15377 static void
15378 check_vector_cst_stepped (vec<tree> expected, tree actual,
15379 unsigned int npatterns)
15380 {
15381 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15382 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15383 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15384 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15385 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15386 check_vector_cst (expected, actual);
15387 }
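/* Background for the helpers above: a VECTOR_CST is encoded as NPATTERNS
   interleaved patterns, each described by NELTS_PER_PATTERN encoded
   elements.  One element per pattern means the pattern is a duplicate of
   a single value; two means an initial value followed by a repeated
   "background" value; three means a linear series whose step is the
   difference between the second and third encoded elements.  For
   example, { 0, 1, 2, 3, 4, 5, 6, 7 } can be encoded with one pattern
   and three encoded elements (0, 1, step 1), which is why
   check_vector_cst_stepped is called with npatterns == 1 below.  */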
15389 /* Test the creation of VECTOR_CSTs. */
15391 static void
15392 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15393 {
15394 auto_vec<tree, 8> elements (8);
15395 elements.quick_grow (8);
15396 tree element_type = build_nonstandard_integer_type (16, true);
15397 tree vector_type = build_vector_type (element_type, 8);
15399 /* Test a simple linear series with a base of 0 and a step of 1:
15400 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
15401 for (unsigned int i = 0; i < 8; ++i)
15402 elements[i] = build_int_cst (element_type, i);
15403 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15404 check_vector_cst_stepped (elements, vector, 1);
15406 /* Try the same with the first element replaced by 100:
15407 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
15408 elements[0] = build_int_cst (element_type, 100);
15409 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15410 check_vector_cst_stepped (elements, vector, 1);
15412 /* Try a series that wraps around.
15413 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
15414 for (unsigned int i = 1; i < 8; ++i)
15415 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15416 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15417 check_vector_cst_stepped (elements, vector, 1);
15419 /* Try a downward series:
15420 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
15421 for (unsigned int i = 1; i < 8; ++i)
15422 elements[i] = build_int_cst (element_type, 80 - i);
15423 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15424 check_vector_cst_stepped (elements, vector, 1);
15426 /* Try two interleaved series with different bases and steps:
15427 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
15428 elements[1] = build_int_cst (element_type, 53);
15429 for (unsigned int i = 2; i < 8; i += 2)
15430 {
15431 elements[i] = build_int_cst (element_type, 70 - i * 2);
15432 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15433 }
15434 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15435 check_vector_cst_stepped (elements, vector, 2);
15437 /* Try a duplicated value:
15438 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
15439 for (unsigned int i = 1; i < 8; ++i)
15440 elements[i] = elements[0];
15441 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15442 check_vector_cst_duplicate (elements, vector, 1);
15444 /* Try an interleaved duplicated value:
15445 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
15446 elements[1] = build_int_cst (element_type, 55);
15447 for (unsigned int i = 2; i < 8; ++i)
15448 elements[i] = elements[i - 2];
15449 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15450 check_vector_cst_duplicate (elements, vector, 2);
15452 /* Try a duplicated value with 2 exceptions
15453 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
15454 elements[0] = build_int_cst (element_type, 41);
15455 elements[1] = build_int_cst (element_type, 97);
15456 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15457 check_vector_cst_fill (elements, vector, 2);
15459 /* Try with and without a step
15460 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
15461 for (unsigned int i = 3; i < 8; i += 2)
15462 elements[i] = build_int_cst (element_type, i * 7);
15463 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15464 check_vector_cst_stepped (elements, vector, 2);
15466 /* Try a fully-general constant:
15467 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
15468 elements[5] = build_int_cst (element_type, 9990);
15469 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15470 check_vector_cst_fill (elements, vector, 4);
15471 }
15473 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15474 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15475 modifying its argument in-place. */
15477 static void
15478 check_strip_nops (tree node, tree expected)
15479 {
15480 STRIP_NOPS (node);
15481 ASSERT_EQ (expected, node);
15482 }
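/* Background for the test below: a location wrapper is a unary node
   (NON_LVALUE_EXPR for most constants, VIEW_CONVERT_EXPR for decls and
   STRING_CSTs) whose only purpose is to give a location to an operand
   that cannot carry one itself; EXPR_LOCATION_WRAPPER_P distinguishes it
   from a real conversion.  The assertions check both that wrappers are
   created where useful and that they stay transparent to code that
   strips them.  */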
15484 /* Verify location wrappers. */
15486 static void
15487 test_location_wrappers ()
15488 {
15489 location_t loc = BUILTINS_LOCATION;
15491 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15493 /* Wrapping a constant. */
15494 tree int_cst = build_int_cst (integer_type_node, 42);
15495 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15496 ASSERT_FALSE (location_wrapper_p (int_cst));
15498 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15499 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15500 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15501 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15503 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
15504 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15506 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
15507 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15508 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15509 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15511 /* Wrapping a STRING_CST. */
15512 tree string_cst = build_string (4, "foo");
15513 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15514 ASSERT_FALSE (location_wrapper_p (string_cst));
15516 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15517 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15518 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15519 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15520 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
15523 /* Wrapping a variable. */
15524 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15525 get_identifier ("some_int_var"),
15526 integer_type_node);
15527 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15528 ASSERT_FALSE (location_wrapper_p (int_var));
15530 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15531 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15532 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15533 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15535 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15536 wrapper. */
15537 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15538 ASSERT_FALSE (location_wrapper_p (r_cast));
15539 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15541 /* Verify that STRIP_NOPS removes wrappers. */
15542 check_strip_nops (wrapped_int_cst, int_cst);
15543 check_strip_nops (wrapped_string_cst, string_cst);
15544 check_strip_nops (wrapped_int_var, int_var);
15545 }
15547 /* Test various tree predicates. Verify that location wrappers don't
15548 affect the results. */
15550 static void
15551 test_predicates ()
15552 {
15553 /* Build various constants and wrappers around them. */
15555 location_t loc = BUILTINS_LOCATION;
15557 tree i_0 = build_int_cst (integer_type_node, 0);
15558 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15560 tree i_1 = build_int_cst (integer_type_node, 1);
15561 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15563 tree i_m1 = build_int_cst (integer_type_node, -1);
15564 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15566 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15567 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15568 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15569 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15570 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15571 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15573 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15574 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15575 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15577 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15578 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15579 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
15581 /* TODO: vector constants. */
15583 /* Test integer_onep. */
15584 ASSERT_FALSE (integer_onep (i_0));
15585 ASSERT_FALSE (integer_onep (wr_i_0));
15586 ASSERT_TRUE (integer_onep (i_1));
15587 ASSERT_TRUE (integer_onep (wr_i_1));
15588 ASSERT_FALSE (integer_onep (i_m1));
15589 ASSERT_FALSE (integer_onep (wr_i_m1));
15590 ASSERT_FALSE (integer_onep (f_0));
15591 ASSERT_FALSE (integer_onep (wr_f_0));
15592 ASSERT_FALSE (integer_onep (f_1));
15593 ASSERT_FALSE (integer_onep (wr_f_1));
15594 ASSERT_FALSE (integer_onep (f_m1));
15595 ASSERT_FALSE (integer_onep (wr_f_m1));
15596 ASSERT_FALSE (integer_onep (c_i_0));
15597 ASSERT_TRUE (integer_onep (c_i_1));
15598 ASSERT_FALSE (integer_onep (c_i_m1));
15599 ASSERT_FALSE (integer_onep (c_f_0));
15600 ASSERT_FALSE (integer_onep (c_f_1));
15601 ASSERT_FALSE (integer_onep (c_f_m1));
15603 /* Test integer_zerop. */
15604 ASSERT_TRUE (integer_zerop (i_0));
15605 ASSERT_TRUE (integer_zerop (wr_i_0));
15606 ASSERT_FALSE (integer_zerop (i_1));
15607 ASSERT_FALSE (integer_zerop (wr_i_1));
15608 ASSERT_FALSE (integer_zerop (i_m1));
15609 ASSERT_FALSE (integer_zerop (wr_i_m1));
15610 ASSERT_FALSE (integer_zerop (f_0));
15611 ASSERT_FALSE (integer_zerop (wr_f_0));
15612 ASSERT_FALSE (integer_zerop (f_1));
15613 ASSERT_FALSE (integer_zerop (wr_f_1));
15614 ASSERT_FALSE (integer_zerop (f_m1));
15615 ASSERT_FALSE (integer_zerop (wr_f_m1));
15616 ASSERT_TRUE (integer_zerop (c_i_0));
15617 ASSERT_FALSE (integer_zerop (c_i_1));
15618 ASSERT_FALSE (integer_zerop (c_i_m1));
15619 ASSERT_FALSE (integer_zerop (c_f_0));
15620 ASSERT_FALSE (integer_zerop (c_f_1));
15621 ASSERT_FALSE (integer_zerop (c_f_m1));
15623 /* Test integer_all_onesp. */
15624 ASSERT_FALSE (integer_all_onesp (i_0));
15625 ASSERT_FALSE (integer_all_onesp (wr_i_0));
15626 ASSERT_FALSE (integer_all_onesp (i_1));
15627 ASSERT_FALSE (integer_all_onesp (wr_i_1));
15628 ASSERT_TRUE (integer_all_onesp (i_m1));
15629 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15630 ASSERT_FALSE (integer_all_onesp (f_0));
15631 ASSERT_FALSE (integer_all_onesp (wr_f_0));
15632 ASSERT_FALSE (integer_all_onesp (f_1));
15633 ASSERT_FALSE (integer_all_onesp (wr_f_1));
15634 ASSERT_FALSE (integer_all_onesp (f_m1));
15635 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15636 ASSERT_FALSE (integer_all_onesp (c_i_0));
15637 ASSERT_FALSE (integer_all_onesp (c_i_1));
15638 ASSERT_FALSE (integer_all_onesp (c_i_m1));
15639 ASSERT_FALSE (integer_all_onesp (c_f_0));
15640 ASSERT_FALSE (integer_all_onesp (c_f_1));
15641 ASSERT_FALSE (integer_all_onesp (c_f_m1));
15643 /* Test integer_minus_onep. */
15644 ASSERT_FALSE (integer_minus_onep (i_0));
15645 ASSERT_FALSE (integer_minus_onep (wr_i_0));
15646 ASSERT_FALSE (integer_minus_onep (i_1));
15647 ASSERT_FALSE (integer_minus_onep (wr_i_1));
15648 ASSERT_TRUE (integer_minus_onep (i_m1));
15649 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15650 ASSERT_FALSE (integer_minus_onep (f_0));
15651 ASSERT_FALSE (integer_minus_onep (wr_f_0));
15652 ASSERT_FALSE (integer_minus_onep (f_1));
15653 ASSERT_FALSE (integer_minus_onep (wr_f_1));
15654 ASSERT_FALSE (integer_minus_onep (f_m1));
15655 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15656 ASSERT_FALSE (integer_minus_onep (c_i_0));
15657 ASSERT_FALSE (integer_minus_onep (c_i_1));
15658 ASSERT_TRUE (integer_minus_onep (c_i_m1));
15659 ASSERT_FALSE (integer_minus_onep (c_f_0));
15660 ASSERT_FALSE (integer_minus_onep (c_f_1));
15661 ASSERT_FALSE (integer_minus_onep (c_f_m1));
15663 /* Test integer_each_onep. */
15664 ASSERT_FALSE (integer_each_onep (i_0));
15665 ASSERT_FALSE (integer_each_onep (wr_i_0));
15666 ASSERT_TRUE (integer_each_onep (i_1));
15667 ASSERT_TRUE (integer_each_onep (wr_i_1));
15668 ASSERT_FALSE (integer_each_onep (i_m1));
15669 ASSERT_FALSE (integer_each_onep (wr_i_m1));
15670 ASSERT_FALSE (integer_each_onep (f_0));
15671 ASSERT_FALSE (integer_each_onep (wr_f_0));
15672 ASSERT_FALSE (integer_each_onep (f_1));
15673 ASSERT_FALSE (integer_each_onep (wr_f_1));
15674 ASSERT_FALSE (integer_each_onep (f_m1));
15675 ASSERT_FALSE (integer_each_onep (wr_f_m1));
15676 ASSERT_FALSE (integer_each_onep (c_i_0));
15677 ASSERT_FALSE (integer_each_onep (c_i_1));
15678 ASSERT_FALSE (integer_each_onep (c_i_m1));
15679 ASSERT_FALSE (integer_each_onep (c_f_0));
15680 ASSERT_FALSE (integer_each_onep (c_f_1));
15681 ASSERT_FALSE (integer_each_onep (c_f_m1));
15683 /* Test integer_truep. */
15684 ASSERT_FALSE (integer_truep (i_0));
15685 ASSERT_FALSE (integer_truep (wr_i_0));
15686 ASSERT_TRUE (integer_truep (i_1));
15687 ASSERT_TRUE (integer_truep (wr_i_1));
15688 ASSERT_FALSE (integer_truep (i_m1));
15689 ASSERT_FALSE (integer_truep (wr_i_m1));
15690 ASSERT_FALSE (integer_truep (f_0));
15691 ASSERT_FALSE (integer_truep (wr_f_0));
15692 ASSERT_FALSE (integer_truep (f_1));
15693 ASSERT_FALSE (integer_truep (wr_f_1));
15694 ASSERT_FALSE (integer_truep (f_m1));
15695 ASSERT_FALSE (integer_truep (wr_f_m1));
15696 ASSERT_FALSE (integer_truep (c_i_0));
15697 ASSERT_TRUE (integer_truep (c_i_1));
15698 ASSERT_FALSE (integer_truep (c_i_m1));
15699 ASSERT_FALSE (integer_truep (c_f_0));
15700 ASSERT_FALSE (integer_truep (c_f_1));
15701 ASSERT_FALSE (integer_truep (c_f_m1));
15703 /* Test integer_nonzerop. */
15704 ASSERT_FALSE (integer_nonzerop (i_0));
15705 ASSERT_FALSE (integer_nonzerop (wr_i_0));
15706 ASSERT_TRUE (integer_nonzerop (i_1));
15707 ASSERT_TRUE (integer_nonzerop (wr_i_1));
15708 ASSERT_TRUE (integer_nonzerop (i_m1));
15709 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15710 ASSERT_FALSE (integer_nonzerop (f_0));
15711 ASSERT_FALSE (integer_nonzerop (wr_f_0));
15712 ASSERT_FALSE (integer_nonzerop (f_1));
15713 ASSERT_FALSE (integer_nonzerop (wr_f_1));
15714 ASSERT_FALSE (integer_nonzerop (f_m1));
15715 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15716 ASSERT_FALSE (integer_nonzerop (c_i_0));
15717 ASSERT_TRUE (integer_nonzerop (c_i_1));
15718 ASSERT_TRUE (integer_nonzerop (c_i_m1));
15719 ASSERT_FALSE (integer_nonzerop (c_f_0));
15720 ASSERT_FALSE (integer_nonzerop (c_f_1));
15721 ASSERT_FALSE (integer_nonzerop (c_f_m1));
15723 /* Test real_zerop. */
15724 ASSERT_FALSE (real_zerop (i_0));
15725 ASSERT_FALSE (real_zerop (wr_i_0));
15726 ASSERT_FALSE (real_zerop (i_1));
15727 ASSERT_FALSE (real_zerop (wr_i_1));
15728 ASSERT_FALSE (real_zerop (i_m1));
15729 ASSERT_FALSE (real_zerop (wr_i_m1));
15730 ASSERT_TRUE (real_zerop (f_0));
15731 ASSERT_TRUE (real_zerop (wr_f_0));
15732 ASSERT_FALSE (real_zerop (f_1));
15733 ASSERT_FALSE (real_zerop (wr_f_1));
15734 ASSERT_FALSE (real_zerop (f_m1));
15735 ASSERT_FALSE (real_zerop (wr_f_m1));
15736 ASSERT_FALSE (real_zerop (c_i_0));
15737 ASSERT_FALSE (real_zerop (c_i_1));
15738 ASSERT_FALSE (real_zerop (c_i_m1));
15739 ASSERT_TRUE (real_zerop (c_f_0));
15740 ASSERT_FALSE (real_zerop (c_f_1));
15741 ASSERT_FALSE (real_zerop (c_f_m1));
15743 /* Test real_onep. */
15744 ASSERT_FALSE (real_onep (i_0));
15745 ASSERT_FALSE (real_onep (wr_i_0));
15746 ASSERT_FALSE (real_onep (i_1));
15747 ASSERT_FALSE (real_onep (wr_i_1));
15748 ASSERT_FALSE (real_onep (i_m1));
15749 ASSERT_FALSE (real_onep (wr_i_m1));
15750 ASSERT_FALSE (real_onep (f_0));
15751 ASSERT_FALSE (real_onep (wr_f_0));
15752 ASSERT_TRUE (real_onep (f_1));
15753 ASSERT_TRUE (real_onep (wr_f_1));
15754 ASSERT_FALSE (real_onep (f_m1));
15755 ASSERT_FALSE (real_onep (wr_f_m1));
15756 ASSERT_FALSE (real_onep (c_i_0));
15757 ASSERT_FALSE (real_onep (c_i_1));
15758 ASSERT_FALSE (real_onep (c_i_m1));
15759 ASSERT_FALSE (real_onep (c_f_0));
15760 ASSERT_TRUE (real_onep (c_f_1));
15761 ASSERT_FALSE (real_onep (c_f_m1));
15763 /* Test real_minus_onep. */
15764 ASSERT_FALSE (real_minus_onep (i_0));
15765 ASSERT_FALSE (real_minus_onep (wr_i_0));
15766 ASSERT_FALSE (real_minus_onep (i_1));
15767 ASSERT_FALSE (real_minus_onep (wr_i_1));
15768 ASSERT_FALSE (real_minus_onep (i_m1));
15769 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15770 ASSERT_FALSE (real_minus_onep (f_0));
15771 ASSERT_FALSE (real_minus_onep (wr_f_0));
15772 ASSERT_FALSE (real_minus_onep (f_1));
15773 ASSERT_FALSE (real_minus_onep (wr_f_1));
15774 ASSERT_TRUE (real_minus_onep (f_m1));
15775 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15776 ASSERT_FALSE (real_minus_onep (c_i_0));
15777 ASSERT_FALSE (real_minus_onep (c_i_1));
15778 ASSERT_FALSE (real_minus_onep (c_i_m1));
15779 ASSERT_FALSE (real_minus_onep (c_f_0));
15780 ASSERT_FALSE (real_minus_onep (c_f_1));
15781 ASSERT_TRUE (real_minus_onep (c_f_m1));
15783 /* Test zerop. */
15784 ASSERT_TRUE (zerop (i_0));
15785 ASSERT_TRUE (zerop (wr_i_0));
15786 ASSERT_FALSE (zerop (i_1));
15787 ASSERT_FALSE (zerop (wr_i_1));
15788 ASSERT_FALSE (zerop (i_m1));
15789 ASSERT_FALSE (zerop (wr_i_m1));
15790 ASSERT_TRUE (zerop (f_0));
15791 ASSERT_TRUE (zerop (wr_f_0));
15792 ASSERT_FALSE (zerop (f_1));
15793 ASSERT_FALSE (zerop (wr_f_1));
15794 ASSERT_FALSE (zerop (f_m1));
15795 ASSERT_FALSE (zerop (wr_f_m1));
15796 ASSERT_TRUE (zerop (c_i_0));
15797 ASSERT_FALSE (zerop (c_i_1));
15798 ASSERT_FALSE (zerop (c_i_m1));
15799 ASSERT_TRUE (zerop (c_f_0));
15800 ASSERT_FALSE (zerop (c_f_1));
15801 ASSERT_FALSE (zerop (c_f_m1));
15803 /* Test tree_expr_nonnegative_p. */
15804 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15805 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15806 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15807 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15808 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15809 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15810 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15811 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15812 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15813 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15814 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15815 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15816 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15817 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15818 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15819 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15820 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15821 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15823 /* Test tree_expr_nonzero_p. */
15824 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15825 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15826 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15827 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15828 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15829 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15831 /* Test integer_valued_real_p. */
15832 ASSERT_FALSE (integer_valued_real_p (i_0));
15833 ASSERT_TRUE (integer_valued_real_p (f_0));
15834 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15835 ASSERT_TRUE (integer_valued_real_p (f_1));
15836 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15838 /* Test integer_pow2p. */
15839 ASSERT_FALSE (integer_pow2p (i_0));
15840 ASSERT_TRUE (integer_pow2p (i_1));
15841 ASSERT_TRUE (integer_pow2p (wr_i_1));
15843 /* Test uniform_integer_cst_p. */
15844 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15845 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15846 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15847 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15848 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15849 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15850 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15851 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15852 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15853 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15854 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15855 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15856 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15857 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15858 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15859 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15860 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15861 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15862 }
15864 /* Check that string escaping works correctly. */
15866 static void
15867 test_escaped_strings (void)
15868 {
15869 int saved_cutoff;
15870 escaped_string msg;
15872 msg.escape (NULL);
15873 /* ASSERT_STREQ does not accept NULL as a valid test
15874 result, so we have to use ASSERT_EQ instead. */
15875 ASSERT_EQ (NULL, (const char *) msg);
15877 msg.escape ("");
15878 ASSERT_STREQ ("", (const char *) msg);
15880 msg.escape ("foobar");
15881 ASSERT_STREQ ("foobar", (const char *) msg);
15883 /* Ensure that we have -fmessage-length set to 0. */
15884 saved_cutoff = pp_line_cutoff (global_dc->printer);
15885 pp_line_cutoff (global_dc->printer) = 0;
15887 msg.escape ("foo\nbar");
15888 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15890 msg.escape ("\a\b\f\n\r\t\v");
15891 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15893 /* Now repeat the tests with -fmessage-length set to 5. */
15894 pp_line_cutoff (global_dc->printer) = 5;
15896 /* Note that the newline is not translated into an escape. */
15897 msg.escape ("foo\nbar");
15898 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15900 msg.escape ("\a\b\f\n\r\t\v");
15901 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
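/* The difference from the -fmessage-length=0 case above is deliberate:
   when line wrapping is enabled, real newlines are left in place,
   presumably so the pretty-printer can still break lines at them, and
   only the other control characters are escaped.  */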
15903 /* Restore the original message length setting. */
15904 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15905 }
15907 /* Run all of the selftests within this file. */
15909 void
15910 tree_c_tests ()
15911 {
15912 test_integer_constants ();
15913 test_identifiers ();
15914 test_labels ();
15915 test_vector_cst_patterns ();
15916 test_location_wrappers ();
15917 test_predicates ();
15918 test_escaped_strings ();
15919 }
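/* tree_c_tests is invoked from selftest::run_tests along with the other
   per-file *_c_tests functions; the whole suite runs when the driver is
   passed -fself-test (e.g. via the "make selftest" step of a bootstrap),
   and compiles away entirely when CHECKING_P is 0.  */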
15921 } // namespace selftest
15923 #endif /* CHECKING_P */
15925 #include "gt-tree.h"