gcc/tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
73 /* Tree code classes. */
75 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
76 #define END_OF_BASE_TREE_CODES tcc_exceptional,
78 const enum tree_code_class tree_code_type[] = {
79 #include "all-tree.def"
82 #undef DEFTREECODE
83 #undef END_OF_BASE_TREE_CODES
85 /* Table indexed by tree code giving number of expression
86 operands beyond the fixed part of the node structure.
87 Not used for types or decls. */
89 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
90 #define END_OF_BASE_TREE_CODES 0,
92 const unsigned char tree_code_length[] = {
93 #include "all-tree.def"
96 #undef DEFTREECODE
97 #undef END_OF_BASE_TREE_CODES
99 /* Names of tree components.
100 Used for printing out the tree and error messages. */
101 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
102 #define END_OF_BASE_TREE_CODES "@dummy",
104 static const char *const tree_code_name[] = {
105 #include "all-tree.def"
108 #undef DEFTREECODE
109 #undef END_OF_BASE_TREE_CODES
111 /* Each tree code class has an associated string representation.
112 These must correspond to the tree_code_class entries. */
114 const char *const tree_code_class_strings[] =
116 "exceptional",
117 "constant",
118 "type",
119 "declaration",
120 "reference",
121 "comparison",
122 "unary",
123 "binary",
124 "statement",
125 "vl_exp",
126 "expression"
129 /* obstack.[ch] explicitly declined to prototype this. */
130 extern int _obstack_allocated_p (struct obstack *h, void *obj);
132 /* Statistics-gathering stuff. */
134 static uint64_t tree_code_counts[MAX_TREE_CODES];
135 uint64_t tree_node_counts[(int) all_kinds];
136 uint64_t tree_node_sizes[(int) all_kinds];
138 /* Keep in sync with tree.h:enum tree_node_kind. */
139 static const char * const tree_node_kind_names[] = {
140 "decls",
141 "types",
142 "blocks",
143 "stmts",
144 "refs",
145 "exprs",
146 "constants",
147 "identifiers",
148 "vecs",
149 "binfos",
150 "ssa names",
151 "constructors",
152 "random kinds",
153 "lang_decl kinds",
154 "lang_type kinds",
155 "omp clauses",
158 /* Unique id for next decl created. */
159 static GTY(()) int next_decl_uid;
160 /* Unique id for next type created. */
161 static GTY(()) unsigned next_type_uid = 1;
162 /* Unique id for next debug decl created. Use negative numbers,
163 to catch erroneous uses. */
164 static GTY(()) int next_debug_decl_uid;
166 /* Since we cannot rehash a type after it is in the table, we have to
167 keep the hash code. */
169 struct GTY((for_user)) type_hash {
170 unsigned long hash;
171 tree type;
174 /* Initial size of the hash table (rounded to next prime). */
175 #define TYPE_HASH_INITIAL_SIZE 1000
177 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
179 static hashval_t hash (type_hash *t) { return t->hash; }
180 static bool equal (type_hash *a, type_hash *b);
182 static int
183 keep_cache_entry (type_hash *&t)
185 return ggc_marked_p (t->type);
189 /* Now here is the hash table. When recording a type, it is added to
190 the slot whose index is the hash code. Note that the hash table is
191 used for several kinds of types (function types, array types and
192 array index range types, for now). While all these live in the
193 same table, they are completely independent, and the hash code is
194 computed differently for each of these. */
196 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
198 /* Hash table and temporary node for larger integer const values. */
199 static GTY (()) tree int_cst_node;
201 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
203 static hashval_t hash (tree t);
204 static bool equal (tree x, tree y);
207 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
209 /* Class and variable for making sure that there is a single POLY_INT_CST
210 for a given value. */
211 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
213 typedef std::pair<tree, const poly_wide_int *> compare_type;
214 static hashval_t hash (tree t);
215 static bool equal (tree x, const compare_type &y);
218 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
220 /* Hash table for optimization flags and target option flags. Use the same
221 hash table for both sets of options. Nodes for building the current
222 optimization and target option nodes. The assumption is most of the time
223 the options created will already be in the hash table, so we avoid
224 allocating and freeing up a node repeatedly. */
225 static GTY (()) tree cl_optimization_node;
226 static GTY (()) tree cl_target_option_node;
228 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
230 static hashval_t hash (tree t);
231 static bool equal (tree x, tree y);
234 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
236 /* General tree->tree mapping structure for use in hash tables. */
239 static GTY ((cache))
240 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
242 static GTY ((cache))
243 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
245 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
247 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
249 static bool
250 equal (tree_vec_map *a, tree_vec_map *b)
252 return a->base.from == b->base.from;
255 static int
256 keep_cache_entry (tree_vec_map *&m)
258 return ggc_marked_p (m->base.from);
262 static GTY ((cache))
263 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
265 static void set_type_quals (tree, int);
266 static void print_type_hash_statistics (void);
267 static void print_debug_expr_statistics (void);
268 static void print_value_expr_statistics (void);
270 tree global_trees[TI_MAX];
271 tree integer_types[itk_none];
273 bool int_n_enabled_p[NUM_INT_N_ENTS];
274 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
276 bool tree_contains_struct[MAX_TREE_CODES][64];
278 /* Number of operands for each OpenMP clause. */
279 unsigned const char omp_clause_num_ops[] =
281 0, /* OMP_CLAUSE_ERROR */
282 1, /* OMP_CLAUSE_PRIVATE */
283 1, /* OMP_CLAUSE_SHARED */
284 1, /* OMP_CLAUSE_FIRSTPRIVATE */
285 2, /* OMP_CLAUSE_LASTPRIVATE */
286 5, /* OMP_CLAUSE_REDUCTION */
287 5, /* OMP_CLAUSE_TASK_REDUCTION */
288 5, /* OMP_CLAUSE_IN_REDUCTION */
289 1, /* OMP_CLAUSE_COPYIN */
290 1, /* OMP_CLAUSE_COPYPRIVATE */
291 3, /* OMP_CLAUSE_LINEAR */
292 1, /* OMP_CLAUSE_AFFINITY */
293 2, /* OMP_CLAUSE_ALIGNED */
294 2, /* OMP_CLAUSE_ALLOCATE */
295 1, /* OMP_CLAUSE_DEPEND */
296 1, /* OMP_CLAUSE_NONTEMPORAL */
297 1, /* OMP_CLAUSE_UNIFORM */
298 1, /* OMP_CLAUSE_TO_DECLARE */
299 1, /* OMP_CLAUSE_LINK */
300 1, /* OMP_CLAUSE_DETACH */
301 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
302 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
303 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
304 1, /* OMP_CLAUSE_INCLUSIVE */
305 1, /* OMP_CLAUSE_EXCLUSIVE */
306 2, /* OMP_CLAUSE_FROM */
307 2, /* OMP_CLAUSE_TO */
308 2, /* OMP_CLAUSE_MAP */
309 2, /* OMP_CLAUSE__CACHE_ */
310 2, /* OMP_CLAUSE_GANG */
311 1, /* OMP_CLAUSE_ASYNC */
312 1, /* OMP_CLAUSE_WAIT */
313 0, /* OMP_CLAUSE_AUTO */
314 0, /* OMP_CLAUSE_SEQ */
315 1, /* OMP_CLAUSE__LOOPTEMP_ */
316 1, /* OMP_CLAUSE__REDUCTEMP_ */
317 1, /* OMP_CLAUSE__CONDTEMP_ */
318 1, /* OMP_CLAUSE__SCANTEMP_ */
319 1, /* OMP_CLAUSE_IF */
320 1, /* OMP_CLAUSE_NUM_THREADS */
321 1, /* OMP_CLAUSE_SCHEDULE */
322 0, /* OMP_CLAUSE_NOWAIT */
323 1, /* OMP_CLAUSE_ORDERED */
324 0, /* OMP_CLAUSE_DEFAULT */
325 3, /* OMP_CLAUSE_COLLAPSE */
326 0, /* OMP_CLAUSE_UNTIED */
327 1, /* OMP_CLAUSE_FINAL */
328 0, /* OMP_CLAUSE_MERGEABLE */
329 1, /* OMP_CLAUSE_DEVICE */
330 1, /* OMP_CLAUSE_DIST_SCHEDULE */
331 0, /* OMP_CLAUSE_INBRANCH */
332 0, /* OMP_CLAUSE_NOTINBRANCH */
333 1, /* OMP_CLAUSE_NUM_TEAMS */
334 1, /* OMP_CLAUSE_THREAD_LIMIT */
335 0, /* OMP_CLAUSE_PROC_BIND */
336 1, /* OMP_CLAUSE_SAFELEN */
337 1, /* OMP_CLAUSE_SIMDLEN */
338 0, /* OMP_CLAUSE_DEVICE_TYPE */
339 0, /* OMP_CLAUSE_FOR */
340 0, /* OMP_CLAUSE_PARALLEL */
341 0, /* OMP_CLAUSE_SECTIONS */
342 0, /* OMP_CLAUSE_TASKGROUP */
343 1, /* OMP_CLAUSE_PRIORITY */
344 1, /* OMP_CLAUSE_GRAINSIZE */
345 1, /* OMP_CLAUSE_NUM_TASKS */
346 0, /* OMP_CLAUSE_NOGROUP */
347 0, /* OMP_CLAUSE_THREADS */
348 0, /* OMP_CLAUSE_SIMD */
349 1, /* OMP_CLAUSE_HINT */
350 0, /* OMP_CLAUSE_DEFAULTMAP */
351 0, /* OMP_CLAUSE_ORDER */
352 0, /* OMP_CLAUSE_BIND */
353 1, /* OMP_CLAUSE__SIMDUID_ */
354 0, /* OMP_CLAUSE__SIMT_ */
355 0, /* OMP_CLAUSE_INDEPENDENT */
356 1, /* OMP_CLAUSE_WORKER */
357 1, /* OMP_CLAUSE_VECTOR */
358 1, /* OMP_CLAUSE_NUM_GANGS */
359 1, /* OMP_CLAUSE_NUM_WORKERS */
360 1, /* OMP_CLAUSE_VECTOR_LENGTH */
361 3, /* OMP_CLAUSE_TILE */
362 0, /* OMP_CLAUSE_IF_PRESENT */
363 0, /* OMP_CLAUSE_FINALIZE */
366 const char * const omp_clause_code_name[] =
368 "error_clause",
369 "private",
370 "shared",
371 "firstprivate",
372 "lastprivate",
373 "reduction",
374 "task_reduction",
375 "in_reduction",
376 "copyin",
377 "copyprivate",
378 "linear",
379 "affinity",
380 "aligned",
381 "allocate",
382 "depend",
383 "nontemporal",
384 "uniform",
385 "to",
386 "link",
387 "detach",
388 "use_device_ptr",
389 "use_device_addr",
390 "is_device_ptr",
391 "inclusive",
392 "exclusive",
393 "from",
394 "to",
395 "map",
396 "_cache_",
397 "gang",
398 "async",
399 "wait",
400 "auto",
401 "seq",
402 "_looptemp_",
403 "_reductemp_",
404 "_condtemp_",
405 "_scantemp_",
406 "if",
407 "num_threads",
408 "schedule",
409 "nowait",
410 "ordered",
411 "default",
412 "collapse",
413 "untied",
414 "final",
415 "mergeable",
416 "device",
417 "dist_schedule",
418 "inbranch",
419 "notinbranch",
420 "num_teams",
421 "thread_limit",
422 "proc_bind",
423 "safelen",
424 "simdlen",
425 "device_type",
426 "for",
427 "parallel",
428 "sections",
429 "taskgroup",
430 "priority",
431 "grainsize",
432 "num_tasks",
433 "nogroup",
434 "threads",
435 "simd",
436 "hint",
437 "defaultmap",
438 "order",
439 "bind",
440 "_simduid_",
441 "_simt_",
442 "independent",
443 "worker",
444 "vector",
445 "num_gangs",
446 "num_workers",
447 "vector_length",
448 "tile",
449 "if_present",
450 "finalize",
454 /* Return the tree node structure used by tree code CODE. */
456 static inline enum tree_node_structure_enum
457 tree_node_structure_for_code (enum tree_code code)
459 switch (TREE_CODE_CLASS (code))
461 case tcc_declaration:
462 switch (code)
464 case CONST_DECL: return TS_CONST_DECL;
465 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
466 case FIELD_DECL: return TS_FIELD_DECL;
467 case FUNCTION_DECL: return TS_FUNCTION_DECL;
468 case LABEL_DECL: return TS_LABEL_DECL;
469 case PARM_DECL: return TS_PARM_DECL;
470 case RESULT_DECL: return TS_RESULT_DECL;
471 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
472 case TYPE_DECL: return TS_TYPE_DECL;
473 case VAR_DECL: return TS_VAR_DECL;
474 default: return TS_DECL_NON_COMMON;
477 case tcc_type: return TS_TYPE_NON_COMMON;
479 case tcc_binary:
480 case tcc_comparison:
481 case tcc_expression:
482 case tcc_reference:
483 case tcc_statement:
484 case tcc_unary:
485 case tcc_vl_exp: return TS_EXP;
487 default: /* tcc_constant and tcc_exceptional */
488 break;
491 switch (code)
493 /* tcc_constant cases. */
494 case COMPLEX_CST: return TS_COMPLEX;
495 case FIXED_CST: return TS_FIXED_CST;
496 case INTEGER_CST: return TS_INT_CST;
497 case POLY_INT_CST: return TS_POLY_INT_CST;
498 case REAL_CST: return TS_REAL_CST;
499 case STRING_CST: return TS_STRING;
500 case VECTOR_CST: return TS_VECTOR;
501 case VOID_CST: return TS_TYPED;
503 /* tcc_exceptional cases. */
504 case BLOCK: return TS_BLOCK;
505 case CONSTRUCTOR: return TS_CONSTRUCTOR;
506 case ERROR_MARK: return TS_COMMON;
507 case IDENTIFIER_NODE: return TS_IDENTIFIER;
508 case OMP_CLAUSE: return TS_OMP_CLAUSE;
509 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
510 case PLACEHOLDER_EXPR: return TS_COMMON;
511 case SSA_NAME: return TS_SSA_NAME;
512 case STATEMENT_LIST: return TS_STATEMENT_LIST;
513 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
514 case TREE_BINFO: return TS_BINFO;
515 case TREE_LIST: return TS_LIST;
516 case TREE_VEC: return TS_VEC;
518 default:
519 gcc_unreachable ();
524 /* Initialize tree_contains_struct to describe the hierarchy of tree
525 nodes. */
527 static void
528 initialize_tree_contains_struct (void)
530 unsigned i;
532 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
534 enum tree_code code;
535 enum tree_node_structure_enum ts_code;
537 code = (enum tree_code) i;
538 ts_code = tree_node_structure_for_code (code);
540 /* Mark the TS structure itself. */
541 tree_contains_struct[code][ts_code] = 1;
543 /* Mark all the structures that TS is derived from. */
544 switch (ts_code)
546 case TS_TYPED:
547 case TS_BLOCK:
548 case TS_OPTIMIZATION:
549 case TS_TARGET_OPTION:
550 MARK_TS_BASE (code);
551 break;
553 case TS_COMMON:
554 case TS_INT_CST:
555 case TS_POLY_INT_CST:
556 case TS_REAL_CST:
557 case TS_FIXED_CST:
558 case TS_VECTOR:
559 case TS_STRING:
560 case TS_COMPLEX:
561 case TS_SSA_NAME:
562 case TS_CONSTRUCTOR:
563 case TS_EXP:
564 case TS_STATEMENT_LIST:
565 MARK_TS_TYPED (code);
566 break;
568 case TS_IDENTIFIER:
569 case TS_DECL_MINIMAL:
570 case TS_TYPE_COMMON:
571 case TS_LIST:
572 case TS_VEC:
573 case TS_BINFO:
574 case TS_OMP_CLAUSE:
575 MARK_TS_COMMON (code);
576 break;
578 case TS_TYPE_WITH_LANG_SPECIFIC:
579 MARK_TS_TYPE_COMMON (code);
580 break;
582 case TS_TYPE_NON_COMMON:
583 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
584 break;
586 case TS_DECL_COMMON:
587 MARK_TS_DECL_MINIMAL (code);
588 break;
590 case TS_DECL_WRTL:
591 case TS_CONST_DECL:
592 MARK_TS_DECL_COMMON (code);
593 break;
595 case TS_DECL_NON_COMMON:
596 MARK_TS_DECL_WITH_VIS (code);
597 break;
599 case TS_DECL_WITH_VIS:
600 case TS_PARM_DECL:
601 case TS_LABEL_DECL:
602 case TS_RESULT_DECL:
603 MARK_TS_DECL_WRTL (code);
604 break;
606 case TS_FIELD_DECL:
607 MARK_TS_DECL_COMMON (code);
608 break;
610 case TS_VAR_DECL:
611 MARK_TS_DECL_WITH_VIS (code);
612 break;
614 case TS_TYPE_DECL:
615 case TS_FUNCTION_DECL:
616 MARK_TS_DECL_NON_COMMON (code);
617 break;
619 case TS_TRANSLATION_UNIT_DECL:
620 MARK_TS_DECL_COMMON (code);
621 break;
623 default:
624 gcc_unreachable ();
628 /* Basic consistency checks for attributes used in fold. */
629 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
630 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
631 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
632 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
633 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
634 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
635 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
636 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
637 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
638 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
639 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
641 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
642 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
643 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
644 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
645 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
646 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
647 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
648 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
649 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
650 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
651 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
652 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
653 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
655 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
656 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
657 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
658 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
659 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
660 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
661 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
662 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
663 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
664 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
665 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
666 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
667 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
668 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
672 /* Init tree.c. */
674 void
675 init_ttree (void)
677 /* Initialize the hash table of types. */
678 type_hash_table
679 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
681 debug_expr_for_decl
682 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
684 value_expr_for_decl
685 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
687 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
689 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
691 int_cst_node = make_int_cst (1, 1);
693 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
695 cl_optimization_node = make_node (OPTIMIZATION_NODE);
696 cl_target_option_node = make_node (TARGET_OPTION_NODE);
698 /* Initialize the tree_contains_struct array. */
699 initialize_tree_contains_struct ();
700 lang_hooks.init_ts ();
704 /* The name of the object as the assembler will see it (but before any
705 translations made by ASM_OUTPUT_LABELREF). Often this is the same
706 as DECL_NAME. It is an IDENTIFIER_NODE. */
707 tree
708 decl_assembler_name (tree decl)
710 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
711 lang_hooks.set_decl_assembler_name (decl);
712 return DECL_ASSEMBLER_NAME_RAW (decl);
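/* A minimal usage sketch (FNDECL stands for any FUNCTION_DECL already built
   by a front end; example_assembler_name_str is an illustrative helper, not
   part of the GCC API): the assembler name is computed lazily by the
   language hook on the first request, so callers simply ask for it and read
   the string through the usual IDENTIFIER_POINTER accessor.  */

static inline const char *
example_assembler_name_str (tree fndecl)
{
  tree asmname = decl_assembler_name (fndecl);
  return IDENTIFIER_POINTER (asmname);
}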
715 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
716 (either of which may be NULL). Inform the FE, if this changes the
717 name. */
719 void
720 overwrite_decl_assembler_name (tree decl, tree name)
722 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
723 lang_hooks.overwrite_decl_assembler_name (decl, name);
726 /* Return true if DECL may need an assembler name to be set. */
728 static inline bool
729 need_assembler_name_p (tree decl)
731 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
732 Rule merging. This makes type_odr_p return true on those types during
733 LTO, and by comparing the mangled names we can say which types are intended
734 to be equivalent across compilation units.
736 We do not store names of type_in_anonymous_namespace_p.
738 Record, union and enumeration types have linkage that allows us
739 to check type_in_anonymous_namespace_p. We do not mangle compound types
740 that can always be compared structurally.
742 Similarly for builtin types, we compare properties of their main variant.
743 A special case is integer types, where mangling does make a difference
744 between char/signed char/unsigned char etc. Storing names for these makes
745 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
746 See cp/mangle.c:write_builtin_type for details. */
748 if (TREE_CODE (decl) == TYPE_DECL)
750 if (DECL_NAME (decl)
751 && decl == TYPE_NAME (TREE_TYPE (decl))
752 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
753 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
754 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
755 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
756 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
757 && (type_with_linkage_p (TREE_TYPE (decl))
758 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
759 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
760 return !DECL_ASSEMBLER_NAME_SET_P (decl);
761 return false;
763 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
764 if (!VAR_OR_FUNCTION_DECL_P (decl))
765 return false;
767 /* If DECL already has its assembler name set, it does not need a
768 new one. */
769 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
770 || DECL_ASSEMBLER_NAME_SET_P (decl))
771 return false;
773 /* Abstract decls do not need an assembler name. */
774 if (DECL_ABSTRACT_P (decl))
775 return false;
777 /* For VAR_DECLs, only static, public and external symbols need an
778 assembler name. */
779 if (VAR_P (decl)
780 && !TREE_STATIC (decl)
781 && !TREE_PUBLIC (decl)
782 && !DECL_EXTERNAL (decl))
783 return false;
785 if (TREE_CODE (decl) == FUNCTION_DECL)
787 /* Do not set assembler name on builtins. Allow RTL expansion to
788 decide whether to expand inline or via a regular call. */
789 if (fndecl_built_in_p (decl)
790 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
791 return false;
793 /* Functions represented in the callgraph need an assembler name. */
794 if (cgraph_node::get (decl) != NULL)
795 return true;
797 /* Unused and not public functions don't need an assembler name. */
798 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
799 return false;
802 return true;
805 /* If T needs an assembler name, have one created for it. */
807 void
808 assign_assembler_name_if_needed (tree t)
810 if (need_assembler_name_p (t))
812 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
813 diagnostics that use input_location to show locus
814 information. The problem here is that, at this point,
815 input_location is generally anchored to the end of the file
816 (since the parser is long gone), so we don't have a good
817 position to pin it to.
819 To alleviate this problem, this uses the location of T's
820 declaration. Examples of this are
821 testsuite/g++.dg/template/cond2.C and
822 testsuite/g++.dg/template/pr35240.C. */
823 location_t saved_location = input_location;
824 input_location = DECL_SOURCE_LOCATION (t);
826 decl_assembler_name (t);
828 input_location = saved_location;
832 /* When the target supports COMDAT groups, this indicates which group the
833 DECL is associated with. This can be either an IDENTIFIER_NODE or a
834 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
835 tree
836 decl_comdat_group (const_tree node)
838 struct symtab_node *snode = symtab_node::get (node);
839 if (!snode)
840 return NULL;
841 return snode->get_comdat_group ();
844 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
845 tree
846 decl_comdat_group_id (const_tree node)
848 struct symtab_node *snode = symtab_node::get (node);
849 if (!snode)
850 return NULL;
851 return snode->get_comdat_group_id ();
854 /* When the target supports named sections, return the name of the section
855 containing NODE as a string, or NULL if it is in no section. */
856 const char *
857 decl_section_name (const_tree node)
859 struct symtab_node *snode = symtab_node::get (node);
860 if (!snode)
861 return NULL;
862 return snode->get_section ();
865 /* Set section name of NODE to VALUE (a string), or clear it when
866 VALUE is NULL. */
867 void
868 set_decl_section_name (tree node, const char *value)
870 struct symtab_node *snode;
872 if (value == NULL)
874 snode = symtab_node::get (node);
875 if (!snode)
876 return;
878 else if (VAR_P (node))
879 snode = varpool_node::get_create (node);
880 else
881 snode = cgraph_node::get_create (node);
882 snode->set_section (value);
885 /* Set section name of NODE to match the section name of OTHER.
887 set_decl_section_name (decl, other) is equivalent to
888 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
889 efficient. */
890 void
891 set_decl_section_name (tree decl, const_tree other)
893 struct symtab_node *other_node = symtab_node::get (other);
894 if (other_node)
896 struct symtab_node *decl_node;
897 if (VAR_P (decl))
898 decl_node = varpool_node::get_create (decl);
899 else
900 decl_node = cgraph_node::get_create (decl);
901 decl_node->set_section (*other_node);
903 else
905 struct symtab_node *decl_node = symtab_node::get (decl);
906 if (!decl_node)
907 return;
908 decl_node->set_section (NULL);
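/* Illustrative sketch (VARDECL stands for a VAR_DECL created elsewhere and
   ".my_data" is an arbitrary section name): placing a variable into a named
   section and later clearing the choice again.  Passing a NULL string only
   clears the association if the decl already has a symtab node; the cast
   disambiguates between the two overloads above.  */

static void
example_place_in_section (tree vardecl)
{
  set_decl_section_name (vardecl, ".my_data");
  /* ... later, undo the placement ...  */
  set_decl_section_name (vardecl, (const char *) NULL);
}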
912 /* Return TLS model of a variable NODE. */
913 enum tls_model
914 decl_tls_model (const_tree node)
916 struct varpool_node *snode = varpool_node::get (node);
917 if (!snode)
918 return TLS_MODEL_NONE;
919 return snode->tls_model;
922 /* Set TLS model of variable NODE to MODEL. */
923 void
924 set_decl_tls_model (tree node, enum tls_model model)
926 struct varpool_node *vnode;
928 if (model == TLS_MODEL_NONE)
930 vnode = varpool_node::get (node);
931 if (!vnode)
932 return;
934 else
935 vnode = varpool_node::get_create (node);
936 vnode->tls_model = model;
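/* Illustrative sketch (VARDECL stands for a thread-local VAR_DECL):
   decl_tls_model returns TLS_MODEL_NONE while the variable has no varpool
   node, so the guard below only adjusts variables that already carry some
   TLS model.  */

static void
example_force_local_exec_tls (tree vardecl)
{
  if (decl_tls_model (vardecl) != TLS_MODEL_NONE)
    set_decl_tls_model (vardecl, TLS_MODEL_LOCAL_EXEC);
}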
939 /* Compute the number of bytes occupied by a tree with code CODE.
940 This function cannot be used for nodes that have variable sizes,
941 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
942 size_t
943 tree_code_size (enum tree_code code)
945 switch (TREE_CODE_CLASS (code))
947 case tcc_declaration: /* A decl node */
948 switch (code)
950 case FIELD_DECL: return sizeof (tree_field_decl);
951 case PARM_DECL: return sizeof (tree_parm_decl);
952 case VAR_DECL: return sizeof (tree_var_decl);
953 case LABEL_DECL: return sizeof (tree_label_decl);
954 case RESULT_DECL: return sizeof (tree_result_decl);
955 case CONST_DECL: return sizeof (tree_const_decl);
956 case TYPE_DECL: return sizeof (tree_type_decl);
957 case FUNCTION_DECL: return sizeof (tree_function_decl);
958 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
959 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
960 case NAMESPACE_DECL:
961 case IMPORTED_DECL:
962 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
963 default:
964 gcc_checking_assert (code >= NUM_TREE_CODES);
965 return lang_hooks.tree_size (code);
968 case tcc_type: /* a type node */
969 switch (code)
971 case OFFSET_TYPE:
972 case ENUMERAL_TYPE:
973 case BOOLEAN_TYPE:
974 case INTEGER_TYPE:
975 case REAL_TYPE:
976 case OPAQUE_TYPE:
977 case POINTER_TYPE:
978 case REFERENCE_TYPE:
979 case NULLPTR_TYPE:
980 case FIXED_POINT_TYPE:
981 case COMPLEX_TYPE:
982 case VECTOR_TYPE:
983 case ARRAY_TYPE:
984 case RECORD_TYPE:
985 case UNION_TYPE:
986 case QUAL_UNION_TYPE:
987 case VOID_TYPE:
988 case FUNCTION_TYPE:
989 case METHOD_TYPE:
990 case LANG_TYPE: return sizeof (tree_type_non_common);
991 default:
992 gcc_checking_assert (code >= NUM_TREE_CODES);
993 return lang_hooks.tree_size (code);
996 case tcc_reference: /* a reference */
997 case tcc_expression: /* an expression */
998 case tcc_statement: /* an expression with side effects */
999 case tcc_comparison: /* a comparison expression */
1000 case tcc_unary: /* a unary arithmetic expression */
1001 case tcc_binary: /* a binary arithmetic expression */
1002 return (sizeof (struct tree_exp)
1003 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
1005 case tcc_constant: /* a constant */
1006 switch (code)
1008 case VOID_CST: return sizeof (tree_typed);
1009 case INTEGER_CST: gcc_unreachable ();
1010 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
1011 case REAL_CST: return sizeof (tree_real_cst);
1012 case FIXED_CST: return sizeof (tree_fixed_cst);
1013 case COMPLEX_CST: return sizeof (tree_complex);
1014 case VECTOR_CST: gcc_unreachable ();
1015 case STRING_CST: gcc_unreachable ();
1016 default:
1017 gcc_checking_assert (code >= NUM_TREE_CODES);
1018 return lang_hooks.tree_size (code);
1021 case tcc_exceptional: /* something random, like an identifier. */
1022 switch (code)
1024 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
1025 case TREE_LIST: return sizeof (tree_list);
1027 case ERROR_MARK:
1028 case PLACEHOLDER_EXPR: return sizeof (tree_common);
1030 case TREE_VEC: gcc_unreachable ();
1031 case OMP_CLAUSE: gcc_unreachable ();
1033 case SSA_NAME: return sizeof (tree_ssa_name);
1035 case STATEMENT_LIST: return sizeof (tree_statement_list);
1036 case BLOCK: return sizeof (struct tree_block);
1037 case CONSTRUCTOR: return sizeof (tree_constructor);
1038 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
1039 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
1041 default:
1042 gcc_checking_assert (code >= NUM_TREE_CODES);
1043 return lang_hooks.tree_size (code);
1046 default:
1047 gcc_unreachable ();
1051 /* Compute the number of bytes occupied by NODE. This routine only
1052 looks at TREE_CODE, except for those nodes that have variable sizes. */
1053 size_t
1054 tree_size (const_tree node)
1056 const enum tree_code code = TREE_CODE (node);
1057 switch (code)
1059 case INTEGER_CST:
1060 return (sizeof (struct tree_int_cst)
1061 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1063 case TREE_BINFO:
1064 return (offsetof (struct tree_binfo, base_binfos)
1065 + vec<tree, va_gc>
1066 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1068 case TREE_VEC:
1069 return (sizeof (struct tree_vec)
1070 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1072 case VECTOR_CST:
1073 return (sizeof (struct tree_vector)
1074 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1076 case STRING_CST:
1077 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1079 case OMP_CLAUSE:
1080 return (sizeof (struct tree_omp_clause)
1081 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1082 * sizeof (tree));
1084 default:
1085 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1086 return (sizeof (struct tree_exp)
1087 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1088 else
1089 return tree_code_size (code);
1093 /* Return tree node kind based on tree CODE. */
1095 static tree_node_kind
1096 get_stats_node_kind (enum tree_code code)
1098 enum tree_code_class type = TREE_CODE_CLASS (code);
1100 switch (type)
1102 case tcc_declaration: /* A decl node */
1103 return d_kind;
1104 case tcc_type: /* a type node */
1105 return t_kind;
1106 case tcc_statement: /* an expression with side effects */
1107 return s_kind;
1108 case tcc_reference: /* a reference */
1109 return r_kind;
1110 case tcc_expression: /* an expression */
1111 case tcc_comparison: /* a comparison expression */
1112 case tcc_unary: /* a unary arithmetic expression */
1113 case tcc_binary: /* a binary arithmetic expression */
1114 return e_kind;
1115 case tcc_constant: /* a constant */
1116 return c_kind;
1117 case tcc_exceptional: /* something random, like an identifier. */
1118 switch (code)
1120 case IDENTIFIER_NODE:
1121 return id_kind;
1122 case TREE_VEC:
1123 return vec_kind;
1124 case TREE_BINFO:
1125 return binfo_kind;
1126 case SSA_NAME:
1127 return ssa_name_kind;
1128 case BLOCK:
1129 return b_kind;
1130 case CONSTRUCTOR:
1131 return constr_kind;
1132 case OMP_CLAUSE:
1133 return omp_clause_kind;
1134 default:
1135 return x_kind;
1137 break;
1138 case tcc_vl_exp:
1139 return e_kind;
1140 default:
1141 gcc_unreachable ();
1145 /* Record interesting allocation statistics for a tree node with CODE
1146 and LENGTH. */
1148 static void
1149 record_node_allocation_statistics (enum tree_code code, size_t length)
1151 if (!GATHER_STATISTICS)
1152 return;
1154 tree_node_kind kind = get_stats_node_kind (code);
1156 tree_code_counts[(int) code]++;
1157 tree_node_counts[(int) kind]++;
1158 tree_node_sizes[(int) kind] += length;
1161 /* Allocate and return a new UID from the DECL_UID namespace. */
1164 allocate_decl_uid (void)
1166 return next_decl_uid++;
1169 /* Return a newly allocated node of code CODE. For decl and type
1170 nodes, some other fields are initialized. The rest of the node is
1171 initialized to zero. This function cannot be used for TREE_VEC,
1172 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1173 tree_code_size.
1175 Achoo! I got a code in the node. */
1177 tree
1178 make_node (enum tree_code code MEM_STAT_DECL)
1180 tree t;
1181 enum tree_code_class type = TREE_CODE_CLASS (code);
1182 size_t length = tree_code_size (code);
1184 record_node_allocation_statistics (code, length);
1186 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1187 TREE_SET_CODE (t, code);
1189 switch (type)
1191 case tcc_statement:
1192 if (code != DEBUG_BEGIN_STMT)
1193 TREE_SIDE_EFFECTS (t) = 1;
1194 break;
1196 case tcc_declaration:
1197 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1199 if (code == FUNCTION_DECL)
1201 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1202 SET_DECL_MODE (t, FUNCTION_MODE);
1204 else
1205 SET_DECL_ALIGN (t, 1);
1207 DECL_SOURCE_LOCATION (t) = input_location;
1208 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1209 DECL_UID (t) = --next_debug_decl_uid;
1210 else
1212 DECL_UID (t) = allocate_decl_uid ();
1213 SET_DECL_PT_UID (t, -1);
1215 if (TREE_CODE (t) == LABEL_DECL)
1216 LABEL_DECL_UID (t) = -1;
1218 break;
1220 case tcc_type:
1221 TYPE_UID (t) = next_type_uid++;
1222 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1223 TYPE_USER_ALIGN (t) = 0;
1224 TYPE_MAIN_VARIANT (t) = t;
1225 TYPE_CANONICAL (t) = t;
1227 /* Default to no attributes for type, but let target change that. */
1228 TYPE_ATTRIBUTES (t) = NULL_TREE;
1229 targetm.set_default_type_attributes (t);
1231 /* We have not yet computed the alias set for this type. */
1232 TYPE_ALIAS_SET (t) = -1;
1233 break;
1235 case tcc_constant:
1236 TREE_CONSTANT (t) = 1;
1237 break;
1239 case tcc_expression:
1240 switch (code)
1242 case INIT_EXPR:
1243 case MODIFY_EXPR:
1244 case VA_ARG_EXPR:
1245 case PREDECREMENT_EXPR:
1246 case PREINCREMENT_EXPR:
1247 case POSTDECREMENT_EXPR:
1248 case POSTINCREMENT_EXPR:
1249 /* All of these have side-effects, no matter what their
1250 operands are. */
1251 TREE_SIDE_EFFECTS (t) = 1;
1252 break;
1254 default:
1255 break;
1257 break;
1259 case tcc_exceptional:
1260 switch (code)
1262 case TARGET_OPTION_NODE:
1263 TREE_TARGET_OPTION(t)
1264 = ggc_cleared_alloc<struct cl_target_option> ();
1265 break;
1267 case OPTIMIZATION_NODE:
1268 TREE_OPTIMIZATION (t)
1269 = ggc_cleared_alloc<struct cl_optimization> ();
1270 break;
1272 default:
1273 break;
1275 break;
1277 default:
1278 /* Other classes need no special treatment. */
1279 break;
1282 return t;
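/* Sketch of a typical make_node call (front ends normally go through
   wrappers such as build_decl, but bare nodes like BLOCK are created
   directly): the node comes back zeroed apart from the per-class setup
   performed above.  example_new_block is an illustrative helper only.  */

static tree
example_new_block (void)
{
  tree block = make_node (BLOCK);
  TREE_USED (block) = 1;
  return block;
}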
1285 /* Free tree node. */
1287 void
1288 free_node (tree node)
1290 enum tree_code code = TREE_CODE (node);
1291 if (GATHER_STATISTICS)
1293 enum tree_node_kind kind = get_stats_node_kind (code);
1295 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1296 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1297 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1299 tree_code_counts[(int) TREE_CODE (node)]--;
1300 tree_node_counts[(int) kind]--;
1301 tree_node_sizes[(int) kind] -= tree_size (node);
1303 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1304 vec_free (CONSTRUCTOR_ELTS (node));
1305 else if (code == BLOCK)
1306 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1307 else if (code == TREE_BINFO)
1308 vec_free (BINFO_BASE_ACCESSES (node));
1309 else if (code == OPTIMIZATION_NODE)
1310 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1311 else if (code == TARGET_OPTION_NODE)
1312 cl_target_option_free (TREE_TARGET_OPTION (node));
1313 ggc_free (node);
1316 /* Return a new node with the same contents as NODE except that its
1317 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1319 tree
1320 copy_node (tree node MEM_STAT_DECL)
1322 tree t;
1323 enum tree_code code = TREE_CODE (node);
1324 size_t length;
1326 gcc_assert (code != STATEMENT_LIST);
1328 length = tree_size (node);
1329 record_node_allocation_statistics (code, length);
1330 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1331 memcpy (t, node, length);
1333 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1334 TREE_CHAIN (t) = 0;
1335 TREE_ASM_WRITTEN (t) = 0;
1336 TREE_VISITED (t) = 0;
1338 if (TREE_CODE_CLASS (code) == tcc_declaration)
1340 if (code == DEBUG_EXPR_DECL)
1341 DECL_UID (t) = --next_debug_decl_uid;
1342 else
1344 DECL_UID (t) = allocate_decl_uid ();
1345 if (DECL_PT_UID_SET_P (node))
1346 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1348 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1349 && DECL_HAS_VALUE_EXPR_P (node))
1351 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1352 DECL_HAS_VALUE_EXPR_P (t) = 1;
1354 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1355 if (VAR_P (node))
1357 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1358 t->decl_with_vis.symtab_node = NULL;
1360 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1362 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1363 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1365 if (TREE_CODE (node) == FUNCTION_DECL)
1367 DECL_STRUCT_FUNCTION (t) = NULL;
1368 t->decl_with_vis.symtab_node = NULL;
1371 else if (TREE_CODE_CLASS (code) == tcc_type)
1373 TYPE_UID (t) = next_type_uid++;
1374 /* The following is so that the debug code for
1375 the copy is different from the original type.
1376 The two statements usually duplicate each other
1377 (because they clear fields of the same union),
1378 but the optimizer should catch that. */
1379 TYPE_SYMTAB_ADDRESS (t) = 0;
1380 TYPE_SYMTAB_DIE (t) = 0;
1382 /* Do not copy the values cache. */
1383 if (TYPE_CACHED_VALUES_P (t))
1385 TYPE_CACHED_VALUES_P (t) = 0;
1386 TYPE_CACHED_VALUES (t) = NULL_TREE;
1389 else if (code == TARGET_OPTION_NODE)
1391 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1392 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1393 sizeof (struct cl_target_option));
1395 else if (code == OPTIMIZATION_NODE)
1397 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1398 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1399 sizeof (struct cl_optimization));
1402 return t;
1405 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1406 For example, this can copy a list made of TREE_LIST nodes. */
1408 tree
1409 copy_list (tree list)
1411 tree head;
1412 tree prev, next;
1414 if (list == 0)
1415 return 0;
1417 head = prev = copy_node (list);
1418 next = TREE_CHAIN (list);
1419 while (next)
1421 TREE_CHAIN (prev) = copy_node (next);
1422 prev = TREE_CHAIN (prev);
1423 next = TREE_CHAIN (next);
1425 return head;
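/* Sketch: copy_list is the way to get a private, modifiable copy of a
   shared TREE_LIST chain (ATTRS stands for such a chain, e.g. a decl's
   attribute list).  Only the list nodes are fresh; the TREE_PURPOSE and
   TREE_VALUE payloads remain shared with the original.  */

static tree
example_private_attr_list (tree attrs)
{
  tree copy = copy_list (attrs);
  /* COPY may now be rechained or extended without touching ATTRS.  */
  return copy;
}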
1429 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1430 INTEGER_CST with value CST and type TYPE. */
1432 static unsigned int
1433 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1435 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1436 /* We need extra HWIs if CST is an unsigned integer with its
1437 upper bit set. */
1438 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1439 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1440 return cst.get_len ();
1443 /* Return a new INTEGER_CST with value CST and type TYPE. */
1445 static tree
1446 build_new_int_cst (tree type, const wide_int &cst)
1448 unsigned int len = cst.get_len ();
1449 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1450 tree nt = make_int_cst (len, ext_len);
1452 if (len < ext_len)
1454 --ext_len;
1455 TREE_INT_CST_ELT (nt, ext_len)
1456 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1457 for (unsigned int i = len; i < ext_len; ++i)
1458 TREE_INT_CST_ELT (nt, i) = -1;
1460 else if (TYPE_UNSIGNED (type)
1461 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1463 len--;
1464 TREE_INT_CST_ELT (nt, len)
1465 = zext_hwi (cst.elt (len),
1466 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1469 for (unsigned int i = 0; i < len; i++)
1470 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1471 TREE_TYPE (nt) = type;
1472 return nt;
1475 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1477 static tree
1478 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1479 CXX_MEM_STAT_INFO)
1481 size_t length = sizeof (struct tree_poly_int_cst);
1482 record_node_allocation_statistics (POLY_INT_CST, length);
1484 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1486 TREE_SET_CODE (t, POLY_INT_CST);
1487 TREE_CONSTANT (t) = 1;
1488 TREE_TYPE (t) = type;
1489 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1490 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1491 return t;
1494 /* Create a constant tree that contains CST sign-extended to TYPE. */
1496 tree
1497 build_int_cst (tree type, poly_int64 cst)
1499 /* Support legacy code. */
1500 if (!type)
1501 type = integer_type_node;
1503 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1506 /* Create a constant tree that contains CST zero-extended to TYPE. */
1508 tree
1509 build_int_cstu (tree type, poly_uint64 cst)
1511 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1514 /* Create a constant tree that contains CST sign-extended to TYPE. */
1516 tree
1517 build_int_cst_type (tree type, poly_int64 cst)
1519 gcc_assert (type);
1520 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1523 /* Constructs a tree of type TYPE with the value given by CST. Signedness
1524 of CST is assumed to be the same as the signedness of TYPE. */
1526 tree
1527 double_int_to_tree (tree type, double_int cst)
1529 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1532 /* We force the wide_int CST to the range of the type TYPE by sign or
1533 zero extending it. OVERFLOWABLE indicates if we are interested in
1534 overflow of the value, when >0 we are only interested in signed
1535 overflow, for <0 we are interested in any overflow. OVERFLOWED
1536 indicates whether overflow has already occurred. CONST_OVERFLOWED
1537 indicates whether constant overflow has already occurred. We force
1538 T's value to be within range of T's type (by setting to 0 or 1 all
1539 the bits outside the type's range). We set TREE_OVERFLOWED if,
1540 OVERFLOWED is nonzero,
1541 or OVERFLOWABLE is >0 and signed overflow occurs
1542 or OVERFLOWABLE is <0 and any overflow occurs
1543 We return a new tree node for the extended wide_int. The node
1544 is shared if no overflow flags are set. */
1547 tree
1548 force_fit_type (tree type, const poly_wide_int_ref &cst,
1549 int overflowable, bool overflowed)
1551 signop sign = TYPE_SIGN (type);
1553 /* If we need to set overflow flags, return a new unshared node. */
1554 if (overflowed || !wi::fits_to_tree_p (cst, type))
1556 if (overflowed
1557 || overflowable < 0
1558 || (overflowable > 0 && sign == SIGNED))
1560 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1561 sign);
1562 tree t;
1563 if (tmp.is_constant ())
1564 t = build_new_int_cst (type, tmp.coeffs[0]);
1565 else
1567 tree coeffs[NUM_POLY_INT_COEFFS];
1568 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1570 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1571 TREE_OVERFLOW (coeffs[i]) = 1;
1573 t = build_new_poly_int_cst (type, coeffs);
1575 TREE_OVERFLOW (t) = 1;
1576 return t;
1580 /* Else build a shared node. */
1581 return wide_int_to_tree (type, cst);
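/* Sketch of the contract described above force_fit_type (VAL stands for
   some computed value at TYPE's precision): passing OVERFLOWED as true
   always yields a fresh, unshared INTEGER_CST with TREE_OVERFLOW set, even
   when VAL itself fits in TYPE; a value that fits and carries no overflow
   comes back as a shared node instead.  */

static tree
example_overflowed_constant (tree type, const wide_int &val)
{
  tree t = force_fit_type (type, val, 0, true);
  gcc_checking_assert (TREE_OVERFLOW (t));
  return t;
}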
1584 /* These are the hash table functions for the hash table of INTEGER_CST
1585 nodes of a sizetype. */
1587 /* Return the hash code X, an INTEGER_CST. */
1589 hashval_t
1590 int_cst_hasher::hash (tree x)
1592 const_tree const t = x;
1593 hashval_t code = TYPE_UID (TREE_TYPE (t));
1594 int i;
1596 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1597 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1599 return code;
1602 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1603 is the same as that given by *Y, also an INTEGER_CST tree node. */
1605 bool
1606 int_cst_hasher::equal (tree x, tree y)
1608 const_tree const xt = x;
1609 const_tree const yt = y;
1611 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1612 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1613 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1614 return false;
1616 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1617 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1618 return false;
1620 return true;
1623 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1624 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1625 number of slots that can be cached for the type. */
1627 static inline tree
1628 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1629 int slot, int max_slots)
1631 gcc_checking_assert (slot >= 0);
1632 /* Initialize cache. */
1633 if (!TYPE_CACHED_VALUES_P (type))
1635 TYPE_CACHED_VALUES_P (type) = 1;
1636 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1638 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1639 if (!t)
1641 /* Create a new shared int. */
1642 t = build_new_int_cst (type, cst);
1643 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1645 return t;
1648 /* Create an INT_CST node of TYPE and value CST.
1649 The returned node is always shared. For small integers we use a
1650 per-type vector cache, for larger ones we use a single hash table.
1651 The value is extended from its precision according to the sign of
1652 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1653 the upper bits and ensures that hashing and value equality based
1654 upon the underlying HOST_WIDE_INTs works without masking. */
1656 static tree
1657 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1659 tree t;
1660 int ix = -1;
1661 int limit = 0;
1663 gcc_assert (type);
1664 unsigned int prec = TYPE_PRECISION (type);
1665 signop sgn = TYPE_SIGN (type);
1667 /* Verify that everything is canonical. */
1668 int l = pcst.get_len ();
1669 if (l > 1)
1671 if (pcst.elt (l - 1) == 0)
1672 gcc_checking_assert (pcst.elt (l - 2) < 0);
1673 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1674 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1677 wide_int cst = wide_int::from (pcst, prec, sgn);
1678 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1680 enum tree_code code = TREE_CODE (type);
1681 if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1683 /* Cache NULL pointer and zero bounds. */
1684 if (cst == 0)
1685 ix = 0;
1686 /* Cache upper bounds of pointers. */
1687 else if (cst == wi::max_value (prec, sgn))
1688 ix = 1;
1689 /* Cache 1 which is used for a non-zero range. */
1690 else if (cst == 1)
1691 ix = 2;
1693 if (ix >= 0)
1695 t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1696 /* Make sure no one is clobbering the shared constant. */
1697 gcc_checking_assert (TREE_TYPE (t) == type
1698 && cst == wi::to_wide (t));
1699 return t;
1702 if (ext_len == 1)
1704 /* We just need to store a single HOST_WIDE_INT. */
1705 HOST_WIDE_INT hwi;
1706 if (TYPE_UNSIGNED (type))
1707 hwi = cst.to_uhwi ();
1708 else
1709 hwi = cst.to_shwi ();
1711 switch (code)
1713 case NULLPTR_TYPE:
1714 gcc_assert (hwi == 0);
1715 /* Fallthru. */
1717 case POINTER_TYPE:
1718 case REFERENCE_TYPE:
1719 /* Ignore pointers, as they were already handled above. */
1720 break;
1722 case BOOLEAN_TYPE:
1723 /* Cache false or true. */
1724 limit = 2;
1725 if (IN_RANGE (hwi, 0, 1))
1726 ix = hwi;
1727 break;
1729 case INTEGER_TYPE:
1730 case OFFSET_TYPE:
1731 if (TYPE_SIGN (type) == UNSIGNED)
1733 /* Cache [0, N). */
1734 limit = param_integer_share_limit;
1735 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1736 ix = hwi;
1738 else
1740 /* Cache [-1, N). */
1741 limit = param_integer_share_limit + 1;
1742 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1743 ix = hwi + 1;
1745 break;
1747 case ENUMERAL_TYPE:
1748 break;
1750 default:
1751 gcc_unreachable ();
1754 if (ix >= 0)
1756 t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1757 /* Make sure no one is clobbering the shared constant. */
1758 gcc_checking_assert (TREE_TYPE (t) == type
1759 && TREE_INT_CST_NUNITS (t) == 1
1760 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1761 && TREE_INT_CST_EXT_NUNITS (t) == 1
1762 && TREE_INT_CST_ELT (t, 0) == hwi);
1763 return t;
1765 else
1767 /* Use the cache of larger shared ints, using int_cst_node as
1768 a temporary. */
1770 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1771 TREE_TYPE (int_cst_node) = type;
1773 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1774 t = *slot;
1775 if (!t)
1777 /* Insert this one into the hash table. */
1778 t = int_cst_node;
1779 *slot = t;
1780 /* Make a new node for next time round. */
1781 int_cst_node = make_int_cst (1, 1);
1785 else
1787 /* The value either hashes properly or we drop it on the floor
1788 for the gc to take care of. There will not be enough of them
1789 to worry about. */
1791 tree nt = build_new_int_cst (type, cst);
1792 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1793 t = *slot;
1794 if (!t)
1796 /* Insert this one into the hash table. */
1797 t = nt;
1798 *slot = t;
1800 else
1801 ggc_free (nt);
1804 return t;
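/* Sketch of the sharing guarantee documented before wide_int_to_tree_1:
   small values of a cached type always map to the identical node, so a
   pointer comparison is sufficient.  The value 7 is assumed to be below
   the param_integer_share_limit cutoff.  */

static void
example_int_cst_sharing (void)
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (integer_type_node, 7);
  gcc_checking_assert (a == b);
}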
1807 hashval_t
1808 poly_int_cst_hasher::hash (tree t)
1810 inchash::hash hstate;
1812 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1813 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1814 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1816 return hstate.end ();
1819 bool
1820 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1822 if (TREE_TYPE (x) != y.first)
1823 return false;
1824 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1825 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1826 return false;
1827 return true;
1830 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1831 The elements must also have type TYPE. */
1833 tree
1834 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1836 unsigned int prec = TYPE_PRECISION (type);
1837 gcc_assert (prec <= values.coeffs[0].get_precision ());
1838 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1840 inchash::hash h;
1841 h.add_int (TYPE_UID (type));
1842 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1843 h.add_wide_int (c.coeffs[i]);
1844 poly_int_cst_hasher::compare_type comp (type, &c);
1845 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1846 INSERT);
1847 if (*slot == NULL_TREE)
1849 tree coeffs[NUM_POLY_INT_COEFFS];
1850 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1851 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1852 *slot = build_new_poly_int_cst (type, coeffs);
1854 return *slot;
1857 /* Create a constant tree with value VALUE in type TYPE. */
1859 tree
1860 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1862 if (value.is_constant ())
1863 return wide_int_to_tree_1 (type, value.coeffs[0]);
1864 return build_poly_int_cst (type, value);
1867 /* Insert INTEGER_CST T into a cache of integer constants, and return
1868 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1869 is false, and T falls into the type's 'smaller values' range, there
1870 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1871 or the value is large, should an existing entry exist, it is
1872 returned (rather than inserting T). */
1874 tree
1875 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1877 tree type = TREE_TYPE (t);
1878 int ix = -1;
1879 int limit = 0;
1880 int prec = TYPE_PRECISION (type);
1882 gcc_assert (!TREE_OVERFLOW (t));
1884 /* The caching indices here must match those in
1885 wide_int_to_tree_1. */
1886 switch (TREE_CODE (type))
1888 case NULLPTR_TYPE:
1889 gcc_checking_assert (integer_zerop (t));
1890 /* Fallthru. */
1892 case POINTER_TYPE:
1893 case REFERENCE_TYPE:
1895 if (integer_zerop (t))
1896 ix = 0;
1897 else if (integer_onep (t))
1898 ix = 2;
1900 if (ix >= 0)
1901 limit = 3;
1903 break;
1905 case BOOLEAN_TYPE:
1906 /* Cache false or true. */
1907 limit = 2;
1908 if (wi::ltu_p (wi::to_wide (t), 2))
1909 ix = TREE_INT_CST_ELT (t, 0);
1910 break;
1912 case INTEGER_TYPE:
1913 case OFFSET_TYPE:
1914 if (TYPE_UNSIGNED (type))
1916 /* Cache 0..N */
1917 limit = param_integer_share_limit;
1919 /* This is a little hokey, but if the prec is smaller than
1920 what is necessary to hold param_integer_share_limit, then the
1921 obvious test will not get the correct answer. */
1922 if (prec < HOST_BITS_PER_WIDE_INT)
1924 if (tree_to_uhwi (t)
1925 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1926 ix = tree_to_uhwi (t);
1928 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1929 ix = tree_to_uhwi (t);
1931 else
1933 /* Cache -1..N */
1934 limit = param_integer_share_limit + 1;
1936 if (integer_minus_onep (t))
1937 ix = 0;
1938 else if (!wi::neg_p (wi::to_wide (t)))
1940 if (prec < HOST_BITS_PER_WIDE_INT)
1942 if (tree_to_shwi (t) < param_integer_share_limit)
1943 ix = tree_to_shwi (t) + 1;
1945 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1946 ix = tree_to_shwi (t) + 1;
1949 break;
1951 case ENUMERAL_TYPE:
1952 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1953 members. */
1954 break;
1956 default:
1957 gcc_unreachable ();
1960 if (ix >= 0)
1962 /* Look for it in the type's vector of small shared ints. */
1963 if (!TYPE_CACHED_VALUES_P (type))
1965 TYPE_CACHED_VALUES_P (type) = 1;
1966 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1969 if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
1971 gcc_checking_assert (might_duplicate);
1972 t = r;
1974 else
1975 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1977 else
1979 /* Use the cache of larger shared ints. */
1980 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1981 if (tree r = *slot)
1983 /* If there is already an entry for the number verify it's the
1984 same value. */
1985 gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
1986 /* And return the cached value. */
1987 t = r;
1989 else
1990 /* Otherwise insert this one into the hash table. */
1991 *slot = t;
1994 return t;
1998 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1999 and the rest are zeros. */
2001 tree
2002 build_low_bits_mask (tree type, unsigned bits)
2004 gcc_assert (bits <= TYPE_PRECISION (type));
2006 return wide_int_to_tree (type, wi::mask (bits, false,
2007 TYPE_PRECISION (type)));
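/* Sketch: a low-bits mask is just wi::mask routed through the shared
   constant cache, e.g. the low nibble of an unsigned int.  */

static tree
example_nibble_mask (void)
{
  /* Equivalent to build_int_cstu (unsigned_type_node, 0xf).  */
  return build_low_bits_mask (unsigned_type_node, 4);
}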
2010 /* Checks that X is an integer constant that can be expressed in (unsigned)
2011 HOST_WIDE_INT without loss of precision. */
2013 bool
2014 cst_and_fits_in_hwi (const_tree x)
2016 return (TREE_CODE (x) == INTEGER_CST
2017 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2020 /* Build a newly constructed VECTOR_CST with the given values of
2021 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2023 tree
2024 make_vector (unsigned log2_npatterns,
2025 unsigned int nelts_per_pattern MEM_STAT_DECL)
2027 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2028 tree t;
2029 unsigned npatterns = 1 << log2_npatterns;
2030 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2031 unsigned length = (sizeof (struct tree_vector)
2032 + (encoded_nelts - 1) * sizeof (tree));
2034 record_node_allocation_statistics (VECTOR_CST, length);
2036 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2038 TREE_SET_CODE (t, VECTOR_CST);
2039 TREE_CONSTANT (t) = 1;
2040 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2041 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2043 return t;
2046 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2047 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2049 tree
2050 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
2052 if (vec_safe_length (v) == 0)
2053 return build_zero_cst (type);
2055 unsigned HOST_WIDE_INT idx, nelts;
2056 tree value;
2058 /* We can't construct a VECTOR_CST for a variable number of elements. */
2059 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2060 tree_vector_builder vec (type, nelts, 1);
2061 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2063 if (TREE_CODE (value) == VECTOR_CST)
2065 /* If NELTS is constant then this must be too. */
2066 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2067 for (unsigned i = 0; i < sub_nelts; ++i)
2068 vec.quick_push (VECTOR_CST_ELT (value, i));
2070 else
2071 vec.quick_push (value);
2073 while (vec.length () < nelts)
2074 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2076 return vec.build ();
2079 /* Build a vector of type VECTYPE where all the elements are SCs. */
2080 tree
2081 build_vector_from_val (tree vectype, tree sc)
2083 unsigned HOST_WIDE_INT i, nunits;
2085 if (sc == error_mark_node)
2086 return sc;
2088 /* Verify that the vector type is suitable for SC. Note that there
2089 is some inconsistency in the type-system with respect to restrict
2090 qualifications of pointers. Vector types always have a main-variant
2091 element type and the qualification is applied to the vector-type.
2092 So TREE_TYPE (vector-type) does not return a properly qualified
2093 vector element-type. */
2094 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
2095 TREE_TYPE (vectype)));
2097 if (CONSTANT_CLASS_P (sc))
2099 tree_vector_builder v (vectype, 1, 1);
2100 v.quick_push (sc);
2101 return v.build ();
2103 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
2104 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
2105 else
2107 vec<constructor_elt, va_gc> *v;
2108 vec_alloc (v, nunits);
2109 for (i = 0; i < nunits; ++i)
2110 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2111 return build_constructor (vectype, v);
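/* Hypothetical example: splatting the scalar 1 across a four-element
   integer vector type built on the spot:

     tree v4si = build_vector_type (integer_type_node, 4);
     tree ones = build_vector_from_val (v4si, build_one_cst (integer_type_node));

   Because the scalar is a constant, the result is a VECTOR_CST rather than
   a CONSTRUCTOR or VEC_DUPLICATE_EXPR.  */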
2115 /* If TYPE is not a vector type, just return SC, otherwise return
2116 build_vector_from_val (TYPE, SC). */
2118 tree
2119 build_uniform_cst (tree type, tree sc)
2121 if (!VECTOR_TYPE_P (type))
2122 return sc;
2124 return build_vector_from_val (type, sc);
2127 /* Build a vector series of type TYPE in which element I has the value
2128 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2129 and a VEC_SERIES_EXPR otherwise. */
2131 tree
2132 build_vec_series (tree type, tree base, tree step)
2134 if (integer_zerop (step))
2135 return build_vector_from_val (type, base);
2136 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2138 tree_vector_builder builder (type, 1, 3);
2139 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2140 wi::to_wide (base) + wi::to_wide (step));
2141 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2142 wi::to_wide (elt1) + wi::to_wide (step));
2143 builder.quick_push (base);
2144 builder.quick_push (elt1);
2145 builder.quick_push (elt2);
2146 return builder.build ();
2148 return build2 (VEC_SERIES_EXPR, type, base, step);
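/* Sketch of the two possible outcomes, assuming V8SI is an 8-element
   integer vector type:

     build_vec_series (v8si, build_int_cst (integer_type_node, 0),
                       build_int_cst (integer_type_node, 1))

   returns the VECTOR_CST { 0, 1, 2, ..., 7 } (encoded as three elements),
   whereas passing a non-constant BASE or STEP returns a VEC_SERIES_EXPR
   instead.  */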
2151 /* Return a vector with the same number of units and number of bits
2152 as VEC_TYPE, but in which the elements are a linear series of unsigned
2153 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2155 tree
2156 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2158 tree index_vec_type = vec_type;
2159 tree index_elt_type = TREE_TYPE (vec_type);
2160 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2161 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2163 index_elt_type = build_nonstandard_integer_type
2164 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2165 index_vec_type = build_vector_type (index_elt_type, nunits);
2168 tree_vector_builder v (index_vec_type, 1, 3);
2169 for (unsigned int i = 0; i < 3; ++i)
2170 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2171 return v.build ();
2174 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2175 elements are A and the rest are B. */
2177 tree
2178 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2180 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2181 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2182 /* Optimize the constant case. */
2183 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2184 count /= 2;
2185 tree_vector_builder builder (vec_type, count, 2);
2186 for (unsigned int i = 0; i < count * 2; ++i)
2187 builder.quick_push (i < num_a ? a : b);
2188 return builder.build ();
2191 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2192 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2194 void
2195 recompute_constructor_flags (tree c)
2197 unsigned int i;
2198 tree val;
2199 bool constant_p = true;
2200 bool side_effects_p = false;
2201 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2203 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2205 /* Mostly ctors will have elts that don't have side-effects, so
2206 the usual case is to scan all the elements. Hence a single
2207 loop for both const and side effects, rather than one loop
2208 each (with early outs). */
2209 if (!TREE_CONSTANT (val))
2210 constant_p = false;
2211 if (TREE_SIDE_EFFECTS (val))
2212 side_effects_p = true;
2215 TREE_SIDE_EFFECTS (c) = side_effects_p;
2216 TREE_CONSTANT (c) = constant_p;
2219 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2220 CONSTRUCTOR C. */
2222 void
2223 verify_constructor_flags (tree c)
2225 unsigned int i;
2226 tree val;
2227 bool constant_p = TREE_CONSTANT (c);
2228 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2229 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2231 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2233 if (constant_p && !TREE_CONSTANT (val))
2234 internal_error ("non-constant element in constant CONSTRUCTOR");
2235 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2236 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2240 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2241 are in the vec pointed to by VALS. */
2242 tree
2243 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2245 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2247 TREE_TYPE (c) = type;
2248 CONSTRUCTOR_ELTS (c) = vals;
2250 recompute_constructor_flags (c);
2252 return c;
2255 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2256 INDEX and VALUE. */
2257 tree
2258 build_constructor_single (tree type, tree index, tree value)
2260 vec<constructor_elt, va_gc> *v;
2261 constructor_elt elt = {index, value};
2263 vec_alloc (v, 1);
2264 v->quick_push (elt);
2266 return build_constructor (type, v);
2270 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2271 are in a list pointed to by VALS. */
2272 tree
2273 build_constructor_from_list (tree type, tree vals)
2275 tree t;
2276 vec<constructor_elt, va_gc> *v = NULL;
2278 if (vals)
2280 vec_alloc (v, list_length (vals));
2281 for (t = vals; t; t = TREE_CHAIN (t))
2282 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2285 return build_constructor (type, v);
2288 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2289 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2290 fields in the constructor remain null. */
2292 tree
2293 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2295 vec<constructor_elt, va_gc> *v = NULL;
2297 for (tree t : vals)
2298 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2300 return build_constructor (type, v);
2303 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2304 of elements, provided as index/value pairs. */
2306 tree
2307 build_constructor_va (tree type, int nelts, ...)
2309 vec<constructor_elt, va_gc> *v = NULL;
2310 va_list p;
2312 va_start (p, nelts);
2313 vec_alloc (v, nelts);
2314 while (nelts--)
2316 tree index = va_arg (p, tree);
2317 tree value = va_arg (p, tree);
2318 CONSTRUCTOR_APPEND_ELT (v, index, value);
2320 va_end (p);
2321 return build_constructor (type, v);
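/* Hypothetical example: building the aggregate initializer { [0] = x, [1] = y }
   for an array type ATYPE, where X and Y are existing trees:

     tree ctor = build_constructor_va (atype, 2,
                                       size_int (0), x,
                                       size_int (1), y);  */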
2324 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2326 tree
2327 build_clobber (tree type)
2329 tree clobber = build_constructor (type, NULL);
2330 TREE_THIS_VOLATILE (clobber) = true;
2331 return clobber;
2334 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2336 tree
2337 build_fixed (tree type, FIXED_VALUE_TYPE f)
2339 tree v;
2340 FIXED_VALUE_TYPE *fp;
2342 v = make_node (FIXED_CST);
2343 fp = ggc_alloc<fixed_value> ();
2344 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2346 TREE_TYPE (v) = type;
2347 TREE_FIXED_CST_PTR (v) = fp;
2348 return v;
2351 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2353 tree
2354 build_real (tree type, REAL_VALUE_TYPE d)
2356 tree v;
2357 REAL_VALUE_TYPE *dp;
2358 int overflow = 0;
2360 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2361 Consider doing it via real_convert now. */
2363 v = make_node (REAL_CST);
2364 dp = ggc_alloc<real_value> ();
2365 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2367 TREE_TYPE (v) = type;
2368 TREE_REAL_CST_PTR (v) = dp;
2369 TREE_OVERFLOW (v) = overflow;
2370 return v;
2373 /* Like build_real, but first truncate D to the type. */
2375 tree
2376 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2378 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2381 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
2382 node I, converted in the mode of TYPE (or VOIDmode if TYPE is null). */
2384 REAL_VALUE_TYPE
2385 real_value_from_int_cst (const_tree type, const_tree i)
2387 REAL_VALUE_TYPE d;
2389 /* Clear all bits of the real value type so that we can later do
2390 bitwise comparisons to see if two values are the same. */
2391 memset (&d, 0, sizeof d);
2393 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2394 TYPE_SIGN (TREE_TYPE (i)));
2395 return d;
2398 /* Given a tree representing an integer constant I, return a tree
2399 representing the same value as a floating-point constant of type TYPE. */
2401 tree
2402 build_real_from_int_cst (tree type, const_tree i)
2404 tree v;
2405 int overflow = TREE_OVERFLOW (i);
2407 v = build_real (type, real_value_from_int_cst (type, i));
2409 TREE_OVERFLOW (v) |= overflow;
2410 return v;
2413 /* Return a new REAL_CST node whose type is TYPE
2414 and whose value is the integer value I which has sign SGN. */
2416 tree
2417 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2419 REAL_VALUE_TYPE d;
2421 /* Clear all bits of the real value type so that we can later do
2422 bitwise comparisons to see if two values are the same. */
2423 memset (&d, 0, sizeof d);
2425 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2426 return build_real (type, d);
2429 /* Return a newly constructed STRING_CST node whose value is the LEN
2430 characters at STR when STR is nonnull, or all zeros otherwise.
2431 Note that for a C string literal, LEN should include the trailing NUL.
2432 The TREE_TYPE is not initialized. */
2434 tree
2435 build_string (unsigned len, const char *str /*= NULL */)
2437 /* Do not waste bytes provided by padding of struct tree_string. */
2438 unsigned size = len + offsetof (struct tree_string, str) + 1;
2440 record_node_allocation_statistics (STRING_CST, size);
2442 tree s = (tree) ggc_internal_alloc (size);
2444 memset (s, 0, sizeof (struct tree_typed));
2445 TREE_SET_CODE (s, STRING_CST);
2446 TREE_CONSTANT (s) = 1;
2447 TREE_STRING_LENGTH (s) = len;
2448 if (str)
2449 memcpy (s->string.str, str, len);
2450 else
2451 memset (s->string.str, 0, len);
2452 s->string.str[len] = '\0';
2454 return s;
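/* Hypothetical example for a C string literal, where LEN counts the trailing
   NUL and the caller still has to supply a suitable TREE_TYPE afterwards:

     tree s = build_string (strlen ("abc") + 1, "abc");
     TREE_TYPE (s) = build_array_type (char_type_node,
                                       build_index_type (size_int (3)));  */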
2457 /* Return a newly constructed COMPLEX_CST node whose value is
2458 specified by the real and imaginary parts REAL and IMAG.
2459 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2460 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2462 tree
2463 build_complex (tree type, tree real, tree imag)
2465 gcc_assert (CONSTANT_CLASS_P (real));
2466 gcc_assert (CONSTANT_CLASS_P (imag));
2468 tree t = make_node (COMPLEX_CST);
2470 TREE_REALPART (t) = real;
2471 TREE_IMAGPART (t) = imag;
2472 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2473 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2474 return t;
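/* Hypothetical example: the double-precision constant 1.0 + 0.0i, letting
   the function create the complex type from the element type:

     tree c = build_complex (NULL_TREE,
                             build_real (double_type_node, dconst1),
                             build_real (double_type_node, dconst0));  */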
2477 /* Build a complex (inf +- 0i), such as for the result of cproj.
2478 TYPE is the complex tree type of the result. If NEG is true, the
2479 imaginary zero is negative. */
2481 tree
2482 build_complex_inf (tree type, bool neg)
2484 REAL_VALUE_TYPE rinf, rzero = dconst0;
2486 real_inf (&rinf);
2487 rzero.sign = neg;
2488 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2489 build_real (TREE_TYPE (type), rzero));
2492 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2493 element is set to 1. In particular, this is 1 + i for complex types. */
2495 tree
2496 build_each_one_cst (tree type)
2498 if (TREE_CODE (type) == COMPLEX_TYPE)
2500 tree scalar = build_one_cst (TREE_TYPE (type));
2501 return build_complex (type, scalar, scalar);
2503 else
2504 return build_one_cst (type);
2507 /* Return a constant of arithmetic type TYPE which is the
2508 multiplicative identity of the set TYPE. */
2510 tree
2511 build_one_cst (tree type)
2513 switch (TREE_CODE (type))
2515 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2516 case POINTER_TYPE: case REFERENCE_TYPE:
2517 case OFFSET_TYPE:
2518 return build_int_cst (type, 1);
2520 case REAL_TYPE:
2521 return build_real (type, dconst1);
2523 case FIXED_POINT_TYPE:
2524 /* We can only generate 1 for accum types. */
2525 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2526 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2528 case VECTOR_TYPE:
2530 tree scalar = build_one_cst (TREE_TYPE (type));
2532 return build_vector_from_val (type, scalar);
2535 case COMPLEX_TYPE:
2536 return build_complex (type,
2537 build_one_cst (TREE_TYPE (type)),
2538 build_zero_cst (TREE_TYPE (type)));
2540 default:
2541 gcc_unreachable ();
2545 /* Return an integer of type TYPE containing all 1's in as much precision as
2546 it contains, or a complex or vector whose subparts are such integers. */
2548 tree
2549 build_all_ones_cst (tree type)
2551 if (TREE_CODE (type) == COMPLEX_TYPE)
2553 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2554 return build_complex (type, scalar, scalar);
2556 else
2557 return build_minus_one_cst (type);
2560 /* Return a constant of arithmetic type TYPE which is the
2561 opposite of the multiplicative identity of the set TYPE. */
2563 tree
2564 build_minus_one_cst (tree type)
2566 switch (TREE_CODE (type))
2568 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2569 case POINTER_TYPE: case REFERENCE_TYPE:
2570 case OFFSET_TYPE:
2571 return build_int_cst (type, -1);
2573 case REAL_TYPE:
2574 return build_real (type, dconstm1);
2576 case FIXED_POINT_TYPE:
2577 /* We can only generate 1 for accum types. */
2578 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2579 return build_fixed (type,
2580 fixed_from_double_int (double_int_minus_one,
2581 SCALAR_TYPE_MODE (type)));
2583 case VECTOR_TYPE:
2585 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2587 return build_vector_from_val (type, scalar);
2590 case COMPLEX_TYPE:
2591 return build_complex (type,
2592 build_minus_one_cst (TREE_TYPE (type)),
2593 build_zero_cst (TREE_TYPE (type)));
2595 default:
2596 gcc_unreachable ();
2600 /* Build 0 constant of type TYPE. This is used by constructor folding
2601 and thus the constant should be represented in memory by
2602 zero(es). */
2604 tree
2605 build_zero_cst (tree type)
2607 switch (TREE_CODE (type))
2609 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2610 case POINTER_TYPE: case REFERENCE_TYPE:
2611 case OFFSET_TYPE: case NULLPTR_TYPE:
2612 return build_int_cst (type, 0);
2614 case REAL_TYPE:
2615 return build_real (type, dconst0);
2617 case FIXED_POINT_TYPE:
2618 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2620 case VECTOR_TYPE:
2622 tree scalar = build_zero_cst (TREE_TYPE (type));
2624 return build_vector_from_val (type, scalar);
2627 case COMPLEX_TYPE:
2629 tree zero = build_zero_cst (TREE_TYPE (type));
2631 return build_complex (type, zero, zero);
2634 default:
2635 if (!AGGREGATE_TYPE_P (type))
2636 return fold_convert (type, integer_zero_node);
2637 return build_constructor (type, NULL);
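/* For instance, build_zero_cst (double_type_node) is the REAL_CST 0.0,
   build_zero_cst on a vector type is a zero VECTOR_CST, and on a RECORD_TYPE
   it is an empty CONSTRUCTOR, which the middle end treats as "all bytes
   zero".  */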
2642 /* Build a BINFO with LEN language slots. */
2644 tree
2645 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2647 tree t;
2648 size_t length = (offsetof (struct tree_binfo, base_binfos)
2649 + vec<tree, va_gc>::embedded_size (base_binfos));
2651 record_node_allocation_statistics (TREE_BINFO, length);
2653 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2655 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2657 TREE_SET_CODE (t, TREE_BINFO);
2659 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2661 return t;
2664 /* Create a CASE_LABEL_EXPR tree node and return it. */
2666 tree
2667 build_case_label (tree low_value, tree high_value, tree label_decl)
2669 tree t = make_node (CASE_LABEL_EXPR);
2671 TREE_TYPE (t) = void_type_node;
2672 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2674 CASE_LOW (t) = low_value;
2675 CASE_HIGH (t) = high_value;
2676 CASE_LABEL (t) = label_decl;
2677 CASE_CHAIN (t) = NULL_TREE;
2679 return t;
2682 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2683 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2684 The latter determines the length of the HOST_WIDE_INT vector. */
2686 tree
2687 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2689 tree t;
2690 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2691 + sizeof (struct tree_int_cst));
2693 gcc_assert (len);
2694 record_node_allocation_statistics (INTEGER_CST, length);
2696 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2698 TREE_SET_CODE (t, INTEGER_CST);
2699 TREE_INT_CST_NUNITS (t) = len;
2700 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2701 /* to_offset can only be applied to trees that are offset_int-sized
2702 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2703 must be exactly the precision of offset_int and so LEN is correct. */
2704 if (ext_len <= OFFSET_INT_ELTS)
2705 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2706 else
2707 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2709 TREE_CONSTANT (t) = 1;
2711 return t;
2714 /* Build a newly constructed TREE_VEC node of length LEN. */
2716 tree
2717 make_tree_vec (int len MEM_STAT_DECL)
2719 tree t;
2720 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2722 record_node_allocation_statistics (TREE_VEC, length);
2724 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2726 TREE_SET_CODE (t, TREE_VEC);
2727 TREE_VEC_LENGTH (t) = len;
2729 return t;
2732 /* Grow a TREE_VEC node to new length LEN. */
2734 tree
2735 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2737 gcc_assert (TREE_CODE (v) == TREE_VEC);
2739 int oldlen = TREE_VEC_LENGTH (v);
2740 gcc_assert (len > oldlen);
2742 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2743 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2745 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2747 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2749 TREE_VEC_LENGTH (v) = len;
2751 return v;
2754 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2755 fixed, and scalar, complex or vector. */
2757 bool
2758 zerop (const_tree expr)
2760 return (integer_zerop (expr)
2761 || real_zerop (expr)
2762 || fixed_zerop (expr));
2765 /* Return 1 if EXPR is the integer constant zero or a complex constant
2766 of zero, or a location wrapper for such a constant. */
2768 bool
2769 integer_zerop (const_tree expr)
2771 STRIP_ANY_LOCATION_WRAPPER (expr);
2773 switch (TREE_CODE (expr))
2775 case INTEGER_CST:
2776 return wi::to_wide (expr) == 0;
2777 case COMPLEX_CST:
2778 return (integer_zerop (TREE_REALPART (expr))
2779 && integer_zerop (TREE_IMAGPART (expr)));
2780 case VECTOR_CST:
2781 return (VECTOR_CST_NPATTERNS (expr) == 1
2782 && VECTOR_CST_DUPLICATE_P (expr)
2783 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2784 default:
2785 return false;
2789 /* Return 1 if EXPR is the integer constant one or the corresponding
2790 complex constant, or a location wrapper for such a constant. */
2792 bool
2793 integer_onep (const_tree expr)
2795 STRIP_ANY_LOCATION_WRAPPER (expr);
2797 switch (TREE_CODE (expr))
2799 case INTEGER_CST:
2800 return wi::eq_p (wi::to_widest (expr), 1);
2801 case COMPLEX_CST:
2802 return (integer_onep (TREE_REALPART (expr))
2803 && integer_zerop (TREE_IMAGPART (expr)));
2804 case VECTOR_CST:
2805 return (VECTOR_CST_NPATTERNS (expr) == 1
2806 && VECTOR_CST_DUPLICATE_P (expr)
2807 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2808 default:
2809 return false;
2813 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2814 return 1 if every piece is the integer constant one.
2815 Also return 1 for location wrappers for such a constant. */
2817 bool
2818 integer_each_onep (const_tree expr)
2820 STRIP_ANY_LOCATION_WRAPPER (expr);
2822 if (TREE_CODE (expr) == COMPLEX_CST)
2823 return (integer_onep (TREE_REALPART (expr))
2824 && integer_onep (TREE_IMAGPART (expr)));
2825 else
2826 return integer_onep (expr);
2829 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2830 it contains, or a complex or vector whose subparts are such integers,
2831 or a location wrapper for such a constant. */
2833 bool
2834 integer_all_onesp (const_tree expr)
2836 STRIP_ANY_LOCATION_WRAPPER (expr);
2838 if (TREE_CODE (expr) == COMPLEX_CST
2839 && integer_all_onesp (TREE_REALPART (expr))
2840 && integer_all_onesp (TREE_IMAGPART (expr)))
2841 return true;
2843 else if (TREE_CODE (expr) == VECTOR_CST)
2844 return (VECTOR_CST_NPATTERNS (expr) == 1
2845 && VECTOR_CST_DUPLICATE_P (expr)
2846 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2848 else if (TREE_CODE (expr) != INTEGER_CST)
2849 return false;
2851 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2852 == wi::to_wide (expr));
2855 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2856 for such a constant. */
2858 bool
2859 integer_minus_onep (const_tree expr)
2861 STRIP_ANY_LOCATION_WRAPPER (expr);
2863 if (TREE_CODE (expr) == COMPLEX_CST)
2864 return (integer_all_onesp (TREE_REALPART (expr))
2865 && integer_zerop (TREE_IMAGPART (expr)));
2866 else
2867 return integer_all_onesp (expr);
2870 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2871 one bit on), or a location wrapper for such a constant. */
2873 bool
2874 integer_pow2p (const_tree expr)
2876 STRIP_ANY_LOCATION_WRAPPER (expr);
2878 if (TREE_CODE (expr) == COMPLEX_CST
2879 && integer_pow2p (TREE_REALPART (expr))
2880 && integer_zerop (TREE_IMAGPART (expr)))
2881 return true;
2883 if (TREE_CODE (expr) != INTEGER_CST)
2884 return false;
2886 return wi::popcount (wi::to_wide (expr)) == 1;
2889 /* Return 1 if EXPR is an integer constant other than zero or a
2890 complex constant other than zero, or a location wrapper for such a
2891 constant. */
2893 bool
2894 integer_nonzerop (const_tree expr)
2896 STRIP_ANY_LOCATION_WRAPPER (expr);
2898 return ((TREE_CODE (expr) == INTEGER_CST
2899 && wi::to_wide (expr) != 0)
2900 || (TREE_CODE (expr) == COMPLEX_CST
2901 && (integer_nonzerop (TREE_REALPART (expr))
2902 || integer_nonzerop (TREE_IMAGPART (expr)))));
2905 /* Return 1 if EXPR is the integer constant one. For vector,
2906 return 1 if every piece is the integer constant minus one
2907 (representing the value TRUE).
2908 Also return 1 for location wrappers for such a constant. */
2910 bool
2911 integer_truep (const_tree expr)
2913 STRIP_ANY_LOCATION_WRAPPER (expr);
2915 if (TREE_CODE (expr) == VECTOR_CST)
2916 return integer_all_onesp (expr);
2917 return integer_onep (expr);
2920 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2921 for such a constant. */
2923 bool
2924 fixed_zerop (const_tree expr)
2926 STRIP_ANY_LOCATION_WRAPPER (expr);
2928 return (TREE_CODE (expr) == FIXED_CST
2929 && TREE_FIXED_CST (expr).data.is_zero ());
2932 /* Return the power of two represented by a tree node known to be a
2933 power of two. */
2935 int
2936 tree_log2 (const_tree expr)
2938 if (TREE_CODE (expr) == COMPLEX_CST)
2939 return tree_log2 (TREE_REALPART (expr));
2941 return wi::exact_log2 (wi::to_wide (expr));
2944 /* Similar, but return the largest integer Y such that 2 ** Y is less
2945 than or equal to EXPR. */
2947 int
2948 tree_floor_log2 (const_tree expr)
2950 if (TREE_CODE (expr) == COMPLEX_CST)
2951 return tree_log2 (TREE_REALPART (expr));
2953 return wi::floor_log2 (wi::to_wide (expr));
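/* Worked examples: for the INTEGER_CST 8 both functions return 3; for the
   INTEGER_CST 10, tree_floor_log2 returns 3, while tree_log2 (whose input
   is supposed to be an exact power of two) would return -1 via
   wi::exact_log2.  */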
2956 /* Return number of known trailing zero bits in EXPR, or, if the value of
2957 EXPR is known to be zero, the precision of its type. */
2959 unsigned int
2960 tree_ctz (const_tree expr)
2962 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2963 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2964 return 0;
2966 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2967 switch (TREE_CODE (expr))
2969 case INTEGER_CST:
2970 ret1 = wi::ctz (wi::to_wide (expr));
2971 return MIN (ret1, prec);
2972 case SSA_NAME:
2973 ret1 = wi::ctz (get_nonzero_bits (expr));
2974 return MIN (ret1, prec);
2975 case PLUS_EXPR:
2976 case MINUS_EXPR:
2977 case BIT_IOR_EXPR:
2978 case BIT_XOR_EXPR:
2979 case MIN_EXPR:
2980 case MAX_EXPR:
2981 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2982 if (ret1 == 0)
2983 return ret1;
2984 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2985 return MIN (ret1, ret2);
2986 case POINTER_PLUS_EXPR:
2987 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2988 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2989 /* Second operand is sizetype, which could be in theory
2990 wider than pointer's precision. Make sure we never
2991 return more than prec. */
2992 ret2 = MIN (ret2, prec);
2993 return MIN (ret1, ret2);
2994 case BIT_AND_EXPR:
2995 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2996 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2997 return MAX (ret1, ret2);
2998 case MULT_EXPR:
2999 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3000 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3001 return MIN (ret1 + ret2, prec);
3002 case LSHIFT_EXPR:
3003 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3004 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3005 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3007 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3008 return MIN (ret1 + ret2, prec);
3010 return ret1;
3011 case RSHIFT_EXPR:
3012 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3013 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3015 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3016 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3017 if (ret1 > ret2)
3018 return ret1 - ret2;
3020 return 0;
3021 case TRUNC_DIV_EXPR:
3022 case CEIL_DIV_EXPR:
3023 case FLOOR_DIV_EXPR:
3024 case ROUND_DIV_EXPR:
3025 case EXACT_DIV_EXPR:
3026 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3027 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
3029 int l = tree_log2 (TREE_OPERAND (expr, 1));
3030 if (l >= 0)
3032 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3033 ret2 = l;
3034 if (ret1 > ret2)
3035 return ret1 - ret2;
3038 return 0;
3039 CASE_CONVERT:
3040 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3041 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
3042 ret1 = prec;
3043 return MIN (ret1, prec);
3044 case SAVE_EXPR:
3045 return tree_ctz (TREE_OPERAND (expr, 0));
3046 case COND_EXPR:
3047 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
3048 if (ret1 == 0)
3049 return 0;
3050 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
3051 return MIN (ret1, ret2);
3052 case COMPOUND_EXPR:
3053 return tree_ctz (TREE_OPERAND (expr, 1));
3054 case ADDR_EXPR:
3055 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
3056 if (ret1 > BITS_PER_UNIT)
3058 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
3059 return MIN (ret1, prec);
3061 return 0;
3062 default:
3063 return 0;
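/* Worked examples: for the INTEGER_CST 24 (binary 11000) the result is 3;
   for X << 4 it is tree_ctz (X) + 4 capped at the precision; and for X & Y
   it is the maximum of the two operands' counts, since anding can only
   clear more low bits.  */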
3067 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3068 decimal float constants, so don't return 1 for them.
3069 Also return 1 for location wrappers around such a constant. */
3071 bool
3072 real_zerop (const_tree expr)
3074 STRIP_ANY_LOCATION_WRAPPER (expr);
3076 switch (TREE_CODE (expr))
3078 case REAL_CST:
3079 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3080 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3081 case COMPLEX_CST:
3082 return real_zerop (TREE_REALPART (expr))
3083 && real_zerop (TREE_IMAGPART (expr));
3084 case VECTOR_CST:
3086 /* Don't simply check for a duplicate because the predicate
3087 accepts both +0.0 and -0.0. */
3088 unsigned count = vector_cst_encoded_nelts (expr);
3089 for (unsigned int i = 0; i < count; ++i)
3090 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3091 return false;
3092 return true;
3094 default:
3095 return false;
3099 /* Return 1 if EXPR is the real constant one in real or complex form.
3100 Trailing zeroes matter for decimal float constants, so don't return
3101 1 for them.
3102 Also return 1 for location wrappers around such a constant. */
3104 bool
3105 real_onep (const_tree expr)
3107 STRIP_ANY_LOCATION_WRAPPER (expr);
3109 switch (TREE_CODE (expr))
3111 case REAL_CST:
3112 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3113 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3114 case COMPLEX_CST:
3115 return real_onep (TREE_REALPART (expr))
3116 && real_zerop (TREE_IMAGPART (expr));
3117 case VECTOR_CST:
3118 return (VECTOR_CST_NPATTERNS (expr) == 1
3119 && VECTOR_CST_DUPLICATE_P (expr)
3120 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3121 default:
3122 return false;
3126 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3127 matter for decimal float constants, so don't return 1 for them.
3128 Also return 1 for location wrappers around such a constant. */
3130 bool
3131 real_minus_onep (const_tree expr)
3133 STRIP_ANY_LOCATION_WRAPPER (expr);
3135 switch (TREE_CODE (expr))
3137 case REAL_CST:
3138 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3139 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3140 case COMPLEX_CST:
3141 return real_minus_onep (TREE_REALPART (expr))
3142 && real_zerop (TREE_IMAGPART (expr));
3143 case VECTOR_CST:
3144 return (VECTOR_CST_NPATTERNS (expr) == 1
3145 && VECTOR_CST_DUPLICATE_P (expr)
3146 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3147 default:
3148 return false;
3152 /* Nonzero if EXP is a constant or a cast of a constant. */
3154 bool
3155 really_constant_p (const_tree exp)
3157 /* This is not quite the same as STRIP_NOPS. It does more. */
3158 while (CONVERT_EXPR_P (exp)
3159 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3160 exp = TREE_OPERAND (exp, 0);
3161 return TREE_CONSTANT (exp);
3164 /* Return true if T holds a polynomial pointer difference, storing it in
3165 *VALUE if so. A true return means that T's precision is no greater
3166 than 64 bits, which is the largest address space we support, so *VALUE
3167 never loses precision. However, the signedness of the result does
3168 not necessarily match the signedness of T: sometimes an unsigned type
3169 like sizetype is used to encode a value that is actually negative. */
3171 bool
3172 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
3174 if (!t)
3175 return false;
3176 if (TREE_CODE (t) == INTEGER_CST)
3178 if (!cst_and_fits_in_hwi (t))
3179 return false;
3180 *value = int_cst_value (t);
3181 return true;
3183 if (POLY_INT_CST_P (t))
3185 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3186 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3187 return false;
3188 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3189 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3190 return true;
3192 return false;
3195 poly_int64
3196 tree_to_poly_int64 (const_tree t)
3198 gcc_assert (tree_fits_poly_int64_p (t));
3199 if (POLY_INT_CST_P (t))
3200 return poly_int_cst_value (t).force_shwi ();
3201 return TREE_INT_CST_LOW (t);
3204 poly_uint64
3205 tree_to_poly_uint64 (const_tree t)
3207 gcc_assert (tree_fits_poly_uint64_p (t));
3208 if (POLY_INT_CST_P (t))
3209 return poly_int_cst_value (t).force_uhwi ();
3210 return TREE_INT_CST_LOW (t);
3213 /* Return first list element whose TREE_VALUE is ELEM.
3214 Return 0 if ELEM is not in LIST. */
3216 tree
3217 value_member (tree elem, tree list)
3219 while (list)
3221 if (elem == TREE_VALUE (list))
3222 return list;
3223 list = TREE_CHAIN (list);
3225 return NULL_TREE;
3228 /* Return first list element whose TREE_PURPOSE is ELEM.
3229 Return 0 if ELEM is not in LIST. */
3231 tree
3232 purpose_member (const_tree elem, tree list)
3234 while (list)
3236 if (elem == TREE_PURPOSE (list))
3237 return list;
3238 list = TREE_CHAIN (list);
3240 return NULL_TREE;
3243 /* Return true if ELEM is in V. */
3245 bool
3246 vec_member (const_tree elem, vec<tree, va_gc> *v)
3248 unsigned ix;
3249 tree t;
3250 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3251 if (elem == t)
3252 return true;
3253 return false;
3256 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3257 NULL_TREE. */
3259 tree
3260 chain_index (int idx, tree chain)
3262 for (; chain && idx > 0; --idx)
3263 chain = TREE_CHAIN (chain);
3264 return chain;
3267 /* Return nonzero if ELEM is part of the chain CHAIN. */
3269 bool
3270 chain_member (const_tree elem, const_tree chain)
3272 while (chain)
3274 if (elem == chain)
3275 return true;
3276 chain = DECL_CHAIN (chain);
3279 return false;
3282 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3283 We expect a null pointer to mark the end of the chain.
3284 This is the Lisp primitive `length'. */
3286 int
3287 list_length (const_tree t)
3289 const_tree p = t;
3290 #ifdef ENABLE_TREE_CHECKING
3291 const_tree q = t;
3292 #endif
3293 int len = 0;
3295 while (p)
3297 p = TREE_CHAIN (p);
3298 #ifdef ENABLE_TREE_CHECKING
3299 if (len % 2)
3300 q = TREE_CHAIN (q);
3301 gcc_assert (p != q);
3302 #endif
3303 len++;
3306 return len;
3309 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3310 UNION_TYPE TYPE, or NULL_TREE if none. */
3312 tree
3313 first_field (const_tree type)
3315 tree t = TYPE_FIELDS (type);
3316 while (t && TREE_CODE (t) != FIELD_DECL)
3317 t = TREE_CHAIN (t);
3318 return t;
3321 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3322 UNION_TYPE TYPE, or NULL_TREE if none. */
3324 tree
3325 last_field (const_tree type)
3327 tree last = NULL_TREE;
3329 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3331 if (TREE_CODE (fld) != FIELD_DECL)
3332 continue;
3334 last = fld;
3337 return last;
3340 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3341 by modifying the last node in chain 1 to point to chain 2.
3342 This is the Lisp primitive `nconc'. */
3344 tree
3345 chainon (tree op1, tree op2)
3347 tree t1;
3349 if (!op1)
3350 return op2;
3351 if (!op2)
3352 return op1;
3354 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3355 continue;
3356 TREE_CHAIN (t1) = op2;
3358 #ifdef ENABLE_TREE_CHECKING
3360 tree t2;
3361 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3362 gcc_assert (t2 != t1);
3364 #endif
3366 return op1;
3369 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3371 tree
3372 tree_last (tree chain)
3374 tree next;
3375 if (chain)
3376 while ((next = TREE_CHAIN (chain)))
3377 chain = next;
3378 return chain;
3381 /* Reverse the order of elements in the chain T,
3382 and return the new head of the chain (old last element). */
3384 tree
3385 nreverse (tree t)
3387 tree prev = 0, decl, next;
3388 for (decl = t; decl; decl = next)
3390 /* We shouldn't be using this function to reverse BLOCK chains; we
3391 have blocks_nreverse for that. */
3392 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3393 next = TREE_CHAIN (decl);
3394 TREE_CHAIN (decl) = prev;
3395 prev = decl;
3397 return prev;
3400 /* Return a newly created TREE_LIST node whose
3401 purpose and value fields are PARM and VALUE. */
3403 tree
3404 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3406 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3407 TREE_PURPOSE (t) = parm;
3408 TREE_VALUE (t) = value;
3409 return t;
3412 /* Build a chain of TREE_LIST nodes from a vector. */
3414 tree
3415 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3417 tree ret = NULL_TREE;
3418 tree *pp = &ret;
3419 unsigned int i;
3420 tree t;
3421 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3423 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3424 pp = &TREE_CHAIN (*pp);
3426 return ret;
3429 /* Return a newly created TREE_LIST node whose
3430 purpose and value fields are PURPOSE and VALUE
3431 and whose TREE_CHAIN is CHAIN. */
3433 tree
3434 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3436 tree node;
3438 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3439 memset (node, 0, sizeof (struct tree_common));
3441 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3443 TREE_SET_CODE (node, TREE_LIST);
3444 TREE_CHAIN (node) = chain;
3445 TREE_PURPOSE (node) = purpose;
3446 TREE_VALUE (node) = value;
3447 return node;
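/* Hypothetical example combining the list primitives above: pushing two
   values onto a TREE_LIST and then restoring source order:

     tree l = NULL_TREE;
     l = tree_cons (NULL_TREE, first, l);
     l = tree_cons (NULL_TREE, second, l);
     l = nreverse (l);

   After this, list_length (l) is 2 and TREE_VALUE (l) is FIRST again.  */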
3450 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3451 trees. */
3453 vec<tree, va_gc> *
3454 ctor_to_vec (tree ctor)
3456 vec<tree, va_gc> *vec;
3457 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3458 unsigned int ix;
3459 tree val;
3461 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3462 vec->quick_push (val);
3464 return vec;
3467 /* Return the size nominally occupied by an object of type TYPE
3468 when it resides in memory. The value is measured in units of bytes,
3469 and its data type is that normally used for type sizes
3470 (which is the first type created by make_signed_type or
3471 make_unsigned_type). */
3473 tree
3474 size_in_bytes_loc (location_t loc, const_tree type)
3476 tree t;
3478 if (type == error_mark_node)
3479 return integer_zero_node;
3481 type = TYPE_MAIN_VARIANT (type);
3482 t = TYPE_SIZE_UNIT (type);
3484 if (t == 0)
3486 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3487 return size_zero_node;
3490 return t;
3493 /* Return the size of TYPE (in bytes) as a wide integer
3494 or return -1 if the size can vary or is larger than an integer. */
3496 HOST_WIDE_INT
3497 int_size_in_bytes (const_tree type)
3499 tree t;
3501 if (type == error_mark_node)
3502 return 0;
3504 type = TYPE_MAIN_VARIANT (type);
3505 t = TYPE_SIZE_UNIT (type);
3507 if (t && tree_fits_uhwi_p (t))
3508 return TREE_INT_CST_LOW (t);
3509 else
3510 return -1;
3513 /* Return the maximum size of TYPE (in bytes) as a wide integer
3514 or return -1 if the size can vary or is larger than an integer. */
3516 HOST_WIDE_INT
3517 max_int_size_in_bytes (const_tree type)
3519 HOST_WIDE_INT size = -1;
3520 tree size_tree;
3522 /* If this is an array type, check for a possible MAX_SIZE attached. */
3524 if (TREE_CODE (type) == ARRAY_TYPE)
3526 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3528 if (size_tree && tree_fits_uhwi_p (size_tree))
3529 size = tree_to_uhwi (size_tree);
3532 /* If we still haven't been able to get a size, see if the language
3533 can compute a maximum size. */
3535 if (size == -1)
3537 size_tree = lang_hooks.types.max_size (type);
3539 if (size_tree && tree_fits_uhwi_p (size_tree))
3540 size = tree_to_uhwi (size_tree);
3543 return size;
3546 /* Return the bit position of FIELD, in bits from the start of the record.
3547 This is a tree of type bitsizetype. */
3549 tree
3550 bit_position (const_tree field)
3552 return bit_from_pos (DECL_FIELD_OFFSET (field),
3553 DECL_FIELD_BIT_OFFSET (field));
3556 /* Return the byte position of FIELD, in bytes from the start of the record.
3557 This is a tree of type sizetype. */
3559 tree
3560 byte_position (const_tree field)
3562 return byte_from_pos (DECL_FIELD_OFFSET (field),
3563 DECL_FIELD_BIT_OFFSET (field));
3566 /* Likewise, but return as an integer. It must be representable in
3567 that way (since it could be a signed value, we don't have the
3568 option of returning -1 like int_size_in_bytes can). */
3570 HOST_WIDE_INT
3571 int_byte_position (const_tree field)
3573 return tree_to_shwi (byte_position (field));
3576 /* Return, as a tree node, the number of elements for TYPE (which is an
3577 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3579 tree
3580 array_type_nelts (const_tree type)
3582 tree index_type, min, max;
3584 /* If they did it with unspecified bounds, then we should have already
3585 given an error about it before we got here. */
3586 if (! TYPE_DOMAIN (type))
3587 return error_mark_node;
3589 index_type = TYPE_DOMAIN (type);
3590 min = TYPE_MIN_VALUE (index_type);
3591 max = TYPE_MAX_VALUE (index_type);
3593 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3594 if (!max)
3596 /* Zero-sized arrays are represented by the C FE as complete types with
3597 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while the C++ FE represents
3598 them as min 0, max -1. */
3599 if (COMPLETE_TYPE_P (type)
3600 && integer_zerop (TYPE_SIZE (type))
3601 && integer_zerop (min))
3602 return build_int_cst (TREE_TYPE (min), -1);
3604 return error_mark_node;
3607 return (integer_zerop (min)
3608 ? max
3609 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
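/* For example, for the C type int[10] the domain is [0, 9], MIN is zero and
   the function simply returns MAX, i.e. the INTEGER_CST 9 (the number of
   elements minus one).  */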
3612 /* If arg is static -- a reference to an object in static storage -- then
3613 return the object. This is not the same as the C meaning of `static'.
3614 If arg isn't static, return NULL. */
3616 tree
3617 staticp (tree arg)
3619 switch (TREE_CODE (arg))
3621 case FUNCTION_DECL:
3622 /* Nested functions are static, even though taking their address will
3623 involve a trampoline as we unnest the nested function and create
3624 the trampoline on the tree level. */
3625 return arg;
3627 case VAR_DECL:
3628 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3629 && ! DECL_THREAD_LOCAL_P (arg)
3630 && ! DECL_DLLIMPORT_P (arg)
3631 ? arg : NULL);
3633 case CONST_DECL:
3634 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3635 ? arg : NULL);
3637 case CONSTRUCTOR:
3638 return TREE_STATIC (arg) ? arg : NULL;
3640 case LABEL_DECL:
3641 case STRING_CST:
3642 return arg;
3644 case COMPONENT_REF:
3645 /* If the thing being referenced is not a field, then it is
3646 something language specific. */
3647 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3649 /* If we are referencing a bitfield, we can't evaluate an
3650 ADDR_EXPR at compile time and so it isn't a constant. */
3651 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3652 return NULL;
3654 return staticp (TREE_OPERAND (arg, 0));
3656 case BIT_FIELD_REF:
3657 return NULL;
3659 case INDIRECT_REF:
3660 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3662 case ARRAY_REF:
3663 case ARRAY_RANGE_REF:
3664 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3665 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3666 return staticp (TREE_OPERAND (arg, 0));
3667 else
3668 return NULL;
3670 case COMPOUND_LITERAL_EXPR:
3671 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3673 default:
3674 return NULL;
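/* For instance, staticp returns its argument for a STRING_CST or for a
   file-scope (TREE_STATIC) VAR_DECL that is neither thread-local nor
   dllimported, but returns NULL for an automatic local variable, whose
   address is only known at run time.  */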
3681 /* Return whether OP is a DECL whose address is function-invariant. */
3683 bool
3684 decl_address_invariant_p (const_tree op)
3686 /* The conditions below are slightly less strict than the one in
3687 staticp. */
3689 switch (TREE_CODE (op))
3691 case PARM_DECL:
3692 case RESULT_DECL:
3693 case LABEL_DECL:
3694 case FUNCTION_DECL:
3695 return true;
3697 case VAR_DECL:
3698 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3699 || DECL_THREAD_LOCAL_P (op)
3700 || DECL_CONTEXT (op) == current_function_decl
3701 || decl_function_context (op) == current_function_decl)
3702 return true;
3703 break;
3705 case CONST_DECL:
3706 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3707 || decl_function_context (op) == current_function_decl)
3708 return true;
3709 break;
3711 default:
3712 break;
3715 return false;
3718 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3720 bool
3721 decl_address_ip_invariant_p (const_tree op)
3723 /* The conditions below are slightly less strict than the one in
3724 staticp. */
3726 switch (TREE_CODE (op))
3728 case LABEL_DECL:
3729 case FUNCTION_DECL:
3730 case STRING_CST:
3731 return true;
3733 case VAR_DECL:
3734 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3735 && !DECL_DLLIMPORT_P (op))
3736 || DECL_THREAD_LOCAL_P (op))
3737 return true;
3738 break;
3740 case CONST_DECL:
3741 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3742 return true;
3743 break;
3745 default:
3746 break;
3749 return false;
3753 /* Return true if T is function-invariant (internal function, does
3754 not handle arithmetic; that's handled in skip_simple_arithmetic and
3755 tree_invariant_p). */
3757 static bool
3758 tree_invariant_p_1 (tree t)
3760 tree op;
3762 if (TREE_CONSTANT (t)
3763 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3764 return true;
3766 switch (TREE_CODE (t))
3768 case SAVE_EXPR:
3769 return true;
3771 case ADDR_EXPR:
3772 op = TREE_OPERAND (t, 0);
3773 while (handled_component_p (op))
3775 switch (TREE_CODE (op))
3777 case ARRAY_REF:
3778 case ARRAY_RANGE_REF:
3779 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3780 || TREE_OPERAND (op, 2) != NULL_TREE
3781 || TREE_OPERAND (op, 3) != NULL_TREE)
3782 return false;
3783 break;
3785 case COMPONENT_REF:
3786 if (TREE_OPERAND (op, 2) != NULL_TREE)
3787 return false;
3788 break;
3790 default:;
3792 op = TREE_OPERAND (op, 0);
3795 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3797 default:
3798 break;
3801 return false;
3804 /* Return true if T is function-invariant. */
3806 bool
3807 tree_invariant_p (tree t)
3809 tree inner = skip_simple_arithmetic (t);
3810 return tree_invariant_p_1 (inner);
3813 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3814 Do this to any expression which may be used in more than one place,
3815 but must be evaluated only once.
3817 Normally, expand_expr would reevaluate the expression each time.
3818 Calling save_expr produces something that is evaluated and recorded
3819 the first time expand_expr is called on it. Subsequent calls to
3820 expand_expr just reuse the recorded value.
3822 The call to expand_expr that generates code that actually computes
3823 the value is the first call *at compile time*. Subsequent calls
3824 *at compile time* generate code to use the saved value.
3825 This produces correct result provided that *at run time* control
3826 always flows through the insns made by the first expand_expr
3827 before reaching the other places where the save_expr was evaluated.
3828 You, the caller of save_expr, must make sure this is so.
3830 Constants, and certain read-only nodes, are returned with no
3831 SAVE_EXPR because that is safe. Expressions containing placeholders
3832 are not touched; see tree.def for an explanation of what these
3833 are used for. */
3835 tree
3836 save_expr (tree expr)
3838 tree inner;
3840 /* If the tree evaluates to a constant, then we don't want to hide that
3841 fact (i.e. this allows further folding, and direct checks for constants).
3842 However, a read-only object that has side effects cannot be bypassed.
3843 Since it is no problem to reevaluate literals, we just return the
3844 literal node. */
3845 inner = skip_simple_arithmetic (expr);
3846 if (TREE_CODE (inner) == ERROR_MARK)
3847 return inner;
3849 if (tree_invariant_p_1 (inner))
3850 return expr;
3852 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3853 it means that the size or offset of some field of an object depends on
3854 the value within another field.
3856 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3857 and some variable since it would then need to be both evaluated once and
3858 evaluated more than once. Front-ends must assure this case cannot
3859 happen by surrounding any such subexpressions in their own SAVE_EXPR
3860 and forcing evaluation at the proper time. */
3861 if (contains_placeholder_p (inner))
3862 return expr;
3864 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3866 /* This expression might be placed ahead of a jump to ensure that the
3867 value was computed on both sides of the jump. So make sure it isn't
3868 eliminated as dead. */
3869 TREE_SIDE_EFFECTS (expr) = 1;
3870 return expr;
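/* Hypothetical example: evaluating EXP only once even though it is used
   twice in the result, assuming EXP and TYPE are an existing expression
   tree and its type:

     exp = save_expr (exp);
     tree sum = build2 (PLUS_EXPR, type, exp, exp);

   The SAVE_EXPR guarantees that the side effects and the computation of
   EXP happen a single time at run time.  */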
3873 /* Look inside EXPR into any simple arithmetic operations. Return the
3874 outermost non-arithmetic or non-invariant node. */
3876 tree
3877 skip_simple_arithmetic (tree expr)
3879 /* We don't care about whether this can be used as an lvalue in this
3880 context. */
3881 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3882 expr = TREE_OPERAND (expr, 0);
3884 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3885 a constant, it will be more efficient to not make another SAVE_EXPR since
3886 it will allow better simplification and GCSE will be able to merge the
3887 computations if they actually occur. */
3888 while (true)
3890 if (UNARY_CLASS_P (expr))
3891 expr = TREE_OPERAND (expr, 0);
3892 else if (BINARY_CLASS_P (expr))
3894 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3895 expr = TREE_OPERAND (expr, 0);
3896 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3897 expr = TREE_OPERAND (expr, 1);
3898 else
3899 break;
3901 else
3902 break;
3905 return expr;
3908 /* Look inside EXPR into simple arithmetic operations involving constants.
3909 Return the outermost non-arithmetic or non-constant node. */
3911 tree
3912 skip_simple_constant_arithmetic (tree expr)
3914 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3915 expr = TREE_OPERAND (expr, 0);
3917 while (true)
3919 if (UNARY_CLASS_P (expr))
3920 expr = TREE_OPERAND (expr, 0);
3921 else if (BINARY_CLASS_P (expr))
3923 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3924 expr = TREE_OPERAND (expr, 0);
3925 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3926 expr = TREE_OPERAND (expr, 1);
3927 else
3928 break;
3930 else
3931 break;
3934 return expr;
3937 /* Return which tree structure is used by T. */
3939 enum tree_node_structure_enum
3940 tree_node_structure (const_tree t)
3942 const enum tree_code code = TREE_CODE (t);
3943 return tree_node_structure_for_code (code);
3946 /* Set various status flags when building a CALL_EXPR object T. */
3948 static void
3949 process_call_operands (tree t)
3951 bool side_effects = TREE_SIDE_EFFECTS (t);
3952 bool read_only = false;
3953 int i = call_expr_flags (t);
3955 /* Calls have side-effects, except those to const or pure functions. */
3956 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3957 side_effects = true;
3958 /* Propagate TREE_READONLY of arguments for const functions. */
3959 if (i & ECF_CONST)
3960 read_only = true;
3962 if (!side_effects || read_only)
3963 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3965 tree op = TREE_OPERAND (t, i);
3966 if (op && TREE_SIDE_EFFECTS (op))
3967 side_effects = true;
3968 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3969 read_only = false;
3972 TREE_SIDE_EFFECTS (t) = side_effects;
3973 TREE_READONLY (t) = read_only;
3976 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3977 size or offset that depends on a field within a record. */
3979 bool
3980 contains_placeholder_p (const_tree exp)
3982 enum tree_code code;
3984 if (!exp)
3985 return 0;
3987 code = TREE_CODE (exp);
3988 if (code == PLACEHOLDER_EXPR)
3989 return 1;
3991 switch (TREE_CODE_CLASS (code))
3993 case tcc_reference:
3994 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3995 position computations since they will be converted into a
3996 WITH_RECORD_EXPR involving the reference, which we assume
3997 here will be valid. */
3998 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4000 case tcc_exceptional:
4001 if (code == TREE_LIST)
4002 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
4003 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
4004 break;
4006 case tcc_unary:
4007 case tcc_binary:
4008 case tcc_comparison:
4009 case tcc_expression:
4010 switch (code)
4012 case COMPOUND_EXPR:
4013 /* Ignoring the first operand isn't quite right, but works best. */
4014 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
4016 case COND_EXPR:
4017 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4018 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
4019 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
4021 case SAVE_EXPR:
4022 /* The save_expr function never wraps anything containing
4023 a PLACEHOLDER_EXPR. */
4024 return 0;
4026 default:
4027 break;
4030 switch (TREE_CODE_LENGTH (code))
4032 case 1:
4033 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4034 case 2:
4035 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4036 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
4037 default:
4038 return 0;
4041 case tcc_vl_exp:
4042 switch (code)
4044 case CALL_EXPR:
4046 const_tree arg;
4047 const_call_expr_arg_iterator iter;
4048 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
4049 if (CONTAINS_PLACEHOLDER_P (arg))
4050 return 1;
4051 return 0;
4053 default:
4054 return 0;
4057 default:
4058 return 0;
4060 return 0;
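/* As a concrete illustration: for a self-referential record whose array
   field has bounds taken from another field of the same object (the
   Ada-style case), that array's TYPE_SIZE contains a PLACEHOLDER_EXPR
   standing for "the object being accessed", so this predicate returns
   true for it.  */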
4063 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4064 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4065 field positions. */
4067 static bool
4068 type_contains_placeholder_1 (const_tree type)
4070 /* If the size contains a placeholder or the parent type (component type in
4071 the case of arrays) involves a placeholder, this type does. */
4072 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
4073 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
4074 || (!POINTER_TYPE_P (type)
4075 && TREE_TYPE (type)
4076 && type_contains_placeholder_p (TREE_TYPE (type))))
4077 return true;
4079 /* Now do type-specific checks. Note that the last part of the check above
4080 greatly limits what we have to do below. */
4081 switch (TREE_CODE (type))
4083 case VOID_TYPE:
4084 case OPAQUE_TYPE:
4085 case COMPLEX_TYPE:
4086 case ENUMERAL_TYPE:
4087 case BOOLEAN_TYPE:
4088 case POINTER_TYPE:
4089 case OFFSET_TYPE:
4090 case REFERENCE_TYPE:
4091 case METHOD_TYPE:
4092 case FUNCTION_TYPE:
4093 case VECTOR_TYPE:
4094 case NULLPTR_TYPE:
4095 return false;
4097 case INTEGER_TYPE:
4098 case REAL_TYPE:
4099 case FIXED_POINT_TYPE:
4100 /* Here we just check the bounds. */
4101 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
4102 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
4104 case ARRAY_TYPE:
4105 /* We have already checked the component type above, so just check
4106 the domain type. Flexible array members have a null domain. */
4107 return TYPE_DOMAIN (type) ?
4108 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4110 case RECORD_TYPE:
4111 case UNION_TYPE:
4112 case QUAL_UNION_TYPE:
4114 tree field;
4116 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4117 if (TREE_CODE (field) == FIELD_DECL
4118 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4119 || (TREE_CODE (type) == QUAL_UNION_TYPE
4120 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4121 || type_contains_placeholder_p (TREE_TYPE (field))))
4122 return true;
4124 return false;
4127 default:
4128 gcc_unreachable ();
4132 /* Wrapper around above function used to cache its result. */
4134 bool
4135 type_contains_placeholder_p (tree type)
4137 bool result;
4139 /* If the contains_placeholder_bits field has been initialized,
4140 then we know the answer. */
4141 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4142 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4144 /* Indicate that we've seen this type node, and the answer is false.
4145 This is what we want to return if we run into recursion via fields. */
4146 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4148 /* Compute the real value. */
4149 result = type_contains_placeholder_1 (type);
4151 /* Store the real value. */
4152 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4154 return result;
4157 /* Push tree EXP onto vector QUEUE if it is not already present. */
4159 static void
4160 push_without_duplicates (tree exp, vec<tree> *queue)
4162 unsigned int i;
4163 tree iter;
4165 FOR_EACH_VEC_ELT (*queue, i, iter)
4166 if (simple_cst_equal (iter, exp) == 1)
4167 break;
4169 if (!iter)
4170 queue->safe_push (exp);
4173 /* Given a tree EXP, find all occurrences of references to fields
4174 in a PLACEHOLDER_EXPR and place them in vector REFS without
4175 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4176 we assume here that EXP contains only arithmetic expressions
4177 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4178 argument list. */
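/* Editor's illustrative sketch (assumed example, not part of the original
   sources): if EXP is the expression
     PLACEHOLDER_EXPR.f1 + PLACEHOLDER_EXPR.f2 * n
   where n is a non-static VAR_DECL, then after
     auto_vec<tree> refs;
     find_placeholder_in_expr (exp, &refs);
   REFS contains the two COMPONENT_REFs and the VAR_DECL n, each recorded
   only once even if it occurs several times in EXP.  */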
4180 void
4181 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4183 enum tree_code code = TREE_CODE (exp);
4184 tree inner;
4185 int i;
4187 /* We handle TREE_LIST and COMPONENT_REF separately. */
4188 if (code == TREE_LIST)
4190 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4191 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4193 else if (code == COMPONENT_REF)
4195 for (inner = TREE_OPERAND (exp, 0);
4196 REFERENCE_CLASS_P (inner);
4197 inner = TREE_OPERAND (inner, 0))
4200 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4201 push_without_duplicates (exp, refs);
4202 else
4203 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4205 else
4206 switch (TREE_CODE_CLASS (code))
4208 case tcc_constant:
4209 break;
4211 case tcc_declaration:
4212 /* Variables allocated to static storage can stay. */
4213 if (!TREE_STATIC (exp))
4214 push_without_duplicates (exp, refs);
4215 break;
4217 case tcc_expression:
4218 /* This is the pattern built in ada/make_aligning_type. */
4219 if (code == ADDR_EXPR
4220 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4222 push_without_duplicates (exp, refs);
4223 break;
4226 /* Fall through. */
4228 case tcc_exceptional:
4229 case tcc_unary:
4230 case tcc_binary:
4231 case tcc_comparison:
4232 case tcc_reference:
4233 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4234 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4235 break;
4237 case tcc_vl_exp:
4238 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4239 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4240 break;
4242 default:
4243 gcc_unreachable ();
4247 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4248 return a tree with all occurrences of references to F in a
4249 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4250 CONST_DECLs. Note that we assume here that EXP contains only
4251 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4252 occurring only in their argument list. */
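/* Editor's illustrative sketch (assumed example, not part of the original
   sources): if EXP is PLACEHOLDER_EXPR.f + 1, F is the FIELD_DECL f and R
   is some expression r, then
     SUBSTITUTE_IN_EXPR (exp, f, r)
   returns the folded equivalent of r + 1, while references to any other
   field of the placeholder are left untouched.  */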
4254 tree
4255 substitute_in_expr (tree exp, tree f, tree r)
4257 enum tree_code code = TREE_CODE (exp);
4258 tree op0, op1, op2, op3;
4259 tree new_tree;
4261 /* We handle TREE_LIST and COMPONENT_REF separately. */
4262 if (code == TREE_LIST)
4264 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4265 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4266 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4267 return exp;
4269 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4271 else if (code == COMPONENT_REF)
4273 tree inner;
4275 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4276 and it is the right field, replace it with R. */
4277 for (inner = TREE_OPERAND (exp, 0);
4278 REFERENCE_CLASS_P (inner);
4279 inner = TREE_OPERAND (inner, 0))
4282 /* The field. */
4283 op1 = TREE_OPERAND (exp, 1);
4285 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4286 return r;
4288 /* If this expression hasn't been completed yet, leave it alone. */
4289 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4290 return exp;
4292 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4293 if (op0 == TREE_OPERAND (exp, 0))
4294 return exp;
4296 new_tree
4297 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4299 else
4300 switch (TREE_CODE_CLASS (code))
4302 case tcc_constant:
4303 return exp;
4305 case tcc_declaration:
4306 if (exp == f)
4307 return r;
4308 else
4309 return exp;
4311 case tcc_expression:
4312 if (exp == f)
4313 return r;
4315 /* Fall through. */
4317 case tcc_exceptional:
4318 case tcc_unary:
4319 case tcc_binary:
4320 case tcc_comparison:
4321 case tcc_reference:
4322 switch (TREE_CODE_LENGTH (code))
4324 case 0:
4325 return exp;
4327 case 1:
4328 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4329 if (op0 == TREE_OPERAND (exp, 0))
4330 return exp;
4332 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4333 break;
4335 case 2:
4336 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4337 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4339 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4340 return exp;
4342 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4343 break;
4345 case 3:
4346 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4347 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4348 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4350 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4351 && op2 == TREE_OPERAND (exp, 2))
4352 return exp;
4354 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4355 break;
4357 case 4:
4358 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4359 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4360 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4361 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4363 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4364 && op2 == TREE_OPERAND (exp, 2)
4365 && op3 == TREE_OPERAND (exp, 3))
4366 return exp;
4368 new_tree
4369 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4370 break;
4372 default:
4373 gcc_unreachable ();
4375 break;
4377 case tcc_vl_exp:
4379 int i;
4381 new_tree = NULL_TREE;
4383 /* If we are trying to replace F with a constant or with another
4384 instance of one of the arguments of the call, inline back
4385 functions that do nothing but compute a value from the
4386 arguments they are passed. This makes it possible to fold
4387 the replacement expression partially or entirely. */
4388 if (code == CALL_EXPR)
4390 bool maybe_inline = false;
4391 if (CONSTANT_CLASS_P (r))
4392 maybe_inline = true;
4393 else
4394 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4395 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4397 maybe_inline = true;
4398 break;
4400 if (maybe_inline)
4402 tree t = maybe_inline_call_in_expr (exp);
4403 if (t)
4404 return SUBSTITUTE_IN_EXPR (t, f, r);
4408 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4410 tree op = TREE_OPERAND (exp, i);
4411 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4412 if (new_op != op)
4414 if (!new_tree)
4415 new_tree = copy_node (exp);
4416 TREE_OPERAND (new_tree, i) = new_op;
4420 if (new_tree)
4422 new_tree = fold (new_tree);
4423 if (TREE_CODE (new_tree) == CALL_EXPR)
4424 process_call_operands (new_tree);
4426 else
4427 return exp;
4429 break;
4431 default:
4432 gcc_unreachable ();
4435 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4437 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4438 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4440 return new_tree;
4443 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4444 for it within OBJ, a tree that is an object or a chain of references. */
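/* Editor's illustrative sketch (assumed example): for a self-referential
   size such as
     TYPE_SIZE (type) = PLACEHOLDER_EXPR<type>.last_index * 8
   a typical use is
     size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (type), obj);
   which replaces the PLACEHOLDER_EXPR by OBJ (or by *OBJ if OBJ is a
   pointer to the needed type), yielding the size of that particular
   object.  */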
4446 tree
4447 substitute_placeholder_in_expr (tree exp, tree obj)
4449 enum tree_code code = TREE_CODE (exp);
4450 tree op0, op1, op2, op3;
4451 tree new_tree;
4453 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4454 in the chain of OBJ. */
4455 if (code == PLACEHOLDER_EXPR)
4457 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4458 tree elt;
4460 for (elt = obj; elt != 0;
4461 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4462 || TREE_CODE (elt) == COND_EXPR)
4463 ? TREE_OPERAND (elt, 1)
4464 : (REFERENCE_CLASS_P (elt)
4465 || UNARY_CLASS_P (elt)
4466 || BINARY_CLASS_P (elt)
4467 || VL_EXP_CLASS_P (elt)
4468 || EXPRESSION_CLASS_P (elt))
4469 ? TREE_OPERAND (elt, 0) : 0))
4470 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4471 return elt;
4473 for (elt = obj; elt != 0;
4474 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4475 || TREE_CODE (elt) == COND_EXPR)
4476 ? TREE_OPERAND (elt, 1)
4477 : (REFERENCE_CLASS_P (elt)
4478 || UNARY_CLASS_P (elt)
4479 || BINARY_CLASS_P (elt)
4480 || VL_EXP_CLASS_P (elt)
4481 || EXPRESSION_CLASS_P (elt))
4482 ? TREE_OPERAND (elt, 0) : 0))
4483 if (POINTER_TYPE_P (TREE_TYPE (elt))
4484 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4485 == need_type))
4486 return fold_build1 (INDIRECT_REF, need_type, elt);
4488 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4489 survives until RTL generation, there will be an error. */
4490 return exp;
4493 /* TREE_LIST is special because we need to look at TREE_VALUE
4494 and TREE_CHAIN, not TREE_OPERANDS. */
4495 else if (code == TREE_LIST)
4497 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4498 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4499 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4500 return exp;
4502 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4504 else
4505 switch (TREE_CODE_CLASS (code))
4507 case tcc_constant:
4508 case tcc_declaration:
4509 return exp;
4511 case tcc_exceptional:
4512 case tcc_unary:
4513 case tcc_binary:
4514 case tcc_comparison:
4515 case tcc_expression:
4516 case tcc_reference:
4517 case tcc_statement:
4518 switch (TREE_CODE_LENGTH (code))
4520 case 0:
4521 return exp;
4523 case 1:
4524 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4525 if (op0 == TREE_OPERAND (exp, 0))
4526 return exp;
4528 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4529 break;
4531 case 2:
4532 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4533 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4535 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4536 return exp;
4538 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4539 break;
4541 case 3:
4542 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4543 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4544 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4546 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4547 && op2 == TREE_OPERAND (exp, 2))
4548 return exp;
4550 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4551 break;
4553 case 4:
4554 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4555 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4556 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4557 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4559 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4560 && op2 == TREE_OPERAND (exp, 2)
4561 && op3 == TREE_OPERAND (exp, 3))
4562 return exp;
4564 new_tree
4565 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4566 break;
4568 default:
4569 gcc_unreachable ();
4571 break;
4573 case tcc_vl_exp:
4575 int i;
4577 new_tree = NULL_TREE;
4579 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4581 tree op = TREE_OPERAND (exp, i);
4582 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4583 if (new_op != op)
4585 if (!new_tree)
4586 new_tree = copy_node (exp);
4587 TREE_OPERAND (new_tree, i) = new_op;
4591 if (new_tree)
4593 new_tree = fold (new_tree);
4594 if (TREE_CODE (new_tree) == CALL_EXPR)
4595 process_call_operands (new_tree);
4597 else
4598 return exp;
4600 break;
4602 default:
4603 gcc_unreachable ();
4606 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4608 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4609 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4611 return new_tree;
4615 /* Subroutine of stabilize_reference; this is called for subtrees of
4616 references. Any expression with side-effects must be put in a SAVE_EXPR
4617 to ensure that it is only evaluated once.
4619 We don't put SAVE_EXPR nodes around everything, because assigning very
4620 simple expressions to temporaries causes us to miss good opportunities
4621 for optimizations. Among other things, the opportunity to fold in the
4622 addition of a constant into an addressing mode often gets lost, e.g.
4623 "y[i+1] += x;". In general, we take the approach that we should not make
4624 an assignment unless we are forced into it - i.e., that any non-side effect
4625 operator should be allowed, and that cse should take care of coalescing
4626 multiple utterances of the same expression should that prove fruitful. */
4628 static tree
4629 stabilize_reference_1 (tree e)
4631 tree result;
4632 enum tree_code code = TREE_CODE (e);
4634 /* We cannot ignore const expressions because it might be a reference
4635 to a const array but whose index contains side-effects. But we can
4636 ignore things that are actual constant or that already have been
4637 handled by this function. */
4639 if (tree_invariant_p (e))
4640 return e;
4642 switch (TREE_CODE_CLASS (code))
4644 case tcc_exceptional:
4645 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4646 have side-effects. */
4647 if (code == STATEMENT_LIST)
4648 return save_expr (e);
4649 /* FALLTHRU */
4650 case tcc_type:
4651 case tcc_declaration:
4652 case tcc_comparison:
4653 case tcc_statement:
4654 case tcc_expression:
4655 case tcc_reference:
4656 case tcc_vl_exp:
4657 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4658 so that it will only be evaluated once. */
4659 /* The reference (r) and comparison (<) classes could be handled as
4660 below, but it is generally faster to only evaluate them once. */
4661 if (TREE_SIDE_EFFECTS (e))
4662 return save_expr (e);
4663 return e;
4665 case tcc_constant:
4666 /* Constants need no processing. In fact, we should never reach
4667 here. */
4668 return e;
4670 case tcc_binary:
4671 /* Division is slow and tends to be compiled with jumps,
4672 especially the division by powers of 2 that is often
4673 found inside of an array reference. So do it just once. */
4674 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4675 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4676 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4677 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4678 return save_expr (e);
4679 /* Recursively stabilize each operand. */
4680 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4681 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4682 break;
4684 case tcc_unary:
4685 /* Recursively stabilize each operand. */
4686 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4687 break;
4689 default:
4690 gcc_unreachable ();
4693 TREE_TYPE (result) = TREE_TYPE (e);
4694 TREE_READONLY (result) = TREE_READONLY (e);
4695 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4696 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4698 return result;
4701 /* Stabilize a reference so that we can use it any number of times
4702 without causing its operands to be evaluated more than once.
4703 Returns the stabilized reference. This works by means of save_expr,
4704 so see the caveats in the comments about save_expr.
4706 Also allows conversion expressions whose operands are references.
4707 Any other kind of expression is returned unchanged. */
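/* Editor's illustrative sketch (assumed example): given the lvalue
     a[i++].f
   stabilize_reference rebuilds the reference as
     a[SAVE_EXPR <i++>].f
   so that using the result more than once (e.g. as both the destination
   and a source of an assignment) evaluates i++ only once.  */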
4709 tree
4710 stabilize_reference (tree ref)
4712 tree result;
4713 enum tree_code code = TREE_CODE (ref);
4715 switch (code)
4717 case VAR_DECL:
4718 case PARM_DECL:
4719 case RESULT_DECL:
4720 /* No action is needed in this case. */
4721 return ref;
4723 CASE_CONVERT:
4724 case FLOAT_EXPR:
4725 case FIX_TRUNC_EXPR:
4726 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4727 break;
4729 case INDIRECT_REF:
4730 result = build_nt (INDIRECT_REF,
4731 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4732 break;
4734 case COMPONENT_REF:
4735 result = build_nt (COMPONENT_REF,
4736 stabilize_reference (TREE_OPERAND (ref, 0)),
4737 TREE_OPERAND (ref, 1), NULL_TREE);
4738 break;
4740 case BIT_FIELD_REF:
4741 result = build_nt (BIT_FIELD_REF,
4742 stabilize_reference (TREE_OPERAND (ref, 0)),
4743 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4744 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4745 break;
4747 case ARRAY_REF:
4748 result = build_nt (ARRAY_REF,
4749 stabilize_reference (TREE_OPERAND (ref, 0)),
4750 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4751 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4752 break;
4754 case ARRAY_RANGE_REF:
4755 result = build_nt (ARRAY_RANGE_REF,
4756 stabilize_reference (TREE_OPERAND (ref, 0)),
4757 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4758 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4759 break;
4761 case COMPOUND_EXPR:
4762 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4763 it wouldn't be ignored. This matters when dealing with
4764 volatiles. */
4765 return stabilize_reference_1 (ref);
4767 /* If arg isn't a kind of lvalue we recognize, make no change.
4768 Caller should recognize the error for an invalid lvalue. */
4769 default:
4770 return ref;
4772 case ERROR_MARK:
4773 return error_mark_node;
4776 TREE_TYPE (result) = TREE_TYPE (ref);
4777 TREE_READONLY (result) = TREE_READONLY (ref);
4778 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4779 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4781 return result;
4784 /* Low-level constructors for expressions. */
4786 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4787 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4789 void
4790 recompute_tree_invariant_for_addr_expr (tree t)
4792 tree node;
4793 bool tc = true, se = false;
4795 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4797 /* We started out assuming this address is both invariant and constant, but
4798 does not have side effects. Now go down any handled components and see if
4799 any of them involve offsets that are either non-constant or non-invariant.
4800 Also check for side-effects.
4802 ??? Note that this code makes no attempt to deal with the case where
4803 taking the address of something causes a copy due to misalignment. */
4805 #define UPDATE_FLAGS(NODE) \
4806 do { tree _node = (NODE); \
4807 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4808 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4810 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4811 node = TREE_OPERAND (node, 0))
4813 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4814 array reference (probably made temporarily by the G++ front end),
4815 so ignore all the operands. */
4816 if ((TREE_CODE (node) == ARRAY_REF
4817 || TREE_CODE (node) == ARRAY_RANGE_REF)
4818 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4820 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4821 if (TREE_OPERAND (node, 2))
4822 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4823 if (TREE_OPERAND (node, 3))
4824 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4826 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4827 FIELD_DECL, apparently. The G++ front end can put something else
4828 there, at least temporarily. */
4829 else if (TREE_CODE (node) == COMPONENT_REF
4830 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4832 if (TREE_OPERAND (node, 2))
4833 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4837 node = lang_hooks.expr_to_decl (node, &tc, &se);
4839 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4840 the address, since &(*a)->b is a form of addition. If it's a constant, the
4841 address is constant too. If it's a decl, its address is constant if the
4842 decl is static. Everything else is not constant and, furthermore,
4843 taking the address of a volatile variable is not volatile. */
4844 if (TREE_CODE (node) == INDIRECT_REF
4845 || TREE_CODE (node) == MEM_REF)
4846 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4847 else if (CONSTANT_CLASS_P (node))
4849 else if (DECL_P (node))
4850 tc &= (staticp (node) != NULL_TREE);
4851 else
4853 tc = false;
4854 se |= TREE_SIDE_EFFECTS (node);
4858 TREE_CONSTANT (t) = tc;
4859 TREE_SIDE_EFFECTS (t) = se;
4860 #undef UPDATE_FLAGS
4863 /* Build an expression of code CODE, data type TYPE, and operands as
4864 specified. Expressions and reference nodes can be created this way.
4865 Constants, decls, types and misc nodes cannot be.
4867 We define six non-variadic functions, build0 through build5, taking
4868 from 0 to 5 operands. This is enough for all extant tree codes. */
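/* Editor's illustrative sketch (assumed example): to build the expression
   x + y with type int, a caller writes
     tree sum = build2 (PLUS_EXPR, integer_type_node, x, y);
   TREE_SIDE_EFFECTS, TREE_READONLY and TREE_CONSTANT of the result are
   derived from the operands as described below; codes with extra
   constraints (e.g. POINTER_PLUS_EXPR) have their operand types checked
   by assertions.  */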
4870 tree
4871 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4873 tree t;
4875 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4877 t = make_node (code PASS_MEM_STAT);
4878 TREE_TYPE (t) = tt;
4880 return t;
4883 tree
4884 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4886 int length = sizeof (struct tree_exp);
4887 tree t;
4889 record_node_allocation_statistics (code, length);
4891 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4893 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4895 memset (t, 0, sizeof (struct tree_common));
4897 TREE_SET_CODE (t, code);
4899 TREE_TYPE (t) = type;
4900 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4901 TREE_OPERAND (t, 0) = node;
4902 if (node && !TYPE_P (node))
4904 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4905 TREE_READONLY (t) = TREE_READONLY (node);
4908 if (TREE_CODE_CLASS (code) == tcc_statement)
4910 if (code != DEBUG_BEGIN_STMT)
4911 TREE_SIDE_EFFECTS (t) = 1;
4913 else switch (code)
4915 case VA_ARG_EXPR:
4916 /* All of these have side-effects, no matter what their
4917 operands are. */
4918 TREE_SIDE_EFFECTS (t) = 1;
4919 TREE_READONLY (t) = 0;
4920 break;
4922 case INDIRECT_REF:
4923 /* Whether a dereference is readonly has nothing to do with whether
4924 its operand is readonly. */
4925 TREE_READONLY (t) = 0;
4926 break;
4928 case ADDR_EXPR:
4929 if (node)
4930 recompute_tree_invariant_for_addr_expr (t);
4931 break;
4933 default:
4934 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4935 && node && !TYPE_P (node)
4936 && TREE_CONSTANT (node))
4937 TREE_CONSTANT (t) = 1;
4938 if (TREE_CODE_CLASS (code) == tcc_reference
4939 && node && TREE_THIS_VOLATILE (node))
4940 TREE_THIS_VOLATILE (t) = 1;
4941 break;
4944 return t;
4947 #define PROCESS_ARG(N) \
4948 do { \
4949 TREE_OPERAND (t, N) = arg##N; \
4950 if (arg##N &&!TYPE_P (arg##N)) \
4952 if (TREE_SIDE_EFFECTS (arg##N)) \
4953 side_effects = 1; \
4954 if (!TREE_READONLY (arg##N) \
4955 && !CONSTANT_CLASS_P (arg##N)) \
4956 (void) (read_only = 0); \
4957 if (!TREE_CONSTANT (arg##N)) \
4958 (void) (constant = 0); \
4960 } while (0)
4962 tree
4963 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4965 bool constant, read_only, side_effects, div_by_zero;
4966 tree t;
4968 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4970 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4971 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4972 /* When sizetype precision doesn't match that of pointers
4973 we need to be able to build explicit extensions or truncations
4974 of the offset argument. */
4975 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4976 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4977 && TREE_CODE (arg1) == INTEGER_CST);
4979 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4980 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4981 && ptrofftype_p (TREE_TYPE (arg1)));
4983 t = make_node (code PASS_MEM_STAT);
4984 TREE_TYPE (t) = tt;
4986 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4987 result based on those same flags for the arguments. But if the
4988 arguments aren't really even `tree' expressions, we shouldn't be trying
4989 to do this. */
4991 /* Expressions without side effects may be constant if their
4992 arguments are as well. */
4993 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4994 || TREE_CODE_CLASS (code) == tcc_binary);
4995 read_only = 1;
4996 side_effects = TREE_SIDE_EFFECTS (t);
4998 switch (code)
5000 case TRUNC_DIV_EXPR:
5001 case CEIL_DIV_EXPR:
5002 case FLOOR_DIV_EXPR:
5003 case ROUND_DIV_EXPR:
5004 case EXACT_DIV_EXPR:
5005 case CEIL_MOD_EXPR:
5006 case FLOOR_MOD_EXPR:
5007 case ROUND_MOD_EXPR:
5008 case TRUNC_MOD_EXPR:
5009 div_by_zero = integer_zerop (arg1);
5010 break;
5011 default:
5012 div_by_zero = false;
5015 PROCESS_ARG (0);
5016 PROCESS_ARG (1);
5018 TREE_SIDE_EFFECTS (t) = side_effects;
5019 if (code == MEM_REF)
5021 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5023 tree o = TREE_OPERAND (arg0, 0);
5024 TREE_READONLY (t) = TREE_READONLY (o);
5025 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5028 else
5030 TREE_READONLY (t) = read_only;
5031 /* Don't mark X / 0 as constant. */
5032 TREE_CONSTANT (t) = constant && !div_by_zero;
5033 TREE_THIS_VOLATILE (t)
5034 = (TREE_CODE_CLASS (code) == tcc_reference
5035 && arg0 && TREE_THIS_VOLATILE (arg0));
5038 return t;
5042 tree
5043 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
5044 tree arg2 MEM_STAT_DECL)
5046 bool constant, read_only, side_effects;
5047 tree t;
5049 gcc_assert (TREE_CODE_LENGTH (code) == 3);
5050 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5052 t = make_node (code PASS_MEM_STAT);
5053 TREE_TYPE (t) = tt;
5055 read_only = 1;
5057 /* As a special exception, if COND_EXPR has NULL branches, we
5058 assume that it is a gimple statement and always consider
5059 it to have side effects. */
5060 if (code == COND_EXPR
5061 && tt == void_type_node
5062 && arg1 == NULL_TREE
5063 && arg2 == NULL_TREE)
5064 side_effects = true;
5065 else
5066 side_effects = TREE_SIDE_EFFECTS (t);
5068 PROCESS_ARG (0);
5069 PROCESS_ARG (1);
5070 PROCESS_ARG (2);
5072 if (code == COND_EXPR)
5073 TREE_READONLY (t) = read_only;
5075 TREE_SIDE_EFFECTS (t) = side_effects;
5076 TREE_THIS_VOLATILE (t)
5077 = (TREE_CODE_CLASS (code) == tcc_reference
5078 && arg0 && TREE_THIS_VOLATILE (arg0));
5080 return t;
5083 tree
5084 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
5085 tree arg2, tree arg3 MEM_STAT_DECL)
5087 bool constant, read_only, side_effects;
5088 tree t;
5090 gcc_assert (TREE_CODE_LENGTH (code) == 4);
5092 t = make_node (code PASS_MEM_STAT);
5093 TREE_TYPE (t) = tt;
5095 side_effects = TREE_SIDE_EFFECTS (t);
5097 PROCESS_ARG (0);
5098 PROCESS_ARG (1);
5099 PROCESS_ARG (2);
5100 PROCESS_ARG (3);
5102 TREE_SIDE_EFFECTS (t) = side_effects;
5103 TREE_THIS_VOLATILE (t)
5104 = (TREE_CODE_CLASS (code) == tcc_reference
5105 && arg0 && TREE_THIS_VOLATILE (arg0));
5107 return t;
5110 tree
5111 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5112 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5114 bool constant, read_only, side_effects;
5115 tree t;
5117 gcc_assert (TREE_CODE_LENGTH (code) == 5);
5119 t = make_node (code PASS_MEM_STAT);
5120 TREE_TYPE (t) = tt;
5122 side_effects = TREE_SIDE_EFFECTS (t);
5124 PROCESS_ARG (0);
5125 PROCESS_ARG (1);
5126 PROCESS_ARG (2);
5127 PROCESS_ARG (3);
5128 PROCESS_ARG (4);
5130 TREE_SIDE_EFFECTS (t) = side_effects;
5131 if (code == TARGET_MEM_REF)
5133 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5135 tree o = TREE_OPERAND (arg0, 0);
5136 TREE_READONLY (t) = TREE_READONLY (o);
5137 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5140 else
5141 TREE_THIS_VOLATILE (t)
5142 = (TREE_CODE_CLASS (code) == tcc_reference
5143 && arg0 && TREE_THIS_VOLATILE (arg0));
5145 return t;
5148 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
5149 on the pointer PTR. */
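/* Editor's illustrative sketch (assumed example):
     build_simple_mem_ref_loc (loc, p)
   yields MEM_REF <p, 0>, i.e. the equivalent of *p; if P has the form
   &a.b, the address is first collapsed to a base and constant offset, so
   the resulting MEM_REF references the storage of a.b directly.  */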
5151 tree
5152 build_simple_mem_ref_loc (location_t loc, tree ptr)
5154 poly_int64 offset = 0;
5155 tree ptype = TREE_TYPE (ptr);
5156 tree tem;
5157 /* For convenience allow addresses that collapse to a simple base
5158 and offset. */
5159 if (TREE_CODE (ptr) == ADDR_EXPR
5160 && (handled_component_p (TREE_OPERAND (ptr, 0))
5161 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5163 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5164 gcc_assert (ptr);
5165 if (TREE_CODE (ptr) == MEM_REF)
5167 offset += mem_ref_offset (ptr).force_shwi ();
5168 ptr = TREE_OPERAND (ptr, 0);
5170 else
5171 ptr = build_fold_addr_expr (ptr);
5172 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5174 tem = build2 (MEM_REF, TREE_TYPE (ptype),
5175 ptr, build_int_cst (ptype, offset));
5176 SET_EXPR_LOCATION (tem, loc);
5177 return tem;
5180 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5182 poly_offset_int
5183 mem_ref_offset (const_tree t)
5185 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5186 SIGNED);
5189 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5190 offsetted by OFFSET units. */
5192 tree
5193 build_invariant_address (tree type, tree base, poly_int64 offset)
5195 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5196 build_fold_addr_expr (base),
5197 build_int_cst (ptr_type_node, offset));
5198 tree addr = build1 (ADDR_EXPR, type, ref);
5199 recompute_tree_invariant_for_addr_expr (addr);
5200 return addr;
5203 /* Similar except don't specify the TREE_TYPE
5204 and leave the TREE_SIDE_EFFECTS as 0.
5205 It is permissible for arguments to be null,
5206 or even garbage if their values do not matter. */
5208 tree
5209 build_nt (enum tree_code code, ...)
5211 tree t;
5212 int length;
5213 int i;
5214 va_list p;
5216 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5218 va_start (p, code);
5220 t = make_node (code);
5221 length = TREE_CODE_LENGTH (code);
5223 for (i = 0; i < length; i++)
5224 TREE_OPERAND (t, i) = va_arg (p, tree);
5226 va_end (p);
5227 return t;
5230 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5231 tree vec. */
5233 tree
5234 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5236 tree ret, t;
5237 unsigned int ix;
5239 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5240 CALL_EXPR_FN (ret) = fn;
5241 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5242 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5243 CALL_EXPR_ARG (ret, ix) = t;
5244 return ret;
5247 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5248 and data type TYPE.
5249 We do NOT enter this node in any sort of symbol table.
5251 LOC is the location of the decl.
5253 layout_decl is used to set up the decl's storage layout.
5254 Other slots are initialized to 0 or null pointers. */
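/* Editor's illustrative sketch (assumed example): a temporary integer
   variable can be created with
     tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                            get_identifier ("tmp"), integer_type_node);
   because the code is VAR_DECL, layout_decl is invoked to set the decl's
   size, alignment and mode from its type.  */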
5256 tree
5257 build_decl (location_t loc, enum tree_code code, tree name,
5258 tree type MEM_STAT_DECL)
5260 tree t;
5262 t = make_node (code PASS_MEM_STAT);
5263 DECL_SOURCE_LOCATION (t) = loc;
5265 /* if (type == error_mark_node)
5266 type = integer_type_node; */
5267 /* That is not done, deliberately, so that having error_mark_node
5268 as the type can suppress useless errors in the use of this variable. */
5270 DECL_NAME (t) = name;
5271 TREE_TYPE (t) = type;
5273 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5274 layout_decl (t, 0);
5276 return t;
5279 /* Builds and returns function declaration with NAME and TYPE. */
5281 tree
5282 build_fn_decl (const char *name, tree type)
5284 tree id = get_identifier (name);
5285 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5287 DECL_EXTERNAL (decl) = 1;
5288 TREE_PUBLIC (decl) = 1;
5289 DECL_ARTIFICIAL (decl) = 1;
5290 TREE_NOTHROW (decl) = 1;
5292 return decl;
5295 vec<tree, va_gc> *all_translation_units;
5297 /* Builds a new translation-unit decl with name NAME, queues it in the
5298 global list of translation-unit decls and returns it. */
5300 tree
5301 build_translation_unit_decl (tree name)
5303 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5304 name, NULL_TREE);
5305 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5306 vec_safe_push (all_translation_units, tu);
5307 return tu;
5311 /* BLOCK nodes are used to represent the structure of binding contours
5312 and declarations, once those contours have been exited and their contents
5313 compiled. This information is used for outputting debugging info. */
5315 tree
5316 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5318 tree block = make_node (BLOCK);
5320 BLOCK_VARS (block) = vars;
5321 BLOCK_SUBBLOCKS (block) = subblocks;
5322 BLOCK_SUPERCONTEXT (block) = supercontext;
5323 BLOCK_CHAIN (block) = chain;
5324 return block;
5328 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5330 LOC is the location to use in tree T. */
5332 void
5333 protected_set_expr_location (tree t, location_t loc)
5335 if (CAN_HAVE_LOCATION_P (t))
5336 SET_EXPR_LOCATION (t, loc);
5337 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5339 t = expr_single (t);
5340 if (t && CAN_HAVE_LOCATION_P (t))
5341 SET_EXPR_LOCATION (t, loc);
5345 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5346 UNKNOWN_LOCATION. */
5348 void
5349 protected_set_expr_location_if_unset (tree t, location_t loc)
5351 t = expr_single (t);
5352 if (t && !EXPR_HAS_LOCATION (t))
5353 protected_set_expr_location (t, loc);
5356 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5357 of the various TYPE_QUAL values. */
5359 static void
5360 set_type_quals (tree type, int type_quals)
5362 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5363 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5364 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5365 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5366 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5369 /* Returns true iff CAND and BASE have equivalent language-specific
5370 qualifiers. */
5372 bool
5373 check_lang_type (const_tree cand, const_tree base)
5375 if (lang_hooks.types.type_hash_eq == NULL)
5376 return true;
5377 /* type_hash_eq currently only applies to these types. */
5378 if (TREE_CODE (cand) != FUNCTION_TYPE
5379 && TREE_CODE (cand) != METHOD_TYPE)
5380 return true;
5381 return lang_hooks.types.type_hash_eq (cand, base);
5384 /* This function checks to see if TYPE matches the size of one of the built-in
5385 atomic types, and returns that core atomic type. */
5387 static tree
5388 find_atomic_core_type (const_tree type)
5390 tree base_atomic_type;
5392 /* Only handle complete types. */
5393 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5394 return NULL_TREE;
5396 switch (tree_to_uhwi (TYPE_SIZE (type)))
5398 case 8:
5399 base_atomic_type = atomicQI_type_node;
5400 break;
5402 case 16:
5403 base_atomic_type = atomicHI_type_node;
5404 break;
5406 case 32:
5407 base_atomic_type = atomicSI_type_node;
5408 break;
5410 case 64:
5411 base_atomic_type = atomicDI_type_node;
5412 break;
5414 case 128:
5415 base_atomic_type = atomicTI_type_node;
5416 break;
5418 default:
5419 base_atomic_type = NULL_TREE;
5422 return base_atomic_type;
5425 /* Returns true iff unqualified CAND and BASE are equivalent. */
5427 bool
5428 check_base_type (const_tree cand, const_tree base)
5430 if (TYPE_NAME (cand) != TYPE_NAME (base)
5431 /* Apparently this is needed for Objective-C. */
5432 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5433 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5434 TYPE_ATTRIBUTES (base)))
5435 return false;
5436 /* Check alignment. */
5437 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5438 && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5439 return true;
5440 /* Atomic types increase minimal alignment. We must do so as well
5441 or we get duplicated canonical types. See PR88686. */
5442 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5444 /* See if this object can map to a basic atomic type. */
5445 tree atomic_type = find_atomic_core_type (cand);
5446 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5447 return true;
5449 return false;
5452 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5454 bool
5455 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5457 return (TYPE_QUALS (cand) == type_quals
5458 && check_base_type (cand, base)
5459 && check_lang_type (cand, base));
5462 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5464 static bool
5465 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5467 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5468 && TYPE_NAME (cand) == TYPE_NAME (base)
5469 /* Apparently this is needed for Objective-C. */
5470 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5471 /* Check alignment. */
5472 && TYPE_ALIGN (cand) == align
5473 /* Check this is a user-aligned type as build_aligned_type
5474 would create. */
5475 && TYPE_USER_ALIGN (cand)
5476 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5477 TYPE_ATTRIBUTES (base))
5478 && check_lang_type (cand, base));
5481 /* Return a version of the TYPE, qualified as indicated by the
5482 TYPE_QUALS, if one exists. If no qualified version exists yet,
5483 return NULL_TREE. */
5485 tree
5486 get_qualified_type (tree type, int type_quals)
5488 if (TYPE_QUALS (type) == type_quals)
5489 return type;
5491 tree mv = TYPE_MAIN_VARIANT (type);
5492 if (check_qualified_type (mv, type, type_quals))
5493 return mv;
5495 /* Search the chain of variants to see if there is already one there just
5496 like the one we need to have. If so, use that existing one. We must
5497 preserve the TYPE_NAME, since there is code that depends on this. */
5498 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
5499 if (check_qualified_type (*tp, type, type_quals))
5501 /* Put the found variant at the head of the variant list so
5502 frequently searched variants get found faster. The C++ FE
5503 benefits greatly from this. */
5504 tree t = *tp;
5505 *tp = TYPE_NEXT_VARIANT (t);
5506 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
5507 TYPE_NEXT_VARIANT (mv) = t;
5508 return t;
5511 return NULL_TREE;
5514 /* Like get_qualified_type, but creates the type if it does not
5515 exist. This function never returns NULL_TREE. */
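/* Editor's illustrative sketch (assumed example):
     tree cc = build_qualified_type (char_type_node, TYPE_QUAL_CONST);
   returns the "const char" variant, reusing a variant already on
   char_type_node's variant chain when one exists and creating a new
   variant type copy otherwise.  */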
5517 tree
5518 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5520 tree t;
5522 /* See if we already have the appropriate qualified variant. */
5523 t = get_qualified_type (type, type_quals);
5525 /* If not, build it. */
5526 if (!t)
5528 t = build_variant_type_copy (type PASS_MEM_STAT);
5529 set_type_quals (t, type_quals);
5531 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5533 /* See if this object can map to a basic atomic type. */
5534 tree atomic_type = find_atomic_core_type (type);
5535 if (atomic_type)
5537 /* Ensure the alignment of this type is compatible with
5538 the required alignment of the atomic type. */
5539 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5540 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5544 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5545 /* Propagate structural equality. */
5546 SET_TYPE_STRUCTURAL_EQUALITY (t);
5547 else if (TYPE_CANONICAL (type) != type)
5548 /* Build the underlying canonical type, since it is different
5549 from TYPE. */
5551 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5552 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5554 else
5555 /* T is its own canonical type. */
5556 TYPE_CANONICAL (t) = t;
5560 return t;
5563 /* Create a variant of type T with alignment ALIGN. */
5565 tree
5566 build_aligned_type (tree type, unsigned int align)
5568 tree t;
5570 if (TYPE_PACKED (type)
5571 || TYPE_ALIGN (type) == align)
5572 return type;
5574 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5575 if (check_aligned_type (t, type, align))
5576 return t;
5578 t = build_variant_type_copy (type);
5579 SET_TYPE_ALIGN (t, align);
5580 TYPE_USER_ALIGN (t) = 1;
5582 return t;
5585 /* Create a new distinct copy of TYPE. The new type is made its own
5586 MAIN_VARIANT. If TYPE requires structural equality checks, the
5587 resulting type requires structural equality checks; otherwise, its
5588 TYPE_CANONICAL points to itself. */
5590 tree
5591 build_distinct_type_copy (tree type MEM_STAT_DECL)
5593 tree t = copy_node (type PASS_MEM_STAT);
5595 TYPE_POINTER_TO (t) = 0;
5596 TYPE_REFERENCE_TO (t) = 0;
5598 /* Set the canonical type either to a new equivalence class, or
5599 propagate the need for structural equality checks. */
5600 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5601 SET_TYPE_STRUCTURAL_EQUALITY (t);
5602 else
5603 TYPE_CANONICAL (t) = t;
5605 /* Make it its own variant. */
5606 TYPE_MAIN_VARIANT (t) = t;
5607 TYPE_NEXT_VARIANT (t) = 0;
5609 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5610 whose TREE_TYPE is not t. This can also happen in the Ada
5611 frontend when using subtypes. */
5613 return t;
5616 /* Create a new variant of TYPE, equivalent but distinct. This is so
5617 the caller can modify it. TYPE_CANONICAL for the return type will
5618 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5619 are considered equal by the language itself (or that both types
5620 require structural equality checks). */
5622 tree
5623 build_variant_type_copy (tree type MEM_STAT_DECL)
5625 tree t, m = TYPE_MAIN_VARIANT (type);
5627 t = build_distinct_type_copy (type PASS_MEM_STAT);
5629 /* Since we're building a variant, assume that it is a non-semantic
5630 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5631 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5632 /* Type variants have no alias set defined. */
5633 TYPE_ALIAS_SET (t) = -1;
5635 /* Add the new type to the chain of variants of TYPE. */
5636 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5637 TYPE_NEXT_VARIANT (m) = t;
5638 TYPE_MAIN_VARIANT (t) = m;
5640 return t;
5643 /* Return true if the from trees in both tree maps are equal. */
5646 tree_map_base_eq (const void *va, const void *vb)
5648 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5649 *const b = (const struct tree_map_base *) vb;
5650 return (a->from == b->from);
5653 /* Hash a from tree in a tree_map_base. */
5655 unsigned int
5656 tree_map_base_hash (const void *item)
5658 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5661 /* Return true if this tree map structure is marked for garbage collection
5662 purposes. We simply return true if the from tree is marked, so that this
5663 structure goes away when the from tree goes away. */
5666 tree_map_base_marked_p (const void *p)
5668 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5671 /* Hash a from tree in a tree_map. */
5673 unsigned int
5674 tree_map_hash (const void *item)
5676 return (((const struct tree_map *) item)->hash);
5679 /* Hash a from tree in a tree_decl_map. */
5681 unsigned int
5682 tree_decl_map_hash (const void *item)
5684 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5687 /* Return the initialization priority for DECL. */
5689 priority_type
5690 decl_init_priority_lookup (tree decl)
5692 symtab_node *snode = symtab_node::get (decl);
5694 if (!snode)
5695 return DEFAULT_INIT_PRIORITY;
5696 return
5697 snode->get_init_priority ();
5700 /* Return the finalization priority for DECL. */
5702 priority_type
5703 decl_fini_priority_lookup (tree decl)
5705 cgraph_node *node = cgraph_node::get (decl);
5707 if (!node)
5708 return DEFAULT_INIT_PRIORITY;
5709 return
5710 node->get_fini_priority ();
5713 /* Set the initialization priority for DECL to PRIORITY. */
5715 void
5716 decl_init_priority_insert (tree decl, priority_type priority)
5718 struct symtab_node *snode;
5720 if (priority == DEFAULT_INIT_PRIORITY)
5722 snode = symtab_node::get (decl);
5723 if (!snode)
5724 return;
5726 else if (VAR_P (decl))
5727 snode = varpool_node::get_create (decl);
5728 else
5729 snode = cgraph_node::get_create (decl);
5730 snode->set_init_priority (priority);
5733 /* Set the finalization priority for DECL to PRIORITY. */
5735 void
5736 decl_fini_priority_insert (tree decl, priority_type priority)
5738 struct cgraph_node *node;
5740 if (priority == DEFAULT_INIT_PRIORITY)
5742 node = cgraph_node::get (decl);
5743 if (!node)
5744 return;
5746 else
5747 node = cgraph_node::get_create (decl);
5748 node->set_fini_priority (priority);
5751 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5753 static void
5754 print_debug_expr_statistics (void)
5756 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5757 (long) debug_expr_for_decl->size (),
5758 (long) debug_expr_for_decl->elements (),
5759 debug_expr_for_decl->collisions ());
5762 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5764 static void
5765 print_value_expr_statistics (void)
5767 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5768 (long) value_expr_for_decl->size (),
5769 (long) value_expr_for_decl->elements (),
5770 value_expr_for_decl->collisions ());
5773 /* Lookup a debug expression for FROM, and return it if we find one. */
5775 tree
5776 decl_debug_expr_lookup (tree from)
5778 struct tree_decl_map *h, in;
5779 in.base.from = from;
5781 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5782 if (h)
5783 return h->to;
5784 return NULL_TREE;
5787 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5789 void
5790 decl_debug_expr_insert (tree from, tree to)
5792 struct tree_decl_map *h;
5794 h = ggc_alloc<tree_decl_map> ();
5795 h->base.from = from;
5796 h->to = to;
5797 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5800 /* Lookup a value expression for FROM, and return it if we find one. */
5802 tree
5803 decl_value_expr_lookup (tree from)
5805 struct tree_decl_map *h, in;
5806 in.base.from = from;
5808 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5809 if (h)
5810 return h->to;
5811 return NULL_TREE;
5814 /* Insert a mapping FROM->TO in the value expression hashtable. */
5816 void
5817 decl_value_expr_insert (tree from, tree to)
5819 struct tree_decl_map *h;
5821 h = ggc_alloc<tree_decl_map> ();
5822 h->base.from = from;
5823 h->to = to;
5824 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5827 /* Lookup a vector of debug arguments for FROM, and return it if we
5828 find one. */
5830 vec<tree, va_gc> **
5831 decl_debug_args_lookup (tree from)
5833 struct tree_vec_map *h, in;
5835 if (!DECL_HAS_DEBUG_ARGS_P (from))
5836 return NULL;
5837 gcc_checking_assert (debug_args_for_decl != NULL);
5838 in.base.from = from;
5839 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5840 if (h)
5841 return &h->to;
5842 return NULL;
5845 /* Insert a mapping FROM->empty vector of debug arguments in the debug
5846 arguments hashtable. */
5848 vec<tree, va_gc> **
5849 decl_debug_args_insert (tree from)
5851 struct tree_vec_map *h;
5852 tree_vec_map **loc;
5854 if (DECL_HAS_DEBUG_ARGS_P (from))
5855 return decl_debug_args_lookup (from);
5856 if (debug_args_for_decl == NULL)
5857 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5858 h = ggc_alloc<tree_vec_map> ();
5859 h->base.from = from;
5860 h->to = NULL;
5861 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5862 *loc = h;
5863 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5864 return &h->to;
5867 /* Hashing of types so that we don't make duplicates.
5868 The entry point is `type_hash_canon'. */
5870 /* Generate the default hash code for TYPE. This is designed for
5871 speed, rather than maximum entropy. */
5873 hashval_t
5874 type_hash_canon_hash (tree type)
5876 inchash::hash hstate;
5878 hstate.add_int (TREE_CODE (type));
5880 if (TREE_TYPE (type))
5881 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
5883 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
5884 /* Just the identifier is adequate to distinguish. */
5885 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
5887 switch (TREE_CODE (type))
5889 case METHOD_TYPE:
5890 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
5891 /* FALLTHROUGH. */
5892 case FUNCTION_TYPE:
5893 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
5894 if (TREE_VALUE (t) != error_mark_node)
5895 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
5896 break;
5898 case OFFSET_TYPE:
5899 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
5900 break;
5902 case ARRAY_TYPE:
5904 if (TYPE_DOMAIN (type))
5905 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
5906 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
5908 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
5909 hstate.add_object (typeless);
5912 break;
5914 case INTEGER_TYPE:
5916 tree t = TYPE_MAX_VALUE (type);
5917 if (!t)
5918 t = TYPE_MIN_VALUE (type);
5919 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
5920 hstate.add_object (TREE_INT_CST_ELT (t, i));
5921 break;
5924 case REAL_TYPE:
5925 case FIXED_POINT_TYPE:
5927 unsigned prec = TYPE_PRECISION (type);
5928 hstate.add_object (prec);
5929 break;
5932 case VECTOR_TYPE:
5933 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
5934 break;
5936 default:
5937 break;
5940 return hstate.end ();
5943 /* These are the Hashtable callback functions. */
5945 /* Returns true iff the types are equivalent. */
5947 bool
5948 type_cache_hasher::equal (type_hash *a, type_hash *b)
5950 /* First test the things that are the same for all types. */
5951 if (a->hash != b->hash
5952 || TREE_CODE (a->type) != TREE_CODE (b->type)
5953 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
5954 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
5955 TYPE_ATTRIBUTES (b->type))
5956 || (TREE_CODE (a->type) != COMPLEX_TYPE
5957 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
5958 return 0;
5960 /* Be careful about comparing arrays before and after the element type
5961 has been completed; don't compare TYPE_ALIGN unless both types are
5962 complete. */
5963 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
5964 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
5965 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
5966 return 0;
5968 switch (TREE_CODE (a->type))
5970 case VOID_TYPE:
5971 case OPAQUE_TYPE:
5972 case COMPLEX_TYPE:
5973 case POINTER_TYPE:
5974 case REFERENCE_TYPE:
5975 case NULLPTR_TYPE:
5976 return 1;
5978 case VECTOR_TYPE:
5979 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
5980 TYPE_VECTOR_SUBPARTS (b->type));
5982 case ENUMERAL_TYPE:
5983 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
5984 && !(TYPE_VALUES (a->type)
5985 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
5986 && TYPE_VALUES (b->type)
5987 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
5988 && type_list_equal (TYPE_VALUES (a->type),
5989 TYPE_VALUES (b->type))))
5990 return 0;
5992 /* fall through */
5994 case INTEGER_TYPE:
5995 case REAL_TYPE:
5996 case BOOLEAN_TYPE:
5997 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
5998 return false;
5999 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6000 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6001 TYPE_MAX_VALUE (b->type)))
6002 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6003 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6004 TYPE_MIN_VALUE (b->type))));
6006 case FIXED_POINT_TYPE:
6007 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6009 case OFFSET_TYPE:
6010 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6012 case METHOD_TYPE:
6013 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6014 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6015 || (TYPE_ARG_TYPES (a->type)
6016 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6017 && TYPE_ARG_TYPES (b->type)
6018 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6019 && type_list_equal (TYPE_ARG_TYPES (a->type),
6020 TYPE_ARG_TYPES (b->type)))))
6021 break;
6022 return 0;
6023 case ARRAY_TYPE:
6024 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6025 where the flag should be inherited from the element type
6026 and can change after ARRAY_TYPEs are created; on non-aggregates
6027 compare it and hash it, scalars will never have that flag set
6028 and we need to differentiate between arrays created by different
6029 front-ends or middle-end created arrays. */
6030 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6031 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6032 || (TYPE_TYPELESS_STORAGE (a->type)
6033 == TYPE_TYPELESS_STORAGE (b->type))));
6035 case RECORD_TYPE:
6036 case UNION_TYPE:
6037 case QUAL_UNION_TYPE:
6038 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6039 || (TYPE_FIELDS (a->type)
6040 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6041 && TYPE_FIELDS (b->type)
6042 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6043 && type_list_equal (TYPE_FIELDS (a->type),
6044 TYPE_FIELDS (b->type))));
6046 case FUNCTION_TYPE:
6047 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6048 || (TYPE_ARG_TYPES (a->type)
6049 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6050 && TYPE_ARG_TYPES (b->type)
6051 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6052 && type_list_equal (TYPE_ARG_TYPES (a->type),
6053 TYPE_ARG_TYPES (b->type))))
6054 break;
6055 return 0;
6057 default:
6058 return 0;
6061 if (lang_hooks.types.type_hash_eq != NULL)
6062 return lang_hooks.types.type_hash_eq (a->type, b->type);
6064 return 1;
6067 /* Given TYPE, and HASHCODE its hash code, return the canonical
6068 object for an identical type if one already exists.
6069 Otherwise, return TYPE, and record it as the canonical object.
6071 To use this function, first create a type of the sort you want.
6072 Then compute its hash code from the fields of the type that
6073 make it different from other similar types.
6074 Then call this function and use the value. */
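/* Editor's illustrative sketch (assumed example) of the usual calling
   pattern:
     tree t = make_node (INTEGER_TYPE);
     ... fill in TYPE_PRECISION, TYPE_MIN_VALUE, TYPE_MAX_VALUE ...
     hashval_t hash = type_hash_canon_hash (t);
     t = type_hash_canon (hash, t);
   If an identical type was already registered, the freshly built node is
   freed and the existing canonical node is returned instead.  */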
6076 tree
6077 type_hash_canon (unsigned int hashcode, tree type)
6079 type_hash in;
6080 type_hash **loc;
6082 /* The hash table only contains main variants, so ensure that's what we're
6083 being passed. */
6084 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6086 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6087 must call that routine before comparing TYPE_ALIGNs. */
6088 layout_type (type);
6090 in.hash = hashcode;
6091 in.type = type;
6093 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6094 if (*loc)
6096 tree t1 = ((type_hash *) *loc)->type;
6097 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6098 && t1 != type);
6099 if (TYPE_UID (type) + 1 == next_type_uid)
6100 --next_type_uid;
6101 /* Free also min/max values and the cache for integer
6102 types. This can't be done in free_node, as LTO frees
6103 those on its own. */
6104 if (TREE_CODE (type) == INTEGER_TYPE)
6106 if (TYPE_MIN_VALUE (type)
6107 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6109 /* Zero is always in TYPE_CACHED_VALUES. */
6110 if (! TYPE_UNSIGNED (type))
6111 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6112 ggc_free (TYPE_MIN_VALUE (type));
6114 if (TYPE_MAX_VALUE (type)
6115 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6117 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6118 ggc_free (TYPE_MAX_VALUE (type));
6120 if (TYPE_CACHED_VALUES_P (type))
6121 ggc_free (TYPE_CACHED_VALUES (type));
6123 free_node (type);
6124 return t1;
6126 else
6128 struct type_hash *h;
6130 h = ggc_alloc<type_hash> ();
6131 h->hash = hashcode;
6132 h->type = type;
6133 *loc = h;
6135 return type;
6139 static void
6140 print_type_hash_statistics (void)
6142 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6143 (long) type_hash_table->size (),
6144 (long) type_hash_table->elements (),
6145 type_hash_table->collisions ());
6148 /* Given two lists of types
6149 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6150 return 1 if the lists contain the same types in the same order.
6151 Also, the TREE_PURPOSEs must match. */
6153 bool
6154 type_list_equal (const_tree l1, const_tree l2)
6156 const_tree t1, t2;
6158 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6159 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6160 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6161 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6162 && (TREE_TYPE (TREE_PURPOSE (t1))
6163 == TREE_TYPE (TREE_PURPOSE (t2))))))
6164 return false;
6166 return t1 == t2;
6169 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6170 given by TYPE. If the argument list accepts variable arguments,
6171 then this function counts only the ordinary arguments. */
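/* Editor's illustrative sketch (assumed example): for the types of
     int f (int, char);    and    int g (int, char, ...);
   type_num_arguments returns 2 in both cases; the trailing void marker of
   the non-variadic argument list is not counted.  */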
6174 type_num_arguments (const_tree fntype)
6176 int i = 0;
6178 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6179 /* If the function does not take a variable number of arguments,
6180 the last element in the list will have type `void'. */
6181 if (VOID_TYPE_P (TREE_VALUE (t)))
6182 break;
6183 else
6184 ++i;
6186 return i;
6189 /* Return the type of the function TYPE's argument ARGNO if known.
6190 For vararg functions, where ARGNO refers to one of the variadic
6191 arguments, return null.  Otherwise, return void_type_node for
6192 out-of-bounds ARGNO. */
6194 tree
6195 type_argument_type (const_tree fntype, unsigned argno)
6197 /* Treat zero the same as an out-of-bounds argument number. */
6198 if (!argno)
6199 return void_type_node;
6201 function_args_iterator iter;
6203 tree argtype;
6204 unsigned i = 1;
6205 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6207 /* A vararg function's argument list ends in a null. Otherwise,
6208 an ordinary function's argument list ends with void. Return
6209 null if ARGNO refers to a vararg argument, void_type_node if
6210 it's out of bounds, and the formal argument type otherwise. */
6211 if (!argtype)
6212 break;
6214 if (i == argno || VOID_TYPE_P (argtype))
6215 return argtype;
6217 ++i;
6220 return NULL_TREE;
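/* For example, if FNTYPE is the FUNCTION_TYPE of int f (int, char *), then

	type_argument_type (fntype, 1)  is integer_type_node
	type_argument_type (fntype, 2)  is the char * type
	type_argument_type (fntype, 3)  is void_type_node (out of bounds)

   whereas for int g (int, ...) a request for argument 2 yields NULL_TREE,
   since that position belongs to the variadic arguments.  */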
6223 /* Nonzero if integer constants T1 and T2
6224 represent the same constant value. */
6227 tree_int_cst_equal (const_tree t1, const_tree t2)
6229 if (t1 == t2)
6230 return 1;
6232 if (t1 == 0 || t2 == 0)
6233 return 0;
6235 STRIP_ANY_LOCATION_WRAPPER (t1);
6236 STRIP_ANY_LOCATION_WRAPPER (t2);
6238 if (TREE_CODE (t1) == INTEGER_CST
6239 && TREE_CODE (t2) == INTEGER_CST
6240 && wi::to_widest (t1) == wi::to_widest (t2))
6241 return 1;
6243 return 0;
6246 /* Return true if T is an INTEGER_CST whose numerical value (extended
6247 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6249 bool
6250 tree_fits_shwi_p (const_tree t)
6252 return (t != NULL_TREE
6253 && TREE_CODE (t) == INTEGER_CST
6254 && wi::fits_shwi_p (wi::to_widest (t)));
6257 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6258 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6260 bool
6261 tree_fits_poly_int64_p (const_tree t)
6263 if (t == NULL_TREE)
6264 return false;
6265 if (POLY_INT_CST_P (t))
6267 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6268 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6269 return false;
6270 return true;
6272 return (TREE_CODE (t) == INTEGER_CST
6273 && wi::fits_shwi_p (wi::to_widest (t)));
6276 /* Return true if T is an INTEGER_CST whose numerical value (extended
6277 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6279 bool
6280 tree_fits_uhwi_p (const_tree t)
6282 return (t != NULL_TREE
6283 && TREE_CODE (t) == INTEGER_CST
6284 && wi::fits_uhwi_p (wi::to_widest (t)));
6287 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6288 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6290 bool
6291 tree_fits_poly_uint64_p (const_tree t)
6293 if (t == NULL_TREE)
6294 return false;
6295 if (POLY_INT_CST_P (t))
6297 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6298 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6299 return false;
6300 return true;
6302 return (TREE_CODE (t) == INTEGER_CST
6303 && wi::fits_uhwi_p (wi::to_widest (t)));
6306 /* T is an INTEGER_CST whose numerical value (extended according to
6307 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6308 HOST_WIDE_INT. */
6310 HOST_WIDE_INT
6311 tree_to_shwi (const_tree t)
6313 gcc_assert (tree_fits_shwi_p (t));
6314 return TREE_INT_CST_LOW (t);
6317 /* T is an INTEGER_CST whose numerical value (extended according to
6318 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6319 HOST_WIDE_INT. */
6321 unsigned HOST_WIDE_INT
6322 tree_to_uhwi (const_tree t)
6324 gcc_assert (tree_fits_uhwi_p (t));
6325 return TREE_INT_CST_LOW (t);
6328 /* Return the most significant (sign) bit of T. */
6331 tree_int_cst_sign_bit (const_tree t)
6333 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6335 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6338 /* Return an indication of the sign of the integer constant T.
6339 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6340 Note that -1 will never be returned if T's type is unsigned. */
6343 tree_int_cst_sgn (const_tree t)
6345 if (wi::to_wide (t) == 0)
6346 return 0;
6347 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6348 return 1;
6349 else if (wi::neg_p (wi::to_wide (t)))
6350 return -1;
6351 else
6352 return 1;
6355 /* Return the minimum number of bits needed to represent VALUE in a
6356 signed or unsigned type; SGN says which.  */
6358 unsigned int
6359 tree_int_cst_min_precision (tree value, signop sgn)
6361 /* If the value is negative, compute its negative minus 1. The latter
6362 adjustment is because the absolute value of the largest negative value
6363 is one larger than the largest positive value. This is equivalent to
6364 a bit-wise negation, so use that operation instead. */
6366 if (tree_int_cst_sgn (value) < 0)
6367 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6369 /* Return the number of bits needed, taking into account the fact
6370 that we need one more bit for a signed than unsigned type.
6371 If value is 0 or -1, the minimum precision is 1 no matter
6372 whether SGN is SIGNED or UNSIGNED.  */
6374 if (integer_zerop (value))
6375 return 1;
6376 else
6377 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
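/* For example, for VALUE == -5 and SGN == SIGNED the bit-wise negation
   yields 4, tree_floor_log2 (4) is 2, and the result is 2 + 1 + 1 = 4 bits
   (-5 is 1011 in four-bit two's complement).  For VALUE == 5 and
   SGN == UNSIGNED the result is 2 + 1 = 3 bits.  */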
6380 /* Return truthvalue of whether T1 is the same tree structure as T2.
6381 Return 1 if they are the same.
6382 Return 0 if they are understandably different.
6383 Return -1 if either contains tree structure not understood by
6384 this function. */
6387 simple_cst_equal (const_tree t1, const_tree t2)
6389 enum tree_code code1, code2;
6390 int cmp;
6391 int i;
6393 if (t1 == t2)
6394 return 1;
6395 if (t1 == 0 || t2 == 0)
6396 return 0;
6398 /* For location wrappers to be the same, they must be at the same
6399 source location (and wrap the same thing). */
6400 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6402 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6403 return 0;
6404 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6407 code1 = TREE_CODE (t1);
6408 code2 = TREE_CODE (t2);
6410 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6412 if (CONVERT_EXPR_CODE_P (code2)
6413 || code2 == NON_LVALUE_EXPR)
6414 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6415 else
6416 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6419 else if (CONVERT_EXPR_CODE_P (code2)
6420 || code2 == NON_LVALUE_EXPR)
6421 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6423 if (code1 != code2)
6424 return 0;
6426 switch (code1)
6428 case INTEGER_CST:
6429 return wi::to_widest (t1) == wi::to_widest (t2);
6431 case REAL_CST:
6432 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6434 case FIXED_CST:
6435 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6437 case STRING_CST:
6438 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6439 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6440 TREE_STRING_LENGTH (t1)));
6442 case CONSTRUCTOR:
6444 unsigned HOST_WIDE_INT idx;
6445 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6446 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6448 if (vec_safe_length (v1) != vec_safe_length (v2))
6449 return false;
6451 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6452 /* ??? Should we handle also fields here? */
6453 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6454 return false;
6455 return true;
6458 case SAVE_EXPR:
6459 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6461 case CALL_EXPR:
6462 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6463 if (cmp <= 0)
6464 return cmp;
6465 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6466 return 0;
6468 const_tree arg1, arg2;
6469 const_call_expr_arg_iterator iter1, iter2;
6470 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6471 arg2 = first_const_call_expr_arg (t2, &iter2);
6472 arg1 && arg2;
6473 arg1 = next_const_call_expr_arg (&iter1),
6474 arg2 = next_const_call_expr_arg (&iter2))
6476 cmp = simple_cst_equal (arg1, arg2);
6477 if (cmp <= 0)
6478 return cmp;
6480 return arg1 == arg2;
6483 case TARGET_EXPR:
6484 /* Special case: if either target is an unallocated VAR_DECL,
6485 it means that it's going to be unified with whatever the
6486 TARGET_EXPR is really supposed to initialize, so treat it
6487 as being equivalent to anything. */
6488 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6489 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6490 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6491 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6492 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6493 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6494 cmp = 1;
6495 else
6496 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6498 if (cmp <= 0)
6499 return cmp;
6501 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6503 case WITH_CLEANUP_EXPR:
6504 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6505 if (cmp <= 0)
6506 return cmp;
6508 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6510 case COMPONENT_REF:
6511 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6512 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6514 return 0;
6516 case VAR_DECL:
6517 case PARM_DECL:
6518 case CONST_DECL:
6519 case FUNCTION_DECL:
6520 return 0;
6522 default:
6523 if (POLY_INT_CST_P (t1))
6524 /* A false return means maybe_ne rather than known_ne. */
6525 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6526 TYPE_SIGN (TREE_TYPE (t1))),
6527 poly_widest_int::from (poly_int_cst_value (t2),
6528 TYPE_SIGN (TREE_TYPE (t2))));
6529 break;
6532 /* This general rule works for most tree codes. All exceptions should be
6533 handled above. If this is a language-specific tree code, we can't
6534 trust what might be in the operand, so say we don't know
6535 the situation. */
6536 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6537 return -1;
6539 switch (TREE_CODE_CLASS (code1))
6541 case tcc_unary:
6542 case tcc_binary:
6543 case tcc_comparison:
6544 case tcc_expression:
6545 case tcc_reference:
6546 case tcc_statement:
6547 cmp = 1;
6548 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6550 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6551 if (cmp <= 0)
6552 return cmp;
6555 return cmp;
6557 default:
6558 return -1;
6562 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6563 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6564 than U, respectively. */
6567 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6569 if (tree_int_cst_sgn (t) < 0)
6570 return -1;
6571 else if (!tree_fits_uhwi_p (t))
6572 return 1;
6573 else if (TREE_INT_CST_LOW (t) == u)
6574 return 0;
6575 else if (TREE_INT_CST_LOW (t) < u)
6576 return -1;
6577 else
6578 return 1;
6581 /* Return true if SIZE represents a constant size that is in bounds of
6582 what the middle-end and the backend accept (covering not more than
6583 half of the address-space).
6584 When PERR is non-null, set *PERR on failure to the description of
6585 why SIZE is not valid. */
6587 bool
6588 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6590 if (POLY_INT_CST_P (size))
6592 if (TREE_OVERFLOW (size))
6593 return false;
6594 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6595 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6596 return false;
6597 return true;
6600 cst_size_error error;
6601 if (!perr)
6602 perr = &error;
6604 if (TREE_CODE (size) != INTEGER_CST)
6606 *perr = cst_size_not_constant;
6607 return false;
6610 if (TREE_OVERFLOW_P (size))
6612 *perr = cst_size_overflow;
6613 return false;
6616 if (tree_int_cst_sgn (size) < 0)
6618 *perr = cst_size_negative;
6619 return false;
6621 if (!tree_fits_uhwi_p (size)
6622 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6623 < wi::to_widest (size) * 2))
6625 *perr = cst_size_too_big;
6626 return false;
6629 return true;
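/* For instance, with a 64-bit sizetype a constant size of 2^62 bytes passes
   all of the checks above, whereas 2^63 bytes fails the final test and sets
   *PERR to cst_size_too_big: doubling it exceeds TYPE_MAX_VALUE (sizetype),
   i.e. the object would cover more than half of the address space.  */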
6632 /* Return the precision of the type, or for a complex or vector type the
6633 precision of the type of its elements. */
6635 unsigned int
6636 element_precision (const_tree type)
6638 if (!TYPE_P (type))
6639 type = TREE_TYPE (type);
6640 enum tree_code code = TREE_CODE (type);
6641 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6642 type = TREE_TYPE (type);
6644 return TYPE_PRECISION (type);
6647 /* Return true if CODE represents an associative tree code. Otherwise
6648 return false. */
6649 bool
6650 associative_tree_code (enum tree_code code)
6652 switch (code)
6654 case BIT_IOR_EXPR:
6655 case BIT_AND_EXPR:
6656 case BIT_XOR_EXPR:
6657 case PLUS_EXPR:
6658 case MULT_EXPR:
6659 case MIN_EXPR:
6660 case MAX_EXPR:
6661 return true;
6663 default:
6664 break;
6666 return false;
6669 /* Return true if CODE represents a commutative tree code. Otherwise
6670 return false. */
6671 bool
6672 commutative_tree_code (enum tree_code code)
6674 switch (code)
6676 case PLUS_EXPR:
6677 case MULT_EXPR:
6678 case MULT_HIGHPART_EXPR:
6679 case MIN_EXPR:
6680 case MAX_EXPR:
6681 case BIT_IOR_EXPR:
6682 case BIT_XOR_EXPR:
6683 case BIT_AND_EXPR:
6684 case NE_EXPR:
6685 case EQ_EXPR:
6686 case UNORDERED_EXPR:
6687 case ORDERED_EXPR:
6688 case UNEQ_EXPR:
6689 case LTGT_EXPR:
6690 case TRUTH_AND_EXPR:
6691 case TRUTH_XOR_EXPR:
6692 case TRUTH_OR_EXPR:
6693 case WIDEN_MULT_EXPR:
6694 case VEC_WIDEN_MULT_HI_EXPR:
6695 case VEC_WIDEN_MULT_LO_EXPR:
6696 case VEC_WIDEN_MULT_EVEN_EXPR:
6697 case VEC_WIDEN_MULT_ODD_EXPR:
6698 return true;
6700 default:
6701 break;
6703 return false;
6706 /* Return true if CODE represents a ternary tree code for which the
6707 first two operands are commutative. Otherwise return false. */
6708 bool
6709 commutative_ternary_tree_code (enum tree_code code)
6711 switch (code)
6713 case WIDEN_MULT_PLUS_EXPR:
6714 case WIDEN_MULT_MINUS_EXPR:
6715 case DOT_PROD_EXPR:
6716 return true;
6718 default:
6719 break;
6721 return false;
6724 /* Returns true if CODE can overflow. */
6726 bool
6727 operation_can_overflow (enum tree_code code)
6729 switch (code)
6731 case PLUS_EXPR:
6732 case MINUS_EXPR:
6733 case MULT_EXPR:
6734 case LSHIFT_EXPR:
6735 /* Can overflow in various ways. */
6736 return true;
6737 case TRUNC_DIV_EXPR:
6738 case EXACT_DIV_EXPR:
6739 case FLOOR_DIV_EXPR:
6740 case CEIL_DIV_EXPR:
6741 /* For INT_MIN / -1. */
6742 return true;
6743 case NEGATE_EXPR:
6744 case ABS_EXPR:
6745 /* For -INT_MIN. */
6746 return true;
6747 default:
6748 /* These operators cannot overflow. */
6749 return false;
6753 /* Returns true if CODE operating on operands of type TYPE cannot overflow,
6754 or if -ftrapv does not generate trapping insns for CODE.  */
6756 bool
6757 operation_no_trapping_overflow (tree type, enum tree_code code)
6759 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6761 /* We don't generate instructions that trap on overflow for complex or vector
6762 types. */
6763 if (!INTEGRAL_TYPE_P (type))
6764 return true;
6766 if (!TYPE_OVERFLOW_TRAPS (type))
6767 return true;
6769 switch (code)
6771 case PLUS_EXPR:
6772 case MINUS_EXPR:
6773 case MULT_EXPR:
6774 case NEGATE_EXPR:
6775 case ABS_EXPR:
6776 /* These operators can overflow, and -ftrapv generates trapping code for
6777 these. */
6778 return false;
6779 case TRUNC_DIV_EXPR:
6780 case EXACT_DIV_EXPR:
6781 case FLOOR_DIV_EXPR:
6782 case CEIL_DIV_EXPR:
6783 case LSHIFT_EXPR:
6784 /* These operators can overflow, but -ftrapv does not generate trapping
6785 code for these. */
6786 return true;
6787 default:
6788 /* These operators cannot overflow. */
6789 return true;
6793 /* Constructors for pointer, array and function types.
6794 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6795 constructed by language-dependent code, not here.) */
6797 /* Construct, lay out and return the type of pointers to TO_TYPE with
6798 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6799 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6800 indicate this type can reference all of memory. If such a type has
6801 already been constructed, reuse it. */
6803 tree
6804 build_pointer_type_for_mode (tree to_type, machine_mode mode,
6805 bool can_alias_all)
6807 tree t;
6808 bool could_alias = can_alias_all;
6810 if (to_type == error_mark_node)
6811 return error_mark_node;
6813 if (mode == VOIDmode)
6815 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6816 mode = targetm.addr_space.pointer_mode (as);
6819 /* If the pointed-to type has the may_alias attribute set, force
6820 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6821 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6822 can_alias_all = true;
6824 /* In some cases, languages will have things that aren't a POINTER_TYPE
6825 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6826 In that case, return that type without regard to the rest of our
6827 operands.
6829 ??? This is a kludge, but consistent with the way this function has
6830 always operated and there doesn't seem to be a good way to avoid this
6831 at the moment. */
6832 if (TYPE_POINTER_TO (to_type) != 0
6833 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
6834 return TYPE_POINTER_TO (to_type);
6836 /* First, if we already have a type for pointers to TO_TYPE and it's
6837 the proper mode, use it. */
6838 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
6839 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6840 return t;
6842 t = make_node (POINTER_TYPE);
6844 TREE_TYPE (t) = to_type;
6845 SET_TYPE_MODE (t, mode);
6846 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6847 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
6848 TYPE_POINTER_TO (to_type) = t;
6850 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6851 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6852 SET_TYPE_STRUCTURAL_EQUALITY (t);
6853 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6854 TYPE_CANONICAL (t)
6855 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
6856 mode, false);
6858 /* Lay out the type. This function has many callers that are concerned
6859 with expression-construction, and this simplifies them all. */
6860 layout_type (t);
6862 return t;
6865 /* By default build pointers in ptr_mode. */
6867 tree
6868 build_pointer_type (tree to_type)
6870 return build_pointer_type_for_mode (to_type, VOIDmode, false);
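/* For example, build_pointer_type (char_type_node) yields the POINTER_TYPE
   for char * in ptr_mode, reusing a node already chained on
   TYPE_POINTER_TO (char_type_node) when one with the right mode and
   aliasing flag exists.  */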
6873 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
6875 tree
6876 build_reference_type_for_mode (tree to_type, machine_mode mode,
6877 bool can_alias_all)
6879 tree t;
6880 bool could_alias = can_alias_all;
6882 if (to_type == error_mark_node)
6883 return error_mark_node;
6885 if (mode == VOIDmode)
6887 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6888 mode = targetm.addr_space.pointer_mode (as);
6891 /* If the pointed-to type has the may_alias attribute set, force
6892 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6893 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6894 can_alias_all = true;
6896 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
6897 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
6898 In that case, return that type without regard to the rest of our
6899 operands.
6901 ??? This is a kludge, but consistent with the way this function has
6902 always operated and there doesn't seem to be a good way to avoid this
6903 at the moment. */
6904 if (TYPE_REFERENCE_TO (to_type) != 0
6905 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
6906 return TYPE_REFERENCE_TO (to_type);
6908 /* First, if we already have a type for references to TO_TYPE and it's
6909 the proper mode, use it. */
6910 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
6911 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6912 return t;
6914 t = make_node (REFERENCE_TYPE);
6916 TREE_TYPE (t) = to_type;
6917 SET_TYPE_MODE (t, mode);
6918 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6919 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
6920 TYPE_REFERENCE_TO (to_type) = t;
6922 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6923 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6924 SET_TYPE_STRUCTURAL_EQUALITY (t);
6925 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6926 TYPE_CANONICAL (t)
6927 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
6928 mode, false);
6930 layout_type (t);
6932 return t;
6936 /* Build the node for the type of references-to-TO_TYPE by default
6937 in ptr_mode. */
6939 tree
6940 build_reference_type (tree to_type)
6942 return build_reference_type_for_mode (to_type, VOIDmode, false);
6945 #define MAX_INT_CACHED_PREC \
6946 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6947 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
6949 /* Builds a signed or unsigned integer type of precision PRECISION.
6950 Used for C bitfields whose precision does not match that of
6951 built-in target types. */
6952 tree
6953 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
6954 int unsignedp)
6956 tree itype, ret;
6958 if (unsignedp)
6959 unsignedp = MAX_INT_CACHED_PREC + 1;
6961 if (precision <= MAX_INT_CACHED_PREC)
6963 itype = nonstandard_integer_type_cache[precision + unsignedp];
6964 if (itype)
6965 return itype;
6968 itype = make_node (INTEGER_TYPE);
6969 TYPE_PRECISION (itype) = precision;
6971 if (unsignedp)
6972 fixup_unsigned_type (itype);
6973 else
6974 fixup_signed_type (itype);
6976 inchash::hash hstate;
6977 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
6978 ret = type_hash_canon (hstate.end (), itype);
6979 if (precision <= MAX_INT_CACHED_PREC)
6980 nonstandard_integer_type_cache[precision + unsignedp] = ret;
6982 return ret;
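/* For example, a front end handling a declaration such as

	struct { unsigned int f : 24; } s;

   can represent the 24-bit bit-field with
   build_nonstandard_integer_type (24, 1); repeated calls with the same
   precision and signedness return the cached node.  */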
6985 #define MAX_BOOL_CACHED_PREC \
6986 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6987 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
6989 /* Builds a boolean type of precision PRECISION.
6990 Used for boolean vectors to choose proper vector element size. */
6991 tree
6992 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
6994 tree type;
6996 if (precision <= MAX_BOOL_CACHED_PREC)
6998 type = nonstandard_boolean_type_cache[precision];
6999 if (type)
7000 return type;
7003 type = make_node (BOOLEAN_TYPE);
7004 TYPE_PRECISION (type) = precision;
7005 fixup_signed_type (type);
7007 if (precision <= MAX_BOOL_CACHED_PREC)
7008 nonstandard_boolean_type_cache[precision] = type;
7010 return type;
7013 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7014 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7015 is true, reuse such a type that has already been constructed. */
7017 static tree
7018 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7020 tree itype = make_node (INTEGER_TYPE);
7022 TREE_TYPE (itype) = type;
7024 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7025 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7027 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7028 SET_TYPE_MODE (itype, TYPE_MODE (type));
7029 TYPE_SIZE (itype) = TYPE_SIZE (type);
7030 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7031 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7032 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7033 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7035 if (!shared)
7036 return itype;
7038 if ((TYPE_MIN_VALUE (itype)
7039 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7040 || (TYPE_MAX_VALUE (itype)
7041 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7043 /* Since we cannot reliably merge this type, we need to compare it using
7044 structural equality checks. */
7045 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7046 return itype;
7049 hashval_t hash = type_hash_canon_hash (itype);
7050 itype = type_hash_canon (hash, itype);
7052 return itype;
7055 /* Wrapper around build_range_type_1 with SHARED set to true. */
7057 tree
7058 build_range_type (tree type, tree lowval, tree highval)
7060 return build_range_type_1 (type, lowval, highval, true);
7063 /* Wrapper around build_range_type_1 with SHARED set to false. */
7065 tree
7066 build_nonshared_range_type (tree type, tree lowval, tree highval)
7068 return build_range_type_1 (type, lowval, highval, false);
7071 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7072 MAXVAL should be the maximum value in the domain
7073 (one less than the length of the array).
7075 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7076 We don't enforce this limit, that is up to caller (e.g. language front end).
7077 The limit exists because the result is a signed type and we don't handle
7078 sizes that use more than one HOST_WIDE_INT. */
7080 tree
7081 build_index_type (tree maxval)
7083 return build_range_type (sizetype, size_zero_node, maxval);
7086 /* Return true if the debug information for TYPE, a subtype, should be emitted
7087 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7088 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7089 debug info and doesn't reflect the source code. */
7091 bool
7092 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7094 tree base_type = TREE_TYPE (type), low, high;
7096 /* Subrange types have a base type which is an integral type. */
7097 if (!INTEGRAL_TYPE_P (base_type))
7098 return false;
7100 /* Get the real bounds of the subtype. */
7101 if (lang_hooks.types.get_subrange_bounds)
7102 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7103 else
7105 low = TYPE_MIN_VALUE (type);
7106 high = TYPE_MAX_VALUE (type);
7109 /* If the type and its base type have the same representation and the same
7110 name, then the type is not a subrange but a copy of the base type. */
7111 if ((TREE_CODE (base_type) == INTEGER_TYPE
7112 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7113 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7114 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7115 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7116 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7117 return false;
7119 if (lowval)
7120 *lowval = low;
7121 if (highval)
7122 *highval = high;
7123 return true;
7126 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7127 and number of elements specified by the range of values of INDEX_TYPE.
7128 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7129 If SHARED is true, reuse such a type that has already been constructed.
7130 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7132 tree
7133 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7134 bool shared, bool set_canonical)
7136 tree t;
7138 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7140 error ("arrays of functions are not meaningful");
7141 elt_type = integer_type_node;
7144 t = make_node (ARRAY_TYPE);
7145 TREE_TYPE (t) = elt_type;
7146 TYPE_DOMAIN (t) = index_type;
7147 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7148 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7149 layout_type (t);
7151 if (shared)
7153 hashval_t hash = type_hash_canon_hash (t);
7154 t = type_hash_canon (hash, t);
7157 if (TYPE_CANONICAL (t) == t && set_canonical)
7159 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7160 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7161 || in_lto_p)
7162 SET_TYPE_STRUCTURAL_EQUALITY (t);
7163 else if (TYPE_CANONICAL (elt_type) != elt_type
7164 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7165 TYPE_CANONICAL (t)
7166 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7167 index_type
7168 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7169 typeless_storage, shared, set_canonical);
7172 return t;
7175 /* Wrapper around build_array_type_1 with SHARED set to true. */
7177 tree
7178 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7180 return
7181 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7184 /* Wrapper around build_array_type_1 with SHARED set to false. */
7186 tree
7187 build_nonshared_array_type (tree elt_type, tree index_type)
7189 return build_array_type_1 (elt_type, index_type, false, false, true);
7192 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7193 sizetype. */
7195 tree
7196 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7198 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
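/* For example, the type of a ten-element array of int can be built either as

	build_array_type_nelts (integer_type_node, 10)

   or, spelling out the domain explicitly,

	build_array_type (integer_type_node, build_index_type (size_int (9)));

   build_index_type yields the sizetype range [0, 9] that becomes the
   TYPE_DOMAIN of the array.  */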
7201 /* Recursively examines the array elements of TYPE, until a non-array
7202 element type is found. */
7204 tree
7205 strip_array_types (tree type)
7207 while (TREE_CODE (type) == ARRAY_TYPE)
7208 type = TREE_TYPE (type);
7210 return type;
7213 /* Computes the canonical argument types from the argument type list
7214 ARGTYPES.
7216 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7217 on entry to this function, or if any of the ARGTYPES are
7218 structural.
7220 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7221 true on entry to this function, or if any of the ARGTYPES are
7222 non-canonical.
7224 Returns a canonical argument list, which may be ARGTYPES when the
7225 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7226 true) or would not differ from ARGTYPES. */
7228 static tree
7229 maybe_canonicalize_argtypes (tree argtypes,
7230 bool *any_structural_p,
7231 bool *any_noncanonical_p)
7233 tree arg;
7234 bool any_noncanonical_argtypes_p = false;
7236 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7238 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7239 /* Fail gracefully by stating that the type is structural. */
7240 *any_structural_p = true;
7241 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7242 *any_structural_p = true;
7243 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7244 || TREE_PURPOSE (arg))
7245 /* If the argument has a default argument, we consider it
7246 non-canonical even though the type itself is canonical.
7247 That way, different variants of function and method types
7248 with default arguments will all point to the variant with
7249 no defaults as their canonical type. */
7250 any_noncanonical_argtypes_p = true;
7253 if (*any_structural_p)
7254 return argtypes;
7256 if (any_noncanonical_argtypes_p)
7258 /* Build the canonical list of argument types. */
7259 tree canon_argtypes = NULL_TREE;
7260 bool is_void = false;
7262 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7264 if (arg == void_list_node)
7265 is_void = true;
7266 else
7267 canon_argtypes = tree_cons (NULL_TREE,
7268 TYPE_CANONICAL (TREE_VALUE (arg)),
7269 canon_argtypes);
7272 canon_argtypes = nreverse (canon_argtypes);
7273 if (is_void)
7274 canon_argtypes = chainon (canon_argtypes, void_list_node);
7276 /* There is a non-canonical type. */
7277 *any_noncanonical_p = true;
7278 return canon_argtypes;
7281 /* The canonical argument types are the same as ARGTYPES. */
7282 return argtypes;
7285 /* Construct, lay out and return
7286 the type of functions returning type VALUE_TYPE
7287 given arguments of types ARG_TYPES.
7288 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7289 are data type nodes for the arguments of the function.
7290 If such a type has already been constructed, reuse it. */
7292 tree
7293 build_function_type (tree value_type, tree arg_types)
7295 tree t;
7296 inchash::hash hstate;
7297 bool any_structural_p, any_noncanonical_p;
7298 tree canon_argtypes;
7300 gcc_assert (arg_types != error_mark_node);
7302 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7304 error ("function return type cannot be function");
7305 value_type = integer_type_node;
7308 /* Make a node of the sort we want. */
7309 t = make_node (FUNCTION_TYPE);
7310 TREE_TYPE (t) = value_type;
7311 TYPE_ARG_TYPES (t) = arg_types;
7313 /* If we already have such a type, use the old one. */
7314 hashval_t hash = type_hash_canon_hash (t);
7315 t = type_hash_canon (hash, t);
7317 /* Set up the canonical type. */
7318 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7319 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7320 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7321 &any_structural_p,
7322 &any_noncanonical_p);
7323 if (any_structural_p)
7324 SET_TYPE_STRUCTURAL_EQUALITY (t);
7325 else if (any_noncanonical_p)
7326 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7327 canon_argtypes);
7329 if (!COMPLETE_TYPE_P (t))
7330 layout_type (t);
7331 return t;
7334 /* Build a function type. The RETURN_TYPE is the type returned by the
7335 function. If VAARGS is set, no void_type_node is appended to the
7336 list.  ARGP must always be terminated by a NULL_TREE.  */
7338 static tree
7339 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7341 tree t, args, last;
7343 t = va_arg (argp, tree);
7344 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7345 args = tree_cons (NULL_TREE, t, args);
7347 if (vaargs)
7349 last = args;
7350 if (args != NULL_TREE)
7351 args = nreverse (args);
7352 gcc_assert (last != void_list_node);
7354 else if (args == NULL_TREE)
7355 args = void_list_node;
7356 else
7358 last = args;
7359 args = nreverse (args);
7360 TREE_CHAIN (last) = void_list_node;
7362 args = build_function_type (return_type, args);
7364 return args;
7367 /* Build a function type. The RETURN_TYPE is the type returned by the
7368 function. If additional arguments are provided, they are
7369 additional argument types. The list of argument types must always
7370 be terminated by NULL_TREE. */
7372 tree
7373 build_function_type_list (tree return_type, ...)
7375 tree args;
7376 va_list p;
7378 va_start (p, return_type);
7379 args = build_function_type_list_1 (false, return_type, p);
7380 va_end (p);
7381 return args;
7384 /* Build a variable argument function type. The RETURN_TYPE is the
7385 type returned by the function. If additional arguments are provided,
7386 they are additional argument types. The list of argument types must
7387 always be terminated by NULL_TREE. */
7389 tree
7390 build_varargs_function_type_list (tree return_type, ...)
7392 tree args;
7393 va_list p;
7395 va_start (p, return_type);
7396 args = build_function_type_list_1 (true, return_type, p);
7397 va_end (p);
7399 return args;
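/* For example, the FUNCTION_TYPE of int f (double) can be obtained with

	build_function_type_list (integer_type_node, double_type_node,
				  NULL_TREE)

   while int g (const void *, ...) corresponds to

	build_varargs_function_type_list (integer_type_node,
					  const_ptr_type_node, NULL_TREE);

   both argument lists are terminated by NULL_TREE, and only the
   non-varargs form gets a trailing void_list_node appended.  */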
7402 /* Build a function type. RETURN_TYPE is the type returned by the
7403 function; VAARGS indicates whether the function takes varargs. The
7404 function takes N named arguments, the types of which are provided in
7405 ARG_TYPES. */
7407 static tree
7408 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7409 tree *arg_types)
7411 int i;
7412 tree t = vaargs ? NULL_TREE : void_list_node;
7414 for (i = n - 1; i >= 0; i--)
7415 t = tree_cons (NULL_TREE, arg_types[i], t);
7417 return build_function_type (return_type, t);
7420 /* Build a function type. RETURN_TYPE is the type returned by the
7421 function. The function takes N named arguments, the types of which
7422 are provided in ARG_TYPES. */
7424 tree
7425 build_function_type_array (tree return_type, int n, tree *arg_types)
7427 return build_function_type_array_1 (false, return_type, n, arg_types);
7430 /* Build a variable argument function type. RETURN_TYPE is the type
7431 returned by the function. The function takes N named arguments, the
7432 types of which are provided in ARG_TYPES. */
7434 tree
7435 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7437 return build_function_type_array_1 (true, return_type, n, arg_types);
7440 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7441 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7442 for the method. An implicit additional parameter (of type
7443 pointer-to-BASETYPE) is added to the ARGTYPES. */
7445 tree
7446 build_method_type_directly (tree basetype,
7447 tree rettype,
7448 tree argtypes)
7450 tree t;
7451 tree ptype;
7452 bool any_structural_p, any_noncanonical_p;
7453 tree canon_argtypes;
7455 /* Make a node of the sort we want. */
7456 t = make_node (METHOD_TYPE);
7458 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7459 TREE_TYPE (t) = rettype;
7460 ptype = build_pointer_type (basetype);
7462 /* The actual arglist for this function includes a "hidden" argument
7463 which is "this". Put it into the list of argument types. */
7464 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7465 TYPE_ARG_TYPES (t) = argtypes;
7467 /* If we already have such a type, use the old one. */
7468 hashval_t hash = type_hash_canon_hash (t);
7469 t = type_hash_canon (hash, t);
7471 /* Set up the canonical type. */
7472 any_structural_p
7473 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7474 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7475 any_noncanonical_p
7476 = (TYPE_CANONICAL (basetype) != basetype
7477 || TYPE_CANONICAL (rettype) != rettype);
7478 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7479 &any_structural_p,
7480 &any_noncanonical_p);
7481 if (any_structural_p)
7482 SET_TYPE_STRUCTURAL_EQUALITY (t);
7483 else if (any_noncanonical_p)
7484 TYPE_CANONICAL (t)
7485 = build_method_type_directly (TYPE_CANONICAL (basetype),
7486 TYPE_CANONICAL (rettype),
7487 canon_argtypes);
7488 if (!COMPLETE_TYPE_P (t))
7489 layout_type (t);
7491 return t;
7494 /* Construct, lay out and return the type of methods belonging to class
7495 BASETYPE and whose arguments and values are described by TYPE.
7496 If that type exists already, reuse it.
7497 TYPE must be a FUNCTION_TYPE node. */
7499 tree
7500 build_method_type (tree basetype, tree type)
7502 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7504 return build_method_type_directly (basetype,
7505 TREE_TYPE (type),
7506 TYPE_ARG_TYPES (type));
7509 /* Construct, lay out and return the type of offsets to a value
7510 of type TYPE, within an object of type BASETYPE.
7511 If a suitable offset type exists already, reuse it. */
7513 tree
7514 build_offset_type (tree basetype, tree type)
7516 tree t;
7518 /* Make a node of the sort we want. */
7519 t = make_node (OFFSET_TYPE);
7521 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7522 TREE_TYPE (t) = type;
7524 /* If we already have such a type, use the old one. */
7525 hashval_t hash = type_hash_canon_hash (t);
7526 t = type_hash_canon (hash, t);
7528 if (!COMPLETE_TYPE_P (t))
7529 layout_type (t);
7531 if (TYPE_CANONICAL (t) == t)
7533 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7534 || TYPE_STRUCTURAL_EQUALITY_P (type))
7535 SET_TYPE_STRUCTURAL_EQUALITY (t);
7536 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
7537 || TYPE_CANONICAL (type) != type)
7538 TYPE_CANONICAL (t)
7539 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
7540 TYPE_CANONICAL (type));
7543 return t;
7546 /* Create a complex type whose components are COMPONENT_TYPE.
7548 If NAMED is true, the type is given a TYPE_NAME. We do not always
7549 do so because this creates a DECL node and thus makes the DECL_UIDs
7550 dependent on the type canonicalization hashtable, which is GC-ed,
7551 so the DECL_UIDs would not be stable wrt garbage collection. */
7553 tree
7554 build_complex_type (tree component_type, bool named)
7556 gcc_assert (INTEGRAL_TYPE_P (component_type)
7557 || SCALAR_FLOAT_TYPE_P (component_type)
7558 || FIXED_POINT_TYPE_P (component_type));
7560 /* Make a node of the sort we want. */
7561 tree probe = make_node (COMPLEX_TYPE);
7563 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
7565 /* If we already have such a type, use the old one. */
7566 hashval_t hash = type_hash_canon_hash (probe);
7567 tree t = type_hash_canon (hash, probe);
7569 if (t == probe)
7571 /* We created a new type. The hash insertion will have laid
7572 out the type. We need to check the canonicalization and
7573 maybe set the name. */
7574 gcc_checking_assert (COMPLETE_TYPE_P (t)
7575 && !TYPE_NAME (t)
7576 && TYPE_CANONICAL (t) == t);
7578 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
7579 SET_TYPE_STRUCTURAL_EQUALITY (t);
7580 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
7581 TYPE_CANONICAL (t)
7582 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
7584 /* We need to create a name, since complex is a fundamental type. */
7585 if (named)
7587 const char *name = NULL;
7589 if (TREE_TYPE (t) == char_type_node)
7590 name = "complex char";
7591 else if (TREE_TYPE (t) == signed_char_type_node)
7592 name = "complex signed char";
7593 else if (TREE_TYPE (t) == unsigned_char_type_node)
7594 name = "complex unsigned char";
7595 else if (TREE_TYPE (t) == short_integer_type_node)
7596 name = "complex short int";
7597 else if (TREE_TYPE (t) == short_unsigned_type_node)
7598 name = "complex short unsigned int";
7599 else if (TREE_TYPE (t) == integer_type_node)
7600 name = "complex int";
7601 else if (TREE_TYPE (t) == unsigned_type_node)
7602 name = "complex unsigned int";
7603 else if (TREE_TYPE (t) == long_integer_type_node)
7604 name = "complex long int";
7605 else if (TREE_TYPE (t) == long_unsigned_type_node)
7606 name = "complex long unsigned int";
7607 else if (TREE_TYPE (t) == long_long_integer_type_node)
7608 name = "complex long long int";
7609 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
7610 name = "complex long long unsigned int";
7612 if (name != NULL)
7613 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
7614 get_identifier (name), t);
7618 return build_qualified_type (t, TYPE_QUALS (component_type));
7621 /* If TYPE is a real or complex floating-point type and the target
7622 does not directly support arithmetic on TYPE then return the wider
7623 type to be used for arithmetic on TYPE. Otherwise, return
7624 NULL_TREE. */
7626 tree
7627 excess_precision_type (tree type)
7629 /* The target can give two different responses to the question of
7630 which excess precision mode it would like depending on whether we
7631 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7633 enum excess_precision_type requested_type
7634 = (flag_excess_precision == EXCESS_PRECISION_FAST
7635 ? EXCESS_PRECISION_TYPE_FAST
7636 : EXCESS_PRECISION_TYPE_STANDARD);
7638 enum flt_eval_method target_flt_eval_method
7639 = targetm.c.excess_precision (requested_type);
7641 /* The target should not ask for unpredictable float evaluation (though
7642 it might advertise that implicitly the evaluation is unpredictable,
7643 but we don't care about that here, it will have been reported
7644 elsewhere). If it does ask for unpredictable evaluation, we have
7645 nothing to do here. */
7646 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7648 /* Nothing to do. The target has asked for all types we know about
7649 to be computed with their native precision and range. */
7650 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7651 return NULL_TREE;
7653 /* The target will promote this type in a target-dependent way, so excess
7654 precision ought to leave it alone. */
7655 if (targetm.promoted_type (type) != NULL_TREE)
7656 return NULL_TREE;
7658 machine_mode float16_type_mode = (float16_type_node
7659 ? TYPE_MODE (float16_type_node)
7660 : VOIDmode);
7661 machine_mode float_type_mode = TYPE_MODE (float_type_node);
7662 machine_mode double_type_mode = TYPE_MODE (double_type_node);
7664 switch (TREE_CODE (type))
7666 case REAL_TYPE:
7668 machine_mode type_mode = TYPE_MODE (type);
7669 switch (target_flt_eval_method)
7671 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7672 if (type_mode == float16_type_mode)
7673 return float_type_node;
7674 break;
7675 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7676 if (type_mode == float16_type_mode
7677 || type_mode == float_type_mode)
7678 return double_type_node;
7679 break;
7680 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7681 if (type_mode == float16_type_mode
7682 || type_mode == float_type_mode
7683 || type_mode == double_type_mode)
7684 return long_double_type_node;
7685 break;
7686 default:
7687 gcc_unreachable ();
7689 break;
7691 case COMPLEX_TYPE:
7693 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7694 return NULL_TREE;
7695 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7696 switch (target_flt_eval_method)
7698 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7699 if (type_mode == float16_type_mode)
7700 return complex_float_type_node;
7701 break;
7702 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7703 if (type_mode == float16_type_mode
7704 || type_mode == float_type_mode)
7705 return complex_double_type_node;
7706 break;
7707 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7708 if (type_mode == float16_type_mode
7709 || type_mode == float_type_mode
7710 || type_mode == double_type_mode)
7711 return complex_long_double_type_node;
7712 break;
7713 default:
7714 gcc_unreachable ();
7716 break;
7718 default:
7719 break;
7722 return NULL_TREE;
7725 /* Return OP, stripped of any conversions to wider types as much as is safe.
7726 Converting the value back to OP's type makes a value equivalent to OP.
7728 If FOR_TYPE is nonzero, we return a value which, if converted to
7729 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7731 OP must have integer, real or enumeral type. Pointers are not allowed!
7733 There are some cases where the obvious value we could return
7734 would regenerate to OP if converted to OP's type,
7735 but would not extend like OP to wider types.
7736 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7737 For example, if OP is (unsigned short)(signed char)-1,
7738 we avoid returning (signed char)-1 if FOR_TYPE is int,
7739 even though extending that to an unsigned short would regenerate OP,
7740 since the result of extending (signed char)-1 to (int)
7741 is different from (int) OP. */
7743 tree
7744 get_unwidened (tree op, tree for_type)
7746 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7747 tree type = TREE_TYPE (op);
7748 unsigned final_prec
7749 = TYPE_PRECISION (for_type != 0 ? for_type : type);
7750 int uns
7751 = (for_type != 0 && for_type != type
7752 && final_prec > TYPE_PRECISION (type)
7753 && TYPE_UNSIGNED (type));
7754 tree win = op;
7756 while (CONVERT_EXPR_P (op))
7758 int bitschange;
7760 /* TYPE_PRECISION on vector types has different meaning
7761 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7762 so avoid them here. */
7763 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
7764 break;
7766 bitschange = TYPE_PRECISION (TREE_TYPE (op))
7767 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
7769 /* Truncations are many-one so cannot be removed.
7770 Unless we are later going to truncate down even farther. */
7771 if (bitschange < 0
7772 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
7773 break;
7775 /* See what's inside this conversion. If we decide to strip it,
7776 we will set WIN. */
7777 op = TREE_OPERAND (op, 0);
7779 /* If we have not stripped any zero-extensions (uns is 0),
7780 we can strip any kind of extension.
7781 If we have previously stripped a zero-extension,
7782 only zero-extensions can safely be stripped.
7783 Any extension can be stripped if the bits it would produce
7784 are all going to be discarded later by truncating to FOR_TYPE. */
7786 if (bitschange > 0)
7788 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
7789 win = op;
7790 /* TYPE_UNSIGNED says whether this is a zero-extension.
7791 Let's avoid computing it if it does not affect WIN
7792 and if UNS will not be needed again. */
7793 if ((uns
7794 || CONVERT_EXPR_P (op))
7795 && TYPE_UNSIGNED (TREE_TYPE (op)))
7797 uns = 1;
7798 win = op;
7803 /* If we finally reach a constant, see if it fits in something smaller and
7804 in that case convert it. */
7805 if (TREE_CODE (win) == INTEGER_CST)
7807 tree wtype = TREE_TYPE (win);
7808 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
7809 if (for_type)
7810 prec = MAX (prec, final_prec);
7811 if (prec < TYPE_PRECISION (wtype))
7813 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
7814 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
7815 win = fold_convert (t, win);
7819 return win;
7822 /* Return OP or a simpler expression for a narrower value
7823 which can be sign-extended or zero-extended to give back OP.
7824 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
7825 or 0 if the value should be sign-extended. */
7827 tree
7828 get_narrower (tree op, int *unsignedp_ptr)
7830 int uns = 0;
7831 int first = 1;
7832 tree win = op;
7833 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
7835 if (TREE_CODE (op) == COMPOUND_EXPR)
7838 op = TREE_OPERAND (op, 1);
7839 while (TREE_CODE (op) == COMPOUND_EXPR);
7840 tree ret = get_narrower (op, unsignedp_ptr);
7841 if (ret == op)
7842 return win;
7843 auto_vec <tree, 16> v;
7844 unsigned int i;
7845 for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
7846 op = TREE_OPERAND (op, 1))
7847 v.safe_push (op);
7848 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
7849 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
7850 TREE_TYPE (ret), TREE_OPERAND (op, 0),
7851 ret);
7852 return ret;
7854 while (TREE_CODE (op) == NOP_EXPR)
7856 int bitschange
7857 = (TYPE_PRECISION (TREE_TYPE (op))
7858 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
7860 /* Truncations are many-one so cannot be removed. */
7861 if (bitschange < 0)
7862 break;
7864 /* See what's inside this conversion. If we decide to strip it,
7865 we will set WIN. */
7867 if (bitschange > 0)
7869 op = TREE_OPERAND (op, 0);
7870 /* An extension: the outermost one can be stripped,
7871 but remember whether it is zero or sign extension. */
7872 if (first)
7873 uns = TYPE_UNSIGNED (TREE_TYPE (op));
7874 /* Otherwise, if a sign extension has been stripped,
7875 only sign extensions can now be stripped;
7876 if a zero extension has been stripped, only zero-extensions. */
7877 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
7878 break;
7879 first = 0;
7881 else /* bitschange == 0 */
7883 /* A change in nominal type can always be stripped, but we must
7884 preserve the unsignedness. */
7885 if (first)
7886 uns = TYPE_UNSIGNED (TREE_TYPE (op));
7887 first = 0;
7888 op = TREE_OPERAND (op, 0);
7889 /* Keep trying to narrow, but don't assign op to win if it
7890 would turn an integral type into something else. */
7891 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
7892 continue;
7895 win = op;
7898 if (TREE_CODE (op) == COMPONENT_REF
7899 /* Since type_for_size always gives an integer type. */
7900 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
7901 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
7902 /* Ensure field is laid out already. */
7903 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
7904 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
7906 unsigned HOST_WIDE_INT innerprec
7907 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
7908 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
7909 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
7910 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
7912 /* We can get this structure field in a narrower type that fits it,
7913 but the resulting extension to its nominal type (a fullword type)
7914 must satisfy the same conditions as for other extensions.
7916 Do this only for fields that are aligned (not bit-fields),
7917 because when bit-field insns will be used there is no
7918 advantage in doing this. */
7920 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
7921 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
7922 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
7923 && type != 0)
7925 if (first)
7926 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
7927 win = fold_convert (type, op);
7931 *unsignedp_ptr = uns;
7932 return win;
7935 /* Return true if integer constant C has a value that is permissible
7936 for TYPE, an integral type. */
7938 bool
7939 int_fits_type_p (const_tree c, const_tree type)
7941 tree type_low_bound, type_high_bound;
7942 bool ok_for_low_bound, ok_for_high_bound;
7943 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
7945 /* Non-standard boolean types can have arbitrary precision but various
7946 transformations assume that they can only take values 0 and +/-1. */
7947 if (TREE_CODE (type) == BOOLEAN_TYPE)
7948 return wi::fits_to_boolean_p (wi::to_wide (c), type);
7950 retry:
7951 type_low_bound = TYPE_MIN_VALUE (type);
7952 type_high_bound = TYPE_MAX_VALUE (type);
7954 /* If at least one bound of the type is a constant integer, we can check
7955 ourselves and maybe make a decision. If no such decision is possible, but
7956 this type is a subtype, try checking against that. Otherwise, use
7957 fits_to_tree_p, which checks against the precision.
7959 Compute the status for each possibly constant bound, and return if we see
7960 one does not match.  OK_FOR_LOW_BOUND and OK_FOR_HIGH_BOUND record, for
7961 each bound, whether it is a constant that C is known to satisfy; a
7962 constant bound that C fails causes an immediate return of false.  */
7964 /* Check if c >= type_low_bound. */
7965 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
7967 if (tree_int_cst_lt (c, type_low_bound))
7968 return false;
7969 ok_for_low_bound = true;
7971 else
7972 ok_for_low_bound = false;
7974 /* Check if c <= type_high_bound. */
7975 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
7977 if (tree_int_cst_lt (type_high_bound, c))
7978 return false;
7979 ok_for_high_bound = true;
7981 else
7982 ok_for_high_bound = false;
7984 /* If the constant fits both bounds, the result is known. */
7985 if (ok_for_low_bound && ok_for_high_bound)
7986 return true;
7988 /* Perform some generic filtering which may allow making a decision
7989 even if the bounds are not constant. First, negative integers
7990 never fit in unsigned types, */
7991 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
7992 return false;
7994 /* Second, narrower types always fit in wider ones. */
7995 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
7996 return true;
7998 /* Third, unsigned integers with top bit set never fit signed types. */
7999 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8001 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8002 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8004 /* When a tree_cst is converted to a wide-int, the precision
8005 is taken from the type. However, if the precision of the
8006 mode underneath the type is smaller than that, it is
8007 possible that the value will not fit. The test below
8008 fails if any bit is set between the sign bit of the
8009 underlying mode and the top bit of the type. */
8010 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8011 return false;
8013 else if (wi::neg_p (wi::to_wide (c)))
8014 return false;
8017 /* If we haven't been able to decide at this point, there is nothing more we
8018 can check ourselves here. Look at the base type if we have one and it
8019 has the same precision. */
8020 if (TREE_CODE (type) == INTEGER_TYPE
8021 && TREE_TYPE (type) != 0
8022 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8024 type = TREE_TYPE (type);
8025 goto retry;
8028 /* Or to fits_to_tree_p, if nothing else. */
8029 return wi::fits_to_tree_p (wi::to_wide (c), type);
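/* For example, for a constant C built as build_int_cst (integer_type_node,
   300), int_fits_type_p (C, unsigned_char_type_node) is false because 300
   exceeds the high bound 255, while
   int_fits_type_p (C, short_integer_type_node) is true.  */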
8032 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8033 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8034 represented (assuming two's-complement arithmetic) within the bit
8035 precision of the type are returned instead. */
8037 void
8038 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8040 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8041 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8042 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8043 else
8045 if (TYPE_UNSIGNED (type))
8046 mpz_set_ui (min, 0);
8047 else
8049 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8050 wi::to_mpz (mn, min, SIGNED);
8054 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8055 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8056 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8057 else
8059 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8060 wi::to_mpz (mn, max, TYPE_SIGN (type));
8064 /* Return true if VAR is an automatic variable. */
8066 bool
8067 auto_var_p (const_tree var)
8069 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8070 || TREE_CODE (var) == PARM_DECL)
8071 && ! TREE_STATIC (var))
8072 || TREE_CODE (var) == RESULT_DECL);
8075 /* Return true if VAR is an automatic variable defined in function FN. */
8077 bool
8078 auto_var_in_fn_p (const_tree var, const_tree fn)
8080 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8081 && (auto_var_p (var)
8082 || TREE_CODE (var) == LABEL_DECL));
8085 /* Subprogram of following function. Called by walk_tree.
8087 Return *TP if it is an automatic variable or parameter of the
8088 function passed in as DATA. */
8090 static tree
8091 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8093 tree fn = (tree) data;
8095 if (TYPE_P (*tp))
8096 *walk_subtrees = 0;
8098 else if (DECL_P (*tp)
8099 && auto_var_in_fn_p (*tp, fn))
8100 return *tp;
8102 return NULL_TREE;
8105 /* Returns true if T is, contains, or refers to a type with variable
8106 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8107 arguments, but not the return type. If FN is nonzero, only return
8108 true if a modifier of the type or position of FN is a variable or
8109 parameter inside FN.
8111 This concept is more general than that of C99 'variably modified types':
8112 in C99, a struct type is never variably modified because a VLA may not
8113 appear as a structure member. However, in GNU C code like:
8115 struct S { int i[f()]; };
8117 is valid, and other languages may define similar constructs. */
8119 bool
8120 variably_modified_type_p (tree type, tree fn)
8122 tree t;
8124 /* Test if T is either variable (if FN is zero) or an expression containing
8125 a variable in FN. If TYPE isn't gimplified, return true also if
8126 gimplify_one_sizepos would gimplify the expression into a local
8127 variable. */
8128 #define RETURN_TRUE_IF_VAR(T) \
8129 do { tree _t = (T); \
8130 if (_t != NULL_TREE \
8131 && _t != error_mark_node \
8132 && !CONSTANT_CLASS_P (_t) \
8133 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8134 && (!fn \
8135 || (!TYPE_SIZES_GIMPLIFIED (type) \
8136 && (TREE_CODE (_t) != VAR_DECL \
8137 && !CONTAINS_PLACEHOLDER_P (_t))) \
8138 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8139 return true; } while (0)
8141 if (type == error_mark_node)
8142 return false;
8144 /* If TYPE itself has variable size, it is variably modified. */
8145 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8146 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8148 switch (TREE_CODE (type))
8150 case POINTER_TYPE:
8151 case REFERENCE_TYPE:
8152 case VECTOR_TYPE:
8153 /* Ada can have pointer types referring to themselves indirectly. */
8154 if (TREE_VISITED (type))
8155 return false;
8156 TREE_VISITED (type) = true;
8157 if (variably_modified_type_p (TREE_TYPE (type), fn))
8159 TREE_VISITED (type) = false;
8160 return true;
8162 TREE_VISITED (type) = false;
8163 break;
8165 case FUNCTION_TYPE:
8166 case METHOD_TYPE:
8167 /* If TYPE is a function type, it is variably modified if the
8168 return type is variably modified. */
8169 if (variably_modified_type_p (TREE_TYPE (type), fn))
8170 return true;
8171 break;
8173 case INTEGER_TYPE:
8174 case REAL_TYPE:
8175 case FIXED_POINT_TYPE:
8176 case ENUMERAL_TYPE:
8177 case BOOLEAN_TYPE:
8178 /* Scalar types are variably modified if their end points
8179 aren't constant. */
8180 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8181 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8182 break;
8184 case RECORD_TYPE:
8185 case UNION_TYPE:
8186 case QUAL_UNION_TYPE:
8187 /* We can't see if any of the fields are variably-modified by the
8188 definition we normally use, since that would produce infinite
8189 recursion via pointers. */
8190 /* This is variably modified if some field's type is. */
8191 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8192 if (TREE_CODE (t) == FIELD_DECL)
8194 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8195 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8196 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8198 /* If the type is a qualified union, then the DECL_QUALIFIER
8199 of fields can also be an expression containing a variable. */
8200 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8201 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8203 /* If the field is a qualified union, then it's only a container
8204 for what's inside so we look into it. That's necessary in LTO
8205 mode because the sizes of the field tested above have been set
8206 to PLACEHOLDER_EXPRs by free_lang_data. */
8207 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
8208 && variably_modified_type_p (TREE_TYPE (t), fn))
8209 return true;
8211 break;
8213 case ARRAY_TYPE:
8214 /* Do not call ourselves to avoid infinite recursion. This is
8215 variably modified if the element type is. */
8216 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8217 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8218 break;
8220 default:
8221 break;
8224 /* The current language may have other cases to check, but in general,
8225 all other types are not variably modified. */
8226 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8228 #undef RETURN_TRUE_IF_VAR
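/* Illustrative sketch, not part of the original file: a GNU C array type
   whose bound is a local variable (a VLA) has a non-constant TYPE_SIZE,
   so the predicate above reports it as variably modified.  Passing the
   enclosing FUNCTION_DECL restricts the check to that function.  */
static bool
example_is_vla_type (tree array_type, tree fndecl)
{
  return variably_modified_type_p (array_type, fndecl);
}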
8231 /* Given a DECL or TYPE, return the scope in which it was declared, or
8232 NULL_TREE if there is no containing scope. */
8234 tree
8235 get_containing_scope (const_tree t)
8237 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8240 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8242 const_tree
8243 get_ultimate_context (const_tree decl)
8245 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8247 if (TREE_CODE (decl) == BLOCK)
8248 decl = BLOCK_SUPERCONTEXT (decl);
8249 else
8250 decl = get_containing_scope (decl);
8252 return decl;
8255 /* Return the innermost context enclosing DECL that is
8256 a FUNCTION_DECL, or zero if none. */
8258 tree
8259 decl_function_context (const_tree decl)
8261 tree context;
8263 if (TREE_CODE (decl) == ERROR_MARK)
8264 return 0;
8266 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8267 where we look up the function at runtime. Such functions always take
8268 a first argument of type 'pointer to real context'.
8270 C++ should really be fixed to use DECL_CONTEXT for the real context,
8271 and use something else for the "virtual context". */
8272 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8273 context
8274 = TYPE_MAIN_VARIANT
8275 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8276 else
8277 context = DECL_CONTEXT (decl);
8279 while (context && TREE_CODE (context) != FUNCTION_DECL)
8281 if (TREE_CODE (context) == BLOCK)
8282 context = BLOCK_SUPERCONTEXT (context);
8283 else
8284 context = get_containing_scope (context);
8287 return context;
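/* Illustrative sketch, not part of the original file: a declaration is
   "nested" when some FUNCTION_DECL encloses it, which is what the
   nested-function lowering machinery needs to know.  */
static bool
example_is_nested_decl (const_tree decl)
{
  return decl_function_context (decl) != NULL_TREE;
}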
8290 /* Return the innermost context enclosing DECL that is
8291 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8292 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8294 tree
8295 decl_type_context (const_tree decl)
8297 tree context = DECL_CONTEXT (decl);
8299 while (context)
8300 switch (TREE_CODE (context))
8302 case NAMESPACE_DECL:
8303 case TRANSLATION_UNIT_DECL:
8304 return NULL_TREE;
8306 case RECORD_TYPE:
8307 case UNION_TYPE:
8308 case QUAL_UNION_TYPE:
8309 return context;
8311 case TYPE_DECL:
8312 case FUNCTION_DECL:
8313 context = DECL_CONTEXT (context);
8314 break;
8316 case BLOCK:
8317 context = BLOCK_SUPERCONTEXT (context);
8318 break;
8320 default:
8321 gcc_unreachable ();
8324 return NULL_TREE;
8327 /* CALL is a CALL_EXPR. Return the declaration for the function
8328 called, or NULL_TREE if the called function cannot be
8329 determined. */
8331 tree
8332 get_callee_fndecl (const_tree call)
8334 tree addr;
8336 if (call == error_mark_node)
8337 return error_mark_node;
8339 /* It's invalid to call this function with anything but a
8340 CALL_EXPR. */
8341 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8343 /* The first operand to the CALL is the address of the function
8344 called. */
8345 addr = CALL_EXPR_FN (call);
8347 /* If there is no function, return early. */
8348 if (addr == NULL_TREE)
8349 return NULL_TREE;
8351 STRIP_NOPS (addr);
8353 /* If this is a readonly function pointer, extract its initial value. */
8354 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8355 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8356 && DECL_INITIAL (addr))
8357 addr = DECL_INITIAL (addr);
8359 /* If the address is just `&f' for some function `f', then we know
8360 that `f' is being called. */
8361 if (TREE_CODE (addr) == ADDR_EXPR
8362 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8363 return TREE_OPERAND (addr, 0);
8365 /* We couldn't figure out what was being called. */
8366 return NULL_TREE;
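/* Illustrative sketch, not part of the original file: resolving a direct
   call and testing whether its target is declared 'const' (recorded as
   TREE_READONLY on the FUNCTION_DECL).  */
static bool
example_call_is_const_fn (tree call)
{
  tree fndecl = get_callee_fndecl (call);
  return fndecl != NULL_TREE && TREE_READONLY (fndecl);
}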
8369 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8370 return the associated function code, otherwise return CFN_LAST. */
8372 combined_fn
8373 get_call_combined_fn (const_tree call)
8375 /* It's invalid to call this function with anything but a CALL_EXPR. */
8376 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8378 if (!CALL_EXPR_FN (call))
8379 return as_combined_fn (CALL_EXPR_IFN (call));
8381 tree fndecl = get_callee_fndecl (call);
8382 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8383 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8385 return CFN_LAST;
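/* Illustrative sketch, not part of the original file: combined_fn codes
   let callers test built-in and internal calls uniformly; the comparison
   against CFN_BUILT_IN_MEMCPY is an assumed example.  */
static bool
example_call_is_memcpy (const_tree call)
{
  return get_call_combined_fn (call) == CFN_BUILT_IN_MEMCPY;
}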
8388 /* Comparator of indices based on tree_node_counts. */
8390 static int
8391 tree_nodes_cmp (const void *p1, const void *p2)
8393 const unsigned *n1 = (const unsigned *)p1;
8394 const unsigned *n2 = (const unsigned *)p2;
8396 return tree_node_counts[*n1] - tree_node_counts[*n2];
8399 /* Comparator of indices based on tree_code_counts. */
8401 static int
8402 tree_codes_cmp (const void *p1, const void *p2)
8404 const unsigned *n1 = (const unsigned *)p1;
8405 const unsigned *n2 = (const unsigned *)p2;
8407 return tree_code_counts[*n1] - tree_code_counts[*n2];
8410 #define TREE_MEM_USAGE_SPACES 40
8412 /* Print debugging information about tree nodes generated during the compile,
8413 and any language-specific information. */
8415 void
8416 dump_tree_statistics (void)
8418 if (GATHER_STATISTICS)
8420 uint64_t total_nodes, total_bytes;
8421 fprintf (stderr, "\nKind Nodes Bytes\n");
8422 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8423 total_nodes = total_bytes = 0;
8426 auto_vec<unsigned> indices (all_kinds);
8427 for (unsigned i = 0; i < all_kinds; i++)
8428 indices.quick_push (i);
8429 indices.qsort (tree_nodes_cmp);
8431 for (unsigned i = 0; i < (int) all_kinds; i++)
8433 unsigned j = indices[i];
8434 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8435 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8436 SIZE_AMOUNT (tree_node_sizes[j]));
8437 total_nodes += tree_node_counts[j];
8438 total_bytes += tree_node_sizes[j];
8440 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8441 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8442 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8443 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8447 fprintf (stderr, "Code Nodes\n");
8448 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8450 auto_vec<unsigned> indices (MAX_TREE_CODES);
8451 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8452 indices.quick_push (i);
8453 indices.qsort (tree_codes_cmp);
8455 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8457 unsigned j = indices[i];
8458 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8459 get_tree_code_name ((enum tree_code) j),
8460 SIZE_AMOUNT (tree_code_counts[j]));
8462 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8463 fprintf (stderr, "\n");
8464 ssanames_print_statistics ();
8465 fprintf (stderr, "\n");
8466 phinodes_print_statistics ();
8467 fprintf (stderr, "\n");
8470 else
8471 fprintf (stderr, "(No per-node statistics)\n");
8473 print_type_hash_statistics ();
8474 print_debug_expr_statistics ();
8475 print_value_expr_statistics ();
8476 lang_hooks.print_statistics ();
8479 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8481 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8483 unsigned
8484 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8486 /* This relies on the raw feedback's top 4 bits being zero. */
8487 #define FEEDBACK(X) ((X) * 0x04c11db7)
8488 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8489 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8490 static const unsigned syndromes[16] =
8492 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8493 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8494 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8495 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8497 #undef FEEDBACK
8498 #undef SYNDROME
8500 value <<= (32 - bytes * 8);
8501 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8503 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8505 chksum = (chksum << 4) ^ feedback;
8508 return chksum;
8511 /* Generate a crc32 of a string. */
8513 unsigned
8514 crc32_string (unsigned chksum, const char *string)
8516 do
8517 chksum = crc32_byte (chksum, *string);
8518 while (*string++);
8519 return chksum;
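/* Illustrative sketch, not part of the original file: mixing a string and
   a 32-bit value into one checksum, much like the randomized file-level
   symbol names built further below.  crc32_unsigned is the 4-byte wrapper
   declared in tree.h.  */
static unsigned
example_mixed_crc (const char *name, unsigned value)
{
  unsigned chksum = crc32_string (0, name);
  return crc32_unsigned (chksum, value);
}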
8522 /* P is a string that will be used in a symbol. Mask out any characters
8523 that are not valid in that context. */
8525 void
8526 clean_symbol_name (char *p)
8528 for (; *p; p++)
8529 if (! (ISALNUM (*p)
8530 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8531 || *p == '$'
8532 #endif
8533 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8534 || *p == '.'
8535 #endif
8537 *p = '_';
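/* Illustrative sketch, not part of the original file: sanitizing a file
   name for use in an assembler label.  On hosts where NO_DOT_IN_LABEL is
   not defined, the '.' survives while the space and '-' become '_'.  */
static void
example_clean (void)
{
  char buf[] = "my file-1.c";
  clean_symbol_name (buf);  /* Now "my_file_1.c".  */
}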
8540 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8542 /* Create a unique anonymous identifier. The identifier is still a
8543 valid assembly label. */
8545 tree
8546 make_anon_name ()
8548 const char *fmt =
8549 #if !defined (NO_DOT_IN_LABEL)
8551 #elif !defined (NO_DOLLAR_IN_LABEL)
8553 #else
8555 #endif
8556 "_anon_%d";
8558 char buf[24];
8559 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8560 gcc_checking_assert (len < int (sizeof (buf)));
8562 tree id = get_identifier_with_length (buf, len);
8563 IDENTIFIER_ANON_P (id) = true;
8565 return id;
8568 /* Generate a name for a special-purpose function.
8569 The generated name may need to be unique across the whole link.
8570 Changes to this function may also require corresponding changes to
8571 xstrdup_mask_random.
8572 TYPE is some string to identify the purpose of this function to the
8573 linker or collect2; it must start with an uppercase letter,
8574 one of:
8575 I - for constructors
8576 D - for destructors
8577 N - for C++ anonymous namespaces
8578 F - for DWARF unwind frame information. */
8580 tree
8581 get_file_function_name (const char *type)
8583 char *buf;
8584 const char *p;
8585 char *q;
8587 /* If we already have a name we know to be unique, just use that. */
8588 if (first_global_object_name)
8589 p = q = ASTRDUP (first_global_object_name);
8590 /* If the target is handling the constructors/destructors, they
8591 will be local to this file and the name is only necessary for
8592 debugging purposes.
8593 We also assign sub_I and sub_D suffixes to constructors called from
8594 the global static constructors. These are always local. */
8595 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8596 || (startswith (type, "sub_")
8597 && (type[4] == 'I' || type[4] == 'D')))
8599 const char *file = main_input_filename;
8600 if (! file)
8601 file = LOCATION_FILE (input_location);
8602 /* Just use the file's basename, because the full pathname
8603 might be quite long. */
8604 p = q = ASTRDUP (lbasename (file));
8606 else
8608 /* Otherwise, the name must be unique across the entire link.
8609 We don't have anything that we know to be unique to this translation
8610 unit, so use what we do have and throw in some randomness. */
8611 unsigned len;
8612 const char *name = weak_global_object_name;
8613 const char *file = main_input_filename;
8615 if (! name)
8616 name = "";
8617 if (! file)
8618 file = LOCATION_FILE (input_location);
8620 len = strlen (file);
8621 q = (char *) alloca (9 + 19 + len + 1);
8622 memcpy (q, file, len + 1);
8624 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8625 crc32_string (0, name), get_random_seed (false));
8627 p = q;
8630 clean_symbol_name (q);
8631 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8632 + strlen (type));
8634 /* Set up the name of the file-level functions we may need.
8635 Use a global object (which is already required to be unique over
8636 the program) rather than the file name (which imposes extra
8637 constraints). */
8638 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8640 return get_identifier (buf);
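/* Illustrative sketch, not part of the original file: requesting the
   identifier for a static-constructor helper.  With FILE_FUNCTION_FORMAT
   above, the result is "_GLOBAL__I_" followed by a cleaned string that is
   unique enough for the link.  */
static tree
example_ctor_identifier (void)
{
  return get_file_function_name ("I");
}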
8643 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8645 /* Complain that the tree code of NODE does not match the expected 0
8646 terminated list of trailing codes. The trailing code list can be
8647 empty, for a more vague error message. FILE, LINE, and FUNCTION
8648 are of the caller. */
8650 void
8651 tree_check_failed (const_tree node, const char *file,
8652 int line, const char *function, ...)
8654 va_list args;
8655 const char *buffer;
8656 unsigned length = 0;
8657 enum tree_code code;
8659 va_start (args, function);
8660 while ((code = (enum tree_code) va_arg (args, int)))
8661 length += 4 + strlen (get_tree_code_name (code));
8662 va_end (args);
8663 if (length)
8665 char *tmp;
8666 va_start (args, function);
8667 length += strlen ("expected ");
8668 buffer = tmp = (char *) alloca (length);
8669 length = 0;
8670 while ((code = (enum tree_code) va_arg (args, int)))
8672 const char *prefix = length ? " or " : "expected ";
8674 strcpy (tmp + length, prefix);
8675 length += strlen (prefix);
8676 strcpy (tmp + length, get_tree_code_name (code));
8677 length += strlen (get_tree_code_name (code));
8679 va_end (args);
8681 else
8682 buffer = "unexpected node";
8684 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8685 buffer, get_tree_code_name (TREE_CODE (node)),
8686 function, trim_filename (file), line);
8689 /* Complain that the tree code of NODE does match the expected 0
8690 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8691 the caller. */
8693 void
8694 tree_not_check_failed (const_tree node, const char *file,
8695 int line, const char *function, ...)
8697 va_list args;
8698 char *buffer;
8699 unsigned length = 0;
8700 enum tree_code code;
8702 va_start (args, function);
8703 while ((code = (enum tree_code) va_arg (args, int)))
8704 length += 4 + strlen (get_tree_code_name (code));
8705 va_end (args);
8706 va_start (args, function);
8707 buffer = (char *) alloca (length);
8708 length = 0;
8709 while ((code = (enum tree_code) va_arg (args, int)))
8711 if (length)
8713 strcpy (buffer + length, " or ");
8714 length += 4;
8716 strcpy (buffer + length, get_tree_code_name (code));
8717 length += strlen (get_tree_code_name (code));
8719 va_end (args);
8721 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8722 buffer, get_tree_code_name (TREE_CODE (node)),
8723 function, trim_filename (file), line);
8726 /* Similar to tree_check_failed, except that we check for a class of tree
8727 code, given in CL. */
8729 void
8730 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8731 const char *file, int line, const char *function)
8733 internal_error
8734 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8735 TREE_CODE_CLASS_STRING (cl),
8736 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8737 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8740 /* Similar to tree_check_failed, except that instead of specifying a
8741 dozen codes, use the knowledge that they're all sequential. */
8743 void
8744 tree_range_check_failed (const_tree node, const char *file, int line,
8745 const char *function, enum tree_code c1,
8746 enum tree_code c2)
8748 char *buffer;
8749 unsigned length = 0;
8750 unsigned int c;
8752 for (c = c1; c <= c2; ++c)
8753 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
8755 length += strlen ("expected ");
8756 buffer = (char *) alloca (length);
8757 length = 0;
8759 for (c = c1; c <= c2; ++c)
8761 const char *prefix = length ? " or " : "expected ";
8763 strcpy (buffer + length, prefix);
8764 length += strlen (prefix);
8765 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
8766 length += strlen (get_tree_code_name ((enum tree_code) c));
8769 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8770 buffer, get_tree_code_name (TREE_CODE (node)),
8771 function, trim_filename (file), line);
8775 /* Similar to tree_check_failed, except that we check that a tree does
8776 not belong to the specified class, given in CL. */
8778 void
8779 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
8780 const char *file, int line, const char *function)
8782 internal_error
8783 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8784 TREE_CODE_CLASS_STRING (cl),
8785 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8786 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8790 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
8792 void
8793 omp_clause_check_failed (const_tree node, const char *file, int line,
8794 const char *function, enum omp_clause_code code)
8796 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
8797 "in %s, at %s:%d",
8798 omp_clause_code_name[code],
8799 get_tree_code_name (TREE_CODE (node)),
8800 function, trim_filename (file), line);
8804 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
8806 void
8807 omp_clause_range_check_failed (const_tree node, const char *file, int line,
8808 const char *function, enum omp_clause_code c1,
8809 enum omp_clause_code c2)
8811 char *buffer;
8812 unsigned length = 0;
8813 unsigned int c;
8815 for (c = c1; c <= c2; ++c)
8816 length += 4 + strlen (omp_clause_code_name[c]);
8818 length += strlen ("expected ");
8819 buffer = (char *) alloca (length);
8820 length = 0;
8822 for (c = c1; c <= c2; ++c)
8824 const char *prefix = length ? " or " : "expected ";
8826 strcpy (buffer + length, prefix);
8827 length += strlen (prefix);
8828 strcpy (buffer + length, omp_clause_code_name[c]);
8829 length += strlen (omp_clause_code_name[c]);
8832 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8833 buffer, omp_clause_code_name[TREE_CODE (node)],
8834 function, trim_filename (file), line);
8838 #undef DEFTREESTRUCT
8839 #define DEFTREESTRUCT(VAL, NAME) NAME,
8841 static const char *ts_enum_names[] = {
8842 #include "treestruct.def"
8844 #undef DEFTREESTRUCT
8846 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
8848 /* Similar to tree_class_check_failed, except that we check for
8849 whether CODE contains the tree structure identified by EN. */
8851 void
8852 tree_contains_struct_check_failed (const_tree node,
8853 const enum tree_node_structure_enum en,
8854 const char *file, int line,
8855 const char *function)
8857 internal_error
8858 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
8859 TS_ENUM_NAME (en),
8860 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8864 /* Similar to above, except that the check is for the bounds of a TREE_INT_CST's
8865 (dynamically sized) vector. */
8867 void
8868 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
8869 const char *function)
8871 internal_error
8872 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
8873 "at %s:%d",
8874 idx + 1, len, function, trim_filename (file), line);
8877 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
8878 (dynamically sized) vector. */
8880 void
8881 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
8882 const char *function)
8884 internal_error
8885 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
8886 idx + 1, len, function, trim_filename (file), line);
8889 /* Similar to above, except that the check is for the bounds of the operand
8890 vector of an expression node EXP. */
8892 void
8893 tree_operand_check_failed (int idx, const_tree exp, const char *file,
8894 int line, const char *function)
8896 enum tree_code code = TREE_CODE (exp);
8897 internal_error
8898 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
8899 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
8900 function, trim_filename (file), line);
8903 /* Similar to above, except that the check is for the number of
8904 operands of an OMP_CLAUSE node. */
8906 void
8907 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
8908 int line, const char *function)
8910 internal_error
8911 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
8912 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
8913 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
8914 trim_filename (file), line);
8916 #endif /* ENABLE_TREE_CHECKING */
8918 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
8919 and mapped to the machine mode MODE. Initialize its fields and build
8920 the information necessary for debugging output. */
8922 static tree
8923 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
8925 tree t;
8926 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
8928 t = make_node (VECTOR_TYPE);
8929 TREE_TYPE (t) = mv_innertype;
8930 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
8931 SET_TYPE_MODE (t, mode);
8933 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
8934 SET_TYPE_STRUCTURAL_EQUALITY (t);
8935 else if ((TYPE_CANONICAL (mv_innertype) != innertype
8936 || mode != VOIDmode)
8937 && !VECTOR_BOOLEAN_TYPE_P (t))
8938 TYPE_CANONICAL (t)
8939 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
8941 layout_type (t);
8943 hashval_t hash = type_hash_canon_hash (t);
8944 t = type_hash_canon (hash, t);
8946 /* We have built a main variant, based on the main variant of the
8947 inner type. Use it to build the variant we return. */
8948 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
8949 && TREE_TYPE (t) != innertype)
8950 return build_type_attribute_qual_variant (t,
8951 TYPE_ATTRIBUTES (innertype),
8952 TYPE_QUALS (innertype));
8954 return t;
8957 static tree
8958 make_or_reuse_type (unsigned size, int unsignedp)
8960 int i;
8962 if (size == INT_TYPE_SIZE)
8963 return unsignedp ? unsigned_type_node : integer_type_node;
8964 if (size == CHAR_TYPE_SIZE)
8965 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
8966 if (size == SHORT_TYPE_SIZE)
8967 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
8968 if (size == LONG_TYPE_SIZE)
8969 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
8970 if (size == LONG_LONG_TYPE_SIZE)
8971 return (unsignedp ? long_long_unsigned_type_node
8972 : long_long_integer_type_node);
8974 for (i = 0; i < NUM_INT_N_ENTS; i ++)
8975 if (size == int_n_data[i].bitsize
8976 && int_n_enabled_p[i])
8977 return (unsignedp ? int_n_trees[i].unsigned_type
8978 : int_n_trees[i].signed_type);
8980 if (unsignedp)
8981 return make_unsigned_type (size);
8982 else
8983 return make_signed_type (size);
8986 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
8988 static tree
8989 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
8991 if (satp)
8993 if (size == SHORT_FRACT_TYPE_SIZE)
8994 return unsignedp ? sat_unsigned_short_fract_type_node
8995 : sat_short_fract_type_node;
8996 if (size == FRACT_TYPE_SIZE)
8997 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
8998 if (size == LONG_FRACT_TYPE_SIZE)
8999 return unsignedp ? sat_unsigned_long_fract_type_node
9000 : sat_long_fract_type_node;
9001 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9002 return unsignedp ? sat_unsigned_long_long_fract_type_node
9003 : sat_long_long_fract_type_node;
9005 else
9007 if (size == SHORT_FRACT_TYPE_SIZE)
9008 return unsignedp ? unsigned_short_fract_type_node
9009 : short_fract_type_node;
9010 if (size == FRACT_TYPE_SIZE)
9011 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9012 if (size == LONG_FRACT_TYPE_SIZE)
9013 return unsignedp ? unsigned_long_fract_type_node
9014 : long_fract_type_node;
9015 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9016 return unsignedp ? unsigned_long_long_fract_type_node
9017 : long_long_fract_type_node;
9020 return make_fract_type (size, unsignedp, satp);
9023 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9025 static tree
9026 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9028 if (satp)
9030 if (size == SHORT_ACCUM_TYPE_SIZE)
9031 return unsignedp ? sat_unsigned_short_accum_type_node
9032 : sat_short_accum_type_node;
9033 if (size == ACCUM_TYPE_SIZE)
9034 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9035 if (size == LONG_ACCUM_TYPE_SIZE)
9036 return unsignedp ? sat_unsigned_long_accum_type_node
9037 : sat_long_accum_type_node;
9038 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9039 return unsignedp ? sat_unsigned_long_long_accum_type_node
9040 : sat_long_long_accum_type_node;
9042 else
9044 if (size == SHORT_ACCUM_TYPE_SIZE)
9045 return unsignedp ? unsigned_short_accum_type_node
9046 : short_accum_type_node;
9047 if (size == ACCUM_TYPE_SIZE)
9048 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9049 if (size == LONG_ACCUM_TYPE_SIZE)
9050 return unsignedp ? unsigned_long_accum_type_node
9051 : long_accum_type_node;
9052 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9053 return unsignedp ? unsigned_long_long_accum_type_node
9054 : long_long_accum_type_node;
9057 return make_accum_type (size, unsignedp, satp);
9061 /* Create an atomic variant node for TYPE. This routine is called
9062 during initialization of data types to create the 5 basic atomic
9063 types. The generic build_variant_type function requires these to
9064 already be set up in order to function properly, so cannot be
9065 called from there. If ALIGN is non-zero, then ensure alignment is
9066 overridden to this value. */
9068 static tree
9069 build_atomic_base (tree type, unsigned int align)
9071 tree t;
9073 /* Make sure it's not already registered. */
9074 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9075 return t;
9077 t = build_variant_type_copy (type);
9078 set_type_quals (t, TYPE_QUAL_ATOMIC);
9080 if (align)
9081 SET_TYPE_ALIGN (t, align);
9083 return t;
9086 /* Information about the _FloatN and _FloatNx types. This must be in
9087 the same order as the corresponding TI_* enum values. */
9088 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9090 { 16, false },
9091 { 32, false },
9092 { 64, false },
9093 { 128, false },
9094 { 32, true },
9095 { 64, true },
9096 { 128, true },
9100 /* Create nodes for all integer types (and error_mark_node) using the sizes
9101 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9103 void
9104 build_common_tree_nodes (bool signed_char)
9106 int i;
9108 error_mark_node = make_node (ERROR_MARK);
9109 TREE_TYPE (error_mark_node) = error_mark_node;
9111 initialize_sizetypes ();
9113 /* Define both `signed char' and `unsigned char'. */
9114 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9115 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9116 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9117 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9119 /* Define `char', which is like either `signed char' or `unsigned char'
9120 but not the same as either. */
9121 char_type_node
9122 = (signed_char
9123 ? make_signed_type (CHAR_TYPE_SIZE)
9124 : make_unsigned_type (CHAR_TYPE_SIZE));
9125 TYPE_STRING_FLAG (char_type_node) = 1;
9127 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9128 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9129 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9130 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9131 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9132 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9133 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9134 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9136 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9138 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9139 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9141 if (int_n_enabled_p[i])
9143 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9144 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9148 /* Define a boolean type. This type only represents boolean values but
9149 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9150 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9151 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9152 TYPE_PRECISION (boolean_type_node) = 1;
9153 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9155 /* Define what type to use for size_t. */
9156 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9157 size_type_node = unsigned_type_node;
9158 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9159 size_type_node = long_unsigned_type_node;
9160 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9161 size_type_node = long_long_unsigned_type_node;
9162 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9163 size_type_node = short_unsigned_type_node;
9164 else
9166 int i;
9168 size_type_node = NULL_TREE;
9169 for (i = 0; i < NUM_INT_N_ENTS; i++)
9170 if (int_n_enabled_p[i])
9172 char name[50], altname[50];
9173 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9174 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9176 if (strcmp (name, SIZE_TYPE) == 0
9177 || strcmp (altname, SIZE_TYPE) == 0)
9179 size_type_node = int_n_trees[i].unsigned_type;
9182 if (size_type_node == NULL_TREE)
9183 gcc_unreachable ();
9186 /* Define what type to use for ptrdiff_t. */
9187 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9188 ptrdiff_type_node = integer_type_node;
9189 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9190 ptrdiff_type_node = long_integer_type_node;
9191 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9192 ptrdiff_type_node = long_long_integer_type_node;
9193 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9194 ptrdiff_type_node = short_integer_type_node;
9195 else
9197 ptrdiff_type_node = NULL_TREE;
9198 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9199 if (int_n_enabled_p[i])
9201 char name[50], altname[50];
9202 sprintf (name, "__int%d", int_n_data[i].bitsize);
9203 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9205 if (strcmp (name, PTRDIFF_TYPE) == 0
9206 || strcmp (altname, PTRDIFF_TYPE) == 0)
9207 ptrdiff_type_node = int_n_trees[i].signed_type;
9209 if (ptrdiff_type_node == NULL_TREE)
9210 gcc_unreachable ();
9213 /* Fill in the rest of the sized types. Reuse existing type nodes
9214 when possible. */
9215 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9216 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9217 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9218 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9219 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9221 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9222 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9223 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9224 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9225 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9227 /* Don't call build_qualified_type for atomics. That routine does
9228 special processing for atomics, and until they are initialized
9229 it's better not to make that call.
9231 Check to see if there is a target override for atomic types. */
9233 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9234 targetm.atomic_align_for_mode (QImode));
9235 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9236 targetm.atomic_align_for_mode (HImode));
9237 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9238 targetm.atomic_align_for_mode (SImode));
9239 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9240 targetm.atomic_align_for_mode (DImode));
9241 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9242 targetm.atomic_align_for_mode (TImode));
9244 access_public_node = get_identifier ("public");
9245 access_protected_node = get_identifier ("protected");
9246 access_private_node = get_identifier ("private");
9248 /* Define these next since types below may use them. */
9249 integer_zero_node = build_int_cst (integer_type_node, 0);
9250 integer_one_node = build_int_cst (integer_type_node, 1);
9251 integer_three_node = build_int_cst (integer_type_node, 3);
9252 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9254 size_zero_node = size_int (0);
9255 size_one_node = size_int (1);
9256 bitsize_zero_node = bitsize_int (0);
9257 bitsize_one_node = bitsize_int (1);
9258 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9260 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9261 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9263 void_type_node = make_node (VOID_TYPE);
9264 layout_type (void_type_node);
9266 /* We are not going to have real types in C with less than byte alignment,
9267 so we might as well not have any types that claim to have it. */
9268 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9269 TYPE_USER_ALIGN (void_type_node) = 0;
9271 void_node = make_node (VOID_CST);
9272 TREE_TYPE (void_node) = void_type_node;
9274 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9275 layout_type (TREE_TYPE (null_pointer_node));
9277 ptr_type_node = build_pointer_type (void_type_node);
9278 const_ptr_type_node
9279 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9280 for (unsigned i = 0;
9281 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
9282 ++i)
9283 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9285 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9287 float_type_node = make_node (REAL_TYPE);
9288 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9289 layout_type (float_type_node);
9291 double_type_node = make_node (REAL_TYPE);
9292 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9293 layout_type (double_type_node);
9295 long_double_type_node = make_node (REAL_TYPE);
9296 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9297 layout_type (long_double_type_node);
9299 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9301 int n = floatn_nx_types[i].n;
9302 bool extended = floatn_nx_types[i].extended;
9303 scalar_float_mode mode;
9304 if (!targetm.floatn_mode (n, extended).exists (&mode))
9305 continue;
9306 int precision = GET_MODE_PRECISION (mode);
9307 /* Work around the rs6000 KFmode having precision 113 not
9308 128. */
9309 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9310 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9311 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9312 if (!extended)
9313 gcc_assert (min_precision == n);
9314 if (precision < min_precision)
9315 precision = min_precision;
9316 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9317 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9318 layout_type (FLOATN_NX_TYPE_NODE (i));
9319 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9322 float_ptr_type_node = build_pointer_type (float_type_node);
9323 double_ptr_type_node = build_pointer_type (double_type_node);
9324 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9325 integer_ptr_type_node = build_pointer_type (integer_type_node);
9327 /* Fixed size integer types. */
9328 uint16_type_node = make_or_reuse_type (16, 1);
9329 uint32_type_node = make_or_reuse_type (32, 1);
9330 uint64_type_node = make_or_reuse_type (64, 1);
9331 if (targetm.scalar_mode_supported_p (TImode))
9332 uint128_type_node = make_or_reuse_type (128, 1);
9334 /* Decimal float types. */
9335 if (targetm.decimal_float_supported_p ())
9337 dfloat32_type_node = make_node (REAL_TYPE);
9338 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9339 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9340 layout_type (dfloat32_type_node);
9342 dfloat64_type_node = make_node (REAL_TYPE);
9343 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9344 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9345 layout_type (dfloat64_type_node);
9347 dfloat128_type_node = make_node (REAL_TYPE);
9348 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9349 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9350 layout_type (dfloat128_type_node);
9353 complex_integer_type_node = build_complex_type (integer_type_node, true);
9354 complex_float_type_node = build_complex_type (float_type_node, true);
9355 complex_double_type_node = build_complex_type (double_type_node, true);
9356 complex_long_double_type_node = build_complex_type (long_double_type_node,
9357 true);
9359 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9361 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9362 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9363 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9366 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9367 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9368 sat_ ## KIND ## _type_node = \
9369 make_sat_signed_ ## KIND ## _type (SIZE); \
9370 sat_unsigned_ ## KIND ## _type_node = \
9371 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9372 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9373 unsigned_ ## KIND ## _type_node = \
9374 make_unsigned_ ## KIND ## _type (SIZE);
9376 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9377 sat_ ## WIDTH ## KIND ## _type_node = \
9378 make_sat_signed_ ## KIND ## _type (SIZE); \
9379 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9380 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9381 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9382 unsigned_ ## WIDTH ## KIND ## _type_node = \
9383 make_unsigned_ ## KIND ## _type (SIZE);
9385 /* Make fixed-point type nodes based on four different widths. */
9386 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9387 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9388 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9389 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9390 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9392 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9393 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9394 NAME ## _type_node = \
9395 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9396 u ## NAME ## _type_node = \
9397 make_or_reuse_unsigned_ ## KIND ## _type \
9398 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9399 sat_ ## NAME ## _type_node = \
9400 make_or_reuse_sat_signed_ ## KIND ## _type \
9401 (GET_MODE_BITSIZE (MODE ## mode)); \
9402 sat_u ## NAME ## _type_node = \
9403 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9404 (GET_MODE_BITSIZE (U ## MODE ## mode));
9406 /* Fixed-point type and mode nodes. */
9407 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9408 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9409 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9410 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9411 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9412 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9413 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9414 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9415 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9416 MAKE_FIXED_MODE_NODE (accum, da, DA)
9417 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9420 tree t = targetm.build_builtin_va_list ();
9422 /* Many back-ends define record types without setting TYPE_NAME.
9423 If we copied the record type here, we'd keep the original
9424 record type without a name. This breaks name mangling. So,
9425 don't copy record types and let c_common_nodes_and_builtins()
9426 declare the type to be __builtin_va_list. */
9427 if (TREE_CODE (t) != RECORD_TYPE)
9428 t = build_variant_type_copy (t);
9430 va_list_type_node = t;
9433 /* SCEV analyzer global shared trees. */
9434 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9435 TREE_TYPE (chrec_dont_know) = void_type_node;
9436 chrec_known = make_node (SCEV_KNOWN);
9437 TREE_TYPE (chrec_known) = void_type_node;
9440 /* Modify DECL for given flags.
9441 TM_PURE attribute is set only on types, so the function will modify
9442 DECL's type when ECF_TM_PURE is used. */
9444 void
9445 set_call_expr_flags (tree decl, int flags)
9447 if (flags & ECF_NOTHROW)
9448 TREE_NOTHROW (decl) = 1;
9449 if (flags & ECF_CONST)
9450 TREE_READONLY (decl) = 1;
9451 if (flags & ECF_PURE)
9452 DECL_PURE_P (decl) = 1;
9453 if (flags & ECF_LOOPING_CONST_OR_PURE)
9454 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9455 if (flags & ECF_NOVOPS)
9456 DECL_IS_NOVOPS (decl) = 1;
9457 if (flags & ECF_NORETURN)
9458 TREE_THIS_VOLATILE (decl) = 1;
9459 if (flags & ECF_MALLOC)
9460 DECL_IS_MALLOC (decl) = 1;
9461 if (flags & ECF_RETURNS_TWICE)
9462 DECL_IS_RETURNS_TWICE (decl) = 1;
9463 if (flags & ECF_LEAF)
9464 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9465 NULL, DECL_ATTRIBUTES (decl));
9466 if (flags & ECF_COLD)
9467 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9468 NULL, DECL_ATTRIBUTES (decl));
9469 if (flags & ECF_RET1)
9470 DECL_ATTRIBUTES (decl)
9471 = tree_cons (get_identifier ("fn spec"),
9472 build_tree_list (NULL_TREE, build_string (2, "1 ")),
9473 DECL_ATTRIBUTES (decl));
9474 if ((flags & ECF_TM_PURE) && flag_tm)
9475 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9476 /* Looping const or pure is implied by noreturn.
9477 There is currently no way to declare looping const or looping pure alone. */
9478 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9479 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
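/* Illustrative sketch, not part of the original file: tagging a
   middle-end-created declaration with ECF_* properties handled above;
   the particular flag combination is an assumed example.  */
static void
example_mark_alloc_like (tree fndecl)
{
  set_call_expr_flags (fndecl, ECF_NOTHROW | ECF_LEAF | ECF_MALLOC);
}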
9483 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9485 static void
9486 local_define_builtin (const char *name, tree type, enum built_in_function code,
9487 const char *library_name, int ecf_flags)
9489 tree decl;
9491 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9492 library_name, NULL_TREE);
9493 set_call_expr_flags (decl, ecf_flags);
9495 set_builtin_decl (code, decl, true);
9498 /* Call this function after instantiating all builtins that the language
9499 front end cares about. This will build the rest of the builtins
9500 and internal functions that are relied upon by the tree optimizers and
9501 the middle-end. */
9503 void
9504 build_common_builtin_nodes (void)
9506 tree tmp, ftype;
9507 int ecf_flags;
9509 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9510 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9512 ftype = build_function_type (void_type_node, void_list_node);
9513 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9514 local_define_builtin ("__builtin_unreachable", ftype,
9515 BUILT_IN_UNREACHABLE,
9516 "__builtin_unreachable",
9517 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9518 | ECF_CONST | ECF_COLD);
9519 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9520 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9521 "abort",
9522 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9525 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9526 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9528 ftype = build_function_type_list (ptr_type_node,
9529 ptr_type_node, const_ptr_type_node,
9530 size_type_node, NULL_TREE);
9532 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9533 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9534 "memcpy", ECF_NOTHROW | ECF_LEAF);
9535 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9536 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9537 "memmove", ECF_NOTHROW | ECF_LEAF);
9540 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9542 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9543 const_ptr_type_node, size_type_node,
9544 NULL_TREE);
9545 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9546 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9549 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9551 ftype = build_function_type_list (ptr_type_node,
9552 ptr_type_node, integer_type_node,
9553 size_type_node, NULL_TREE);
9554 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9555 "memset", ECF_NOTHROW | ECF_LEAF);
9558 /* If we're checking the stack, `alloca' can throw. */
9559 const int alloca_flags
9560 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9562 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9564 ftype = build_function_type_list (ptr_type_node,
9565 size_type_node, NULL_TREE);
9566 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9567 "alloca", alloca_flags);
9570 ftype = build_function_type_list (ptr_type_node, size_type_node,
9571 size_type_node, NULL_TREE);
9572 local_define_builtin ("__builtin_alloca_with_align", ftype,
9573 BUILT_IN_ALLOCA_WITH_ALIGN,
9574 "__builtin_alloca_with_align",
9575 alloca_flags);
9577 ftype = build_function_type_list (ptr_type_node, size_type_node,
9578 size_type_node, size_type_node, NULL_TREE);
9579 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9580 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9581 "__builtin_alloca_with_align_and_max",
9582 alloca_flags);
9584 ftype = build_function_type_list (void_type_node,
9585 ptr_type_node, ptr_type_node,
9586 ptr_type_node, NULL_TREE);
9587 local_define_builtin ("__builtin_init_trampoline", ftype,
9588 BUILT_IN_INIT_TRAMPOLINE,
9589 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9590 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9591 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9592 "__builtin_init_heap_trampoline",
9593 ECF_NOTHROW | ECF_LEAF);
9594 local_define_builtin ("__builtin_init_descriptor", ftype,
9595 BUILT_IN_INIT_DESCRIPTOR,
9596 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9598 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9599 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9600 BUILT_IN_ADJUST_TRAMPOLINE,
9601 "__builtin_adjust_trampoline",
9602 ECF_CONST | ECF_NOTHROW);
9603 local_define_builtin ("__builtin_adjust_descriptor", ftype,
9604 BUILT_IN_ADJUST_DESCRIPTOR,
9605 "__builtin_adjust_descriptor",
9606 ECF_CONST | ECF_NOTHROW);
9608 ftype = build_function_type_list (void_type_node,
9609 ptr_type_node, ptr_type_node, NULL_TREE);
9610 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
9611 local_define_builtin ("__builtin___clear_cache", ftype,
9612 BUILT_IN_CLEAR_CACHE,
9613 "__clear_cache",
9614 ECF_NOTHROW);
9616 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9617 BUILT_IN_NONLOCAL_GOTO,
9618 "__builtin_nonlocal_goto",
9619 ECF_NORETURN | ECF_NOTHROW);
9621 ftype = build_function_type_list (void_type_node,
9622 ptr_type_node, ptr_type_node, NULL_TREE);
9623 local_define_builtin ("__builtin_setjmp_setup", ftype,
9624 BUILT_IN_SETJMP_SETUP,
9625 "__builtin_setjmp_setup", ECF_NOTHROW);
9627 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9628 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9629 BUILT_IN_SETJMP_RECEIVER,
9630 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9632 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9633 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9634 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9636 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9637 local_define_builtin ("__builtin_stack_restore", ftype,
9638 BUILT_IN_STACK_RESTORE,
9639 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9641 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9642 const_ptr_type_node, size_type_node,
9643 NULL_TREE);
9644 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
9645 "__builtin_memcmp_eq",
9646 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9648 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
9649 "__builtin_strncmp_eq",
9650 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9652 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
9653 "__builtin_strcmp_eq",
9654 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9656 /* If there's a possibility that we might use the ARM EABI, build the
9657 alternate __cxa_end_cleanup node used to resume from C++. */
9658 if (targetm.arm_eabi_unwinder)
9660 ftype = build_function_type_list (void_type_node, NULL_TREE);
9661 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9662 BUILT_IN_CXA_END_CLEANUP,
9663 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9666 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9667 local_define_builtin ("__builtin_unwind_resume", ftype,
9668 BUILT_IN_UNWIND_RESUME,
9669 ((targetm_common.except_unwind_info (&global_options)
9670 == UI_SJLJ)
9671 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9672 ECF_NORETURN);
9674 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9676 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9677 NULL_TREE);
9678 local_define_builtin ("__builtin_return_address", ftype,
9679 BUILT_IN_RETURN_ADDRESS,
9680 "__builtin_return_address",
9681 ECF_NOTHROW);
9684 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9685 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9687 ftype = build_function_type_list (void_type_node, ptr_type_node,
9688 ptr_type_node, NULL_TREE);
9689 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
9690 local_define_builtin ("__cyg_profile_func_enter", ftype,
9691 BUILT_IN_PROFILE_FUNC_ENTER,
9692 "__cyg_profile_func_enter", 0);
9693 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9694 local_define_builtin ("__cyg_profile_func_exit", ftype,
9695 BUILT_IN_PROFILE_FUNC_EXIT,
9696 "__cyg_profile_func_exit", 0);
9699 /* The exception object and filter values from the runtime. The argument
9700 must be zero before exception lowering, i.e. from the front end. After
9701 exception lowering, it will be the region number for the exception
9702 landing pad. These functions are PURE instead of CONST to prevent
9703 them from being hoisted past the exception edge that will initialize
9704 its value in the landing pad. */
9705 ftype = build_function_type_list (ptr_type_node,
9706 integer_type_node, NULL_TREE);
9707 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
9708 /* Only use TM_PURE if we have TM language support. */
9709 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
9710 ecf_flags |= ECF_TM_PURE;
9711 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
9712 "__builtin_eh_pointer", ecf_flags);
9714 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
9715 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
9716 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
9717 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9719 ftype = build_function_type_list (void_type_node,
9720 integer_type_node, integer_type_node,
9721 NULL_TREE);
9722 local_define_builtin ("__builtin_eh_copy_values", ftype,
9723 BUILT_IN_EH_COPY_VALUES,
9724 "__builtin_eh_copy_values", ECF_NOTHROW);
9726 /* Complex multiplication and division. These are handled as builtins
9727 rather than optabs because emit_library_call_value doesn't support
9728 complex. Further, we can do slightly better with folding these
9729 beasties if the real and complex parts of the arguments are separate. */
9731 int mode;
9733 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
9735 char mode_name_buf[4], *q;
9736 const char *p;
9737 enum built_in_function mcode, dcode;
9738 tree type, inner_type;
9739 const char *prefix = "__";
9741 if (targetm.libfunc_gnu_prefix)
9742 prefix = "__gnu_";
9744 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
9745 if (type == NULL)
9746 continue;
9747 inner_type = TREE_TYPE (type);
9749 ftype = build_function_type_list (type, inner_type, inner_type,
9750 inner_type, inner_type, NULL_TREE);
9752 mcode = ((enum built_in_function)
9753 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9754 dcode = ((enum built_in_function)
9755 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9757 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
9758 *q = TOLOWER (*p);
9759 *q = '\0';
9761 /* For -ftrapping-math these should throw from a former
9762 -fnon-call-exception stmt. */
9763 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
9764 NULL);
9765 local_define_builtin (built_in_names[mcode], ftype, mcode,
9766 built_in_names[mcode],
9767 ECF_CONST | ECF_LEAF);
9769 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
9770 NULL);
9771 local_define_builtin (built_in_names[dcode], ftype, dcode,
9772 built_in_names[dcode],
9773 ECF_CONST | ECF_LEAF);
9777 init_internal_fns ();
9780 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
9781 better way.
9783 If we requested a pointer to a vector, build up the pointers that
9784 we stripped off while looking for the inner type. Similarly for
9785 return values from functions.
9787 The argument TYPE is the top of the chain, and BOTTOM is the
9788 new type which we will point to. */
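/* As an illustration: if TYPE is a pointer to a pointer to float and BOTTOM
   is a 4-element vector of float, the result is a pointer to a pointer to
   that vector type, with the attributes and qualifiers of each level of the
   original chain re-applied via build_type_attribute_qual_variant.  */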
9790 tree
9791 reconstruct_complex_type (tree type, tree bottom)
9793 tree inner, outer;
9795 if (TREE_CODE (type) == POINTER_TYPE)
9797 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9798 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
9799 TYPE_REF_CAN_ALIAS_ALL (type));
9801 else if (TREE_CODE (type) == REFERENCE_TYPE)
9803 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9804 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
9805 TYPE_REF_CAN_ALIAS_ALL (type));
9807 else if (TREE_CODE (type) == ARRAY_TYPE)
9809 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9810 outer = build_array_type (inner, TYPE_DOMAIN (type));
9812 else if (TREE_CODE (type) == FUNCTION_TYPE)
9814 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9815 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
9817 else if (TREE_CODE (type) == METHOD_TYPE)
9819 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9820 /* The build_method_type_directly() routine prepends 'this' to the argument
9821 list, so we must compensate by removing it. */
9822 outer
9823 = build_method_type_directly
9824 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
9825 inner,
9826 TREE_CHAIN (TYPE_ARG_TYPES (type)));
9828 else if (TREE_CODE (type) == OFFSET_TYPE)
9830 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9831 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
9833 else
9834 return bottom;
9836 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
9837 TYPE_QUALS (type));
9840 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
9841 the inner type. */
9842 tree
9843 build_vector_type_for_mode (tree innertype, machine_mode mode)
9845 poly_int64 nunits;
9846 unsigned int bitsize;
9848 switch (GET_MODE_CLASS (mode))
9850 case MODE_VECTOR_BOOL:
9851 case MODE_VECTOR_INT:
9852 case MODE_VECTOR_FLOAT:
9853 case MODE_VECTOR_FRACT:
9854 case MODE_VECTOR_UFRACT:
9855 case MODE_VECTOR_ACCUM:
9856 case MODE_VECTOR_UACCUM:
9857 nunits = GET_MODE_NUNITS (mode);
9858 break;
9860 case MODE_INT:
9861 /* Check that there are no leftover bits. */
9862 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
9863 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
9864 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
9865 break;
9867 default:
9868 gcc_unreachable ();
9871 return make_vector_type (innertype, nunits, mode);
9874 /* Similarly, but takes the inner type and number of units, which must be
9875 a power of two. */
9877 tree
9878 build_vector_type (tree innertype, poly_int64 nunits)
9880 return make_vector_type (innertype, nunits, VOIDmode);
9883 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
9885 tree
9886 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
9888 gcc_assert (mask_mode != BLKmode);
9890 unsigned HOST_WIDE_INT esize;
9891 if (VECTOR_MODE_P (mask_mode))
9893 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
9894 esize = vector_element_size (vsize, nunits);
9896 else
9897 esize = 1;
9899 tree bool_type = build_nonstandard_boolean_type (esize);
9901 return make_vector_type (bool_type, nunits, mask_mode);
9904 /* Build a vector type that holds one boolean result for each element of
9905 vector type VECTYPE. The public interface for this operation is
9906 truth_type_for. */
9908 static tree
9909 build_truth_vector_type_for (tree vectype)
9911 machine_mode vector_mode = TYPE_MODE (vectype);
9912 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
9914 machine_mode mask_mode;
9915 if (VECTOR_MODE_P (vector_mode)
9916 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
9917 return build_truth_vector_type_for_mode (nunits, mask_mode);
9919 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
9920 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
9921 tree bool_type = build_nonstandard_boolean_type (esize);
9923 return make_vector_type (bool_type, nunits, VOIDmode);
9926 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
9927 set. */
9929 tree
9930 build_opaque_vector_type (tree innertype, poly_int64 nunits)
9932 tree t = make_vector_type (innertype, nunits, VOIDmode);
9933 tree cand;
9934 /* We always build the non-opaque variant before the opaque one,
9935 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
9936 cand = TYPE_NEXT_VARIANT (t);
9937 if (cand
9938 && TYPE_VECTOR_OPAQUE (cand)
9939 && check_qualified_type (cand, t, TYPE_QUALS (t)))
9940 return cand;
9941 /* Otherwise build a variant type and make sure to queue it after
9942 the non-opaque type. */
9943 cand = build_distinct_type_copy (t);
9944 TYPE_VECTOR_OPAQUE (cand) = true;
9945 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
9946 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
9947 TYPE_NEXT_VARIANT (t) = cand;
9948 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
9949 return cand;
9952 /* Return the value of element I of VECTOR_CST T as a wide_int. */
9954 static poly_wide_int
9955 vector_cst_int_elt (const_tree t, unsigned int i)
9957 /* First handle elements that are directly encoded. */
9958 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
9959 if (i < encoded_nelts)
9960 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
9962 /* Identify the pattern that contains element I and work out the index of
9963 the last encoded element for that pattern. */
9964 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
9965 unsigned int pattern = i % npatterns;
9966 unsigned int count = i / npatterns;
9967 unsigned int final_i = encoded_nelts - npatterns + pattern;
9969 /* If there are no steps, the final encoded value is the right one. */
9970 if (!VECTOR_CST_STEPPED_P (t))
9971 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
9973 /* Otherwise work out the value from the last two encoded elements. */
9974 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
9975 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
9976 poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
9977 return wi::to_poly_wide (v2) + (count - 2) * diff;
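/* A worked example of the encoding handled above: with 2 patterns and
   3 elements per pattern, encoded elements { 0, 1, 2, 3, 4, 5 } represent
   the stepped vector { 0, 1, 2, 3, 4, 5, 6, 7, ... }.  For I = 6 we get
   PATTERN = 0, COUNT = 3 and FINAL_I = 4, so V1 = 2, V2 = 4 and the result
   is 4 + (3 - 2) * (4 - 2) = 6.  */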
9980 /* Return the value of element I of VECTOR_CST T. */
9982 tree
9983 vector_cst_elt (const_tree t, unsigned int i)
9985 /* First handle elements that are directly encoded. */
9986 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
9987 if (i < encoded_nelts)
9988 return VECTOR_CST_ENCODED_ELT (t, i);
9990 /* If there are no steps, the final encoded value is the right one. */
9991 if (!VECTOR_CST_STEPPED_P (t))
9993 /* Identify the pattern that contains element I and work out the index of
9994 the last encoded element for that pattern. */
9995 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
9996 unsigned int pattern = i % npatterns;
9997 unsigned int final_i = encoded_nelts - npatterns + pattern;
9998 return VECTOR_CST_ENCODED_ELT (t, final_i);
10001 /* Otherwise work out the value from the last two encoded elements. */
10002 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10003 vector_cst_int_elt (t, i));
10006 /* Given an initializer INIT, return TRUE if INIT is zero or some
10007 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10008 null, set *NONZERO if and only if INIT is known not to be all
10009 zeros. The combination of a return value of false and *NONZERO
10010 false implies that INIT may but need not be all zeros. Other
10011 combinations indicate definitive answers. */
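/* For example, this returns true for { 0, 0.0, "" }; for { 1 } it returns
   false and sets *NONZERO; and for an initializer containing a non-constant
   element it returns false with *NONZERO left false, meaning the answer is
   unknown.  */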
10013 bool
10014 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10016 bool dummy;
10017 if (!nonzero)
10018 nonzero = &dummy;
10020 /* Conservatively clear NONZERO and set it only if INIT is definitely
10021 not all zero. */
10022 *nonzero = false;
10024 STRIP_NOPS (init);
10026 unsigned HOST_WIDE_INT off = 0;
10028 switch (TREE_CODE (init))
10030 case INTEGER_CST:
10031 if (integer_zerop (init))
10032 return true;
10034 *nonzero = true;
10035 return false;
10037 case REAL_CST:
10038 /* ??? Note that this is not correct for C4X float formats. There,
10039 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10040 negative exponent. */
10041 if (real_zerop (init)
10042 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10043 return true;
10045 *nonzero = true;
10046 return false;
10048 case FIXED_CST:
10049 if (fixed_zerop (init))
10050 return true;
10052 *nonzero = true;
10053 return false;
10055 case COMPLEX_CST:
10056 if (integer_zerop (init)
10057 || (real_zerop (init)
10058 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10059 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
10060 return true;
10062 *nonzero = true;
10063 return false;
10065 case VECTOR_CST:
10066 if (VECTOR_CST_NPATTERNS (init) == 1
10067 && VECTOR_CST_DUPLICATE_P (init)
10068 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
10069 return true;
10071 *nonzero = true;
10072 return false;
10074 case CONSTRUCTOR:
10076 if (TREE_CLOBBER_P (init))
10077 return false;
10079 unsigned HOST_WIDE_INT idx;
10080 tree elt;
10082 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10083 if (!initializer_zerop (elt, nonzero))
10084 return false;
10086 return true;
10089 case MEM_REF:
10091 tree arg = TREE_OPERAND (init, 0);
10092 if (TREE_CODE (arg) != ADDR_EXPR)
10093 return false;
10094 tree offset = TREE_OPERAND (init, 1);
10095 if (TREE_CODE (offset) != INTEGER_CST
10096 || !tree_fits_uhwi_p (offset))
10097 return false;
10098 off = tree_to_uhwi (offset);
10099 if (INT_MAX < off)
10100 return false;
10101 arg = TREE_OPERAND (arg, 0);
10102 if (TREE_CODE (arg) != STRING_CST)
10103 return false;
10104 init = arg;
10106 /* Fall through. */
10108 case STRING_CST:
10110 gcc_assert (off <= INT_MAX);
10112 int i = off;
10113 int n = TREE_STRING_LENGTH (init);
10114 if (n <= i)
10115 return false;
10117 /* We need to loop through all elements to handle cases like
10118 "\0" and "\0foobar". */
10119 for (i = 0; i < n; ++i)
10120 if (TREE_STRING_POINTER (init)[i] != '\0')
10122 *nonzero = true;
10123 return false;
10126 return true;
10129 default:
10130 return false;
10134 /* Return true if EXPR is an initializer expression in which every element
10135 is a constant that is numerically equal to 0 or 1. The elements do not
10136 need to be equal to each other. */
10138 bool
10139 initializer_each_zero_or_onep (const_tree expr)
10141 STRIP_ANY_LOCATION_WRAPPER (expr);
10143 switch (TREE_CODE (expr))
10145 case INTEGER_CST:
10146 return integer_zerop (expr) || integer_onep (expr);
10148 case REAL_CST:
10149 return real_zerop (expr) || real_onep (expr);
10151 case VECTOR_CST:
10153 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10154 if (VECTOR_CST_STEPPED_P (expr)
10155 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10156 return false;
10158 for (unsigned int i = 0; i < nelts; ++i)
10160 tree elt = vector_cst_elt (expr, i);
10161 if (!initializer_each_zero_or_onep (elt))
10162 return false;
10165 return true;
10168 default:
10169 return false;
10173 /* Check if vector VEC consists of all equal elements and
10174 that the number of elements corresponds to the type of VEC.
10175 The function returns the first element of the vector
10176 or NULL_TREE if the vector is not uniform. */
10177 tree
10178 uniform_vector_p (const_tree vec)
10180 tree first, t;
10181 unsigned HOST_WIDE_INT i, nelts;
10183 if (vec == NULL_TREE)
10184 return NULL_TREE;
10186 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10188 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10189 return TREE_OPERAND (vec, 0);
10191 else if (TREE_CODE (vec) == VECTOR_CST)
10193 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10194 return VECTOR_CST_ENCODED_ELT (vec, 0);
10195 return NULL_TREE;
10198 else if (TREE_CODE (vec) == CONSTRUCTOR
10199 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10201 first = error_mark_node;
10203 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10205 if (i == 0)
10207 first = t;
10208 continue;
10210 if (!operand_equal_p (first, t, 0))
10211 return NULL_TREE;
10213 if (i != nelts)
10214 return NULL_TREE;
10216 return first;
10219 return NULL_TREE;
10222 /* If the argument is INTEGER_CST, return it. If the argument is vector
10223 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10224 return NULL_TREE.
10225 Look through location wrappers. */
10227 tree
10228 uniform_integer_cst_p (tree t)
10230 STRIP_ANY_LOCATION_WRAPPER (t);
10232 if (TREE_CODE (t) == INTEGER_CST)
10233 return t;
10235 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10237 t = uniform_vector_p (t);
10238 if (t && TREE_CODE (t) == INTEGER_CST)
10239 return t;
10242 return NULL_TREE;
10245 /* If VECTOR_CST T has a single nonzero element, return the index of that
10246 element, otherwise return -1. */
10248 int
10249 single_nonzero_element (const_tree t)
10251 unsigned HOST_WIDE_INT nelts;
10252 unsigned int repeat_nelts;
10253 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
10254 repeat_nelts = nelts;
10255 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
10257 nelts = vector_cst_encoded_nelts (t);
10258 repeat_nelts = VECTOR_CST_NPATTERNS (t);
10260 else
10261 return -1;
10263 int res = -1;
10264 for (unsigned int i = 0; i < nelts; ++i)
10266 tree elt = vector_cst_elt (t, i);
10267 if (!integer_zerop (elt) && !real_zerop (elt))
10269 if (res >= 0 || i >= repeat_nelts)
10270 return -1;
10271 res = i;
10274 return res;
10277 /* Build an empty statement at location LOC. */
10279 tree
10280 build_empty_stmt (location_t loc)
10282 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10283 SET_EXPR_LOCATION (t, loc);
10284 return t;
10288 /* Build an OpenMP clause with code CODE. LOC is the location of the
10289 clause. */
10291 tree
10292 build_omp_clause (location_t loc, enum omp_clause_code code)
10294 tree t;
10295 int size, length;
10297 length = omp_clause_num_ops[code];
10298 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10300 record_node_allocation_statistics (OMP_CLAUSE, size);
10302 t = (tree) ggc_internal_alloc (size);
10303 memset (t, 0, size);
10304 TREE_SET_CODE (t, OMP_CLAUSE);
10305 OMP_CLAUSE_SET_CODE (t, code);
10306 OMP_CLAUSE_LOCATION (t) = loc;
10308 return t;
10311 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10312 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10313 Except for the CODE and operand count field, other storage for the
10314 object is initialized to zeros. */
10316 tree
10317 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10319 tree t;
10320 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10322 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10323 gcc_assert (len >= 1);
10325 record_node_allocation_statistics (code, length);
10327 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10329 TREE_SET_CODE (t, code);
10331 /* Can't use TREE_OPERAND to store the length because if checking is
10332 enabled, it will try to check the length before we store it. :-P */
10333 t->exp.operands[0] = build_int_cst (sizetype, len);
10335 return t;
10338 /* Helper function for build_call_* functions; build a CALL_EXPR with
10339 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10340 the argument slots. */
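/* For example, a call with two arguments is laid out as
   build_vl_exp (CALL_EXPR, 5): operand 0 holds the operand count,
   operand 1 the callee, operand 2 the static chain, and operands 3 and 4
   the two arguments.  */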
10342 static tree
10343 build_call_1 (tree return_type, tree fn, int nargs)
10345 tree t;
10347 t = build_vl_exp (CALL_EXPR, nargs + 3);
10348 TREE_TYPE (t) = return_type;
10349 CALL_EXPR_FN (t) = fn;
10350 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10352 return t;
10355 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10356 FN and a null static chain slot. NARGS is the number of call arguments
10357 which are specified as "..." arguments. */
10359 tree
10360 build_call_nary (tree return_type, tree fn, int nargs, ...)
10362 tree ret;
10363 va_list args;
10364 va_start (args, nargs);
10365 ret = build_call_valist (return_type, fn, nargs, args);
10366 va_end (args);
10367 return ret;
10370 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10371 FN and a null static chain slot. NARGS is the number of call arguments
10372 which are specified as a va_list ARGS. */
10374 tree
10375 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10377 tree t;
10378 int i;
10380 t = build_call_1 (return_type, fn, nargs);
10381 for (i = 0; i < nargs; i++)
10382 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10383 process_call_operands (t);
10384 return t;
10387 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10388 FN and a null static chain slot. NARGS is the number of call arguments
10389 which are specified as a tree array ARGS. */
10391 tree
10392 build_call_array_loc (location_t loc, tree return_type, tree fn,
10393 int nargs, const tree *args)
10395 tree t;
10396 int i;
10398 t = build_call_1 (return_type, fn, nargs);
10399 for (i = 0; i < nargs; i++)
10400 CALL_EXPR_ARG (t, i) = args[i];
10401 process_call_operands (t);
10402 SET_EXPR_LOCATION (t, loc);
10403 return t;
10406 /* Like build_call_array, but takes a vec. */
10408 tree
10409 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10411 tree ret, t;
10412 unsigned int ix;
10414 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10415 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10416 CALL_EXPR_ARG (ret, ix) = t;
10417 process_call_operands (ret);
10418 return ret;
10421 /* Conveniently construct a function call expression. FNDECL names the
10422 function to be called and N arguments are passed in the array
10423 ARGARRAY. */
10425 tree
10426 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10428 tree fntype = TREE_TYPE (fndecl);
10429 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10431 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10434 /* Conveniently construct a function call expression. FNDECL names the
10435 function to be called and the arguments are passed in the vector
10436 VEC. */
10438 tree
10439 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10441 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10442 vec_safe_address (vec));
10446 /* Conveniently construct a function call expression. FNDECL names the
10447 function to be called, N is the number of arguments, and the "..."
10448 parameters are the argument expressions. */
10450 tree
10451 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10453 va_list ap;
10454 tree *argarray = XALLOCAVEC (tree, n);
10455 int i;
10457 va_start (ap, n);
10458 for (i = 0; i < n; i++)
10459 argarray[i] = va_arg (ap, tree);
10460 va_end (ap);
10461 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10464 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10465 varargs macros aren't supported by all bootstrap compilers. */
10467 tree
10468 build_call_expr (tree fndecl, int n, ...)
10470 va_list ap;
10471 tree *argarray = XALLOCAVEC (tree, n);
10472 int i;
10474 va_start (ap, n);
10475 for (i = 0; i < n; i++)
10476 argarray[i] = va_arg (ap, tree);
10477 va_end (ap);
10478 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10481 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10482 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10483 It will get gimplified later into an ordinary internal function. */
10485 tree
10486 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10487 tree type, int n, const tree *args)
10489 tree t = build_call_1 (type, NULL_TREE, n);
10490 for (int i = 0; i < n; ++i)
10491 CALL_EXPR_ARG (t, i) = args[i];
10492 SET_EXPR_LOCATION (t, loc);
10493 CALL_EXPR_IFN (t) = ifn;
10494 process_call_operands (t);
10495 return t;
10498 /* Build an internal call expression. This is just like CALL_EXPR, except
10499 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10500 internal function. */
10502 tree
10503 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10504 tree type, int n, ...)
10506 va_list ap;
10507 tree *argarray = XALLOCAVEC (tree, n);
10508 int i;
10510 va_start (ap, n);
10511 for (i = 0; i < n; i++)
10512 argarray[i] = va_arg (ap, tree);
10513 va_end (ap);
10514 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10517 /* Return a function call to FN, if the target is guaranteed to support it,
10518 or null otherwise.
10520 N is the number of arguments, passed in the "...", and TYPE is the
10521 type of the return value. */
10523 tree
10524 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10525 int n, ...)
10527 va_list ap;
10528 tree *argarray = XALLOCAVEC (tree, n);
10529 int i;
10531 va_start (ap, n);
10532 for (i = 0; i < n; i++)
10533 argarray[i] = va_arg (ap, tree);
10534 va_end (ap);
10535 if (internal_fn_p (fn))
10537 internal_fn ifn = as_internal_fn (fn);
10538 if (direct_internal_fn_p (ifn))
10540 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10541 if (!direct_internal_fn_supported_p (ifn, types,
10542 OPTIMIZE_FOR_BOTH))
10543 return NULL_TREE;
10545 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10547 else
10549 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10550 if (!fndecl)
10551 return NULL_TREE;
10552 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10556 /* Return a function call to the appropriate builtin alloca variant.
10558 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10559 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10560 bound for SIZE in case it is not a fixed value. */
10562 tree
10563 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10565 if (max_size >= 0)
10567 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10568 return
10569 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10571 else if (align > 0)
10573 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10574 return build_call_expr (t, 2, size, size_int (align));
10576 else
10578 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10579 return build_call_expr (t, 1, size);
10583 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10584 if SIZE == -1) and return a tree node representing a char* pointer to
10585 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10586 the STRING_CST value is the LEN bytes at STR (the representation
10587 of the string, which may be wide). Otherwise it's all zeros. */
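/* For example, build_string_literal (4, "abc") builds the STRING_CST "abc"
   (4 bytes including the terminating NUL) of type const char[4] and returns
   &"abc"[0], i.e. a pointer to its first character.  */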
10589 tree
10590 build_string_literal (unsigned len, const char *str /* = NULL */,
10591 tree eltype /* = char_type_node */,
10592 unsigned HOST_WIDE_INT size /* = -1 */)
10594 tree t = build_string (len, str);
10595 /* Set the maximum valid index based on the string length or SIZE. */
10596 unsigned HOST_WIDE_INT maxidx
10597 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
10599 tree index = build_index_type (size_int (maxidx));
10600 eltype = build_type_variant (eltype, 1, 0);
10601 tree type = build_array_type (eltype, index);
10602 TREE_TYPE (t) = type;
10603 TREE_CONSTANT (t) = 1;
10604 TREE_READONLY (t) = 1;
10605 TREE_STATIC (t) = 1;
10607 type = build_pointer_type (eltype);
10608 t = build1 (ADDR_EXPR, type,
10609 build4 (ARRAY_REF, eltype,
10610 t, integer_zero_node, NULL_TREE, NULL_TREE));
10611 return t;
10616 /* Return true if T (assumed to be a DECL) must be assigned a memory
10617 location. */
10619 bool
10620 needs_to_live_in_memory (const_tree t)
10622 return (TREE_ADDRESSABLE (t)
10623 || is_global_var (t)
10624 || (TREE_CODE (t) == RESULT_DECL
10625 && !DECL_BY_REFERENCE (t)
10626 && aggregate_value_p (t, current_function_decl)));
10629 /* Return the value of constant X, sign-extended to a HOST_WIDE_INT. */
10631 HOST_WIDE_INT
10632 int_cst_value (const_tree x)
10634 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10635 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10637 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10638 gcc_assert (cst_and_fits_in_hwi (x));
10640 if (bits < HOST_BITS_PER_WIDE_INT)
10642 bool negative = ((val >> (bits - 1)) & 1) != 0;
10643 if (negative)
10644 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
10645 else
10646 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
10649 return val;
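/* For example, for a constant of an 8-bit signed type BITS is 8, so a low
   value of 0xff has its sign bit set and is widened to -1, while 0x7f is
   returned unchanged as 127.  */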
10652 /* If TYPE is an integral or pointer type, return an integer type with
10653 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10654 if TYPE is already an integer type of signedness UNSIGNEDP.
10655 If TYPE is a floating-point type, return an integer type with the same
10656 bitsize and with the signedness given by UNSIGNEDP; this is useful
10657 when doing bit-level operations on a floating-point value. */
10659 tree
10660 signed_or_unsigned_type_for (int unsignedp, tree type)
10662 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
10663 return type;
10665 if (TREE_CODE (type) == VECTOR_TYPE)
10667 tree inner = TREE_TYPE (type);
10668 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10669 if (!inner2)
10670 return NULL_TREE;
10671 if (inner == inner2)
10672 return type;
10673 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10676 if (TREE_CODE (type) == COMPLEX_TYPE)
10678 tree inner = TREE_TYPE (type);
10679 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10680 if (!inner2)
10681 return NULL_TREE;
10682 if (inner == inner2)
10683 return type;
10684 return build_complex_type (inner2);
10687 unsigned int bits;
10688 if (INTEGRAL_TYPE_P (type)
10689 || POINTER_TYPE_P (type)
10690 || TREE_CODE (type) == OFFSET_TYPE)
10691 bits = TYPE_PRECISION (type);
10692 else if (TREE_CODE (type) == REAL_TYPE)
10693 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
10694 else
10695 return NULL_TREE;
10697 return build_nonstandard_integer_type (bits, unsignedp);
10700 /* If TYPE is an integral or pointer type, return an integer type with
10701 the same precision which is unsigned, or itself if TYPE is already an
10702 unsigned integer type. If TYPE is a floating-point type, return an
10703 unsigned integer type with the same bitsize as TYPE. */
10705 tree
10706 unsigned_type_for (tree type)
10708 return signed_or_unsigned_type_for (1, type);
10711 /* If TYPE is an integral or pointer type, return an integer type with
10712 the same precision which is signed, or itself if TYPE is already a
10713 signed integer type. If TYPE is a floating-point type, return a
10714 signed integer type with the same bitsize as TYPE. */
10716 tree
10717 signed_type_for (tree type)
10719 return signed_or_unsigned_type_for (0, type);
10722 /* If TYPE is a vector type, return a signed integer vector type with the
10723 same width and number of subparts. Otherwise return boolean_type_node. */
10725 tree
10726 truth_type_for (tree type)
10728 if (TREE_CODE (type) == VECTOR_TYPE)
10730 if (VECTOR_BOOLEAN_TYPE_P (type))
10731 return type;
10732 return build_truth_vector_type_for (type);
10734 else
10735 return boolean_type_node;
10738 /* Returns the largest value obtainable by casting something in INNER type to
10739 OUTER type. */
10741 tree
10742 upper_bound_in_type (tree outer, tree inner)
10744 unsigned int det = 0;
10745 unsigned oprec = TYPE_PRECISION (outer);
10746 unsigned iprec = TYPE_PRECISION (inner);
10747 unsigned prec;
10749 /* Compute a unique number for every combination. */
10750 det |= (oprec > iprec) ? 4 : 0;
10751 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10752 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10754 /* Determine the exponent to use. */
10755 switch (det)
10757 case 0:
10758 case 1:
10759 /* oprec <= iprec, outer: signed, inner: don't care. */
10760 prec = oprec - 1;
10761 break;
10762 case 2:
10763 case 3:
10764 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10765 prec = oprec;
10766 break;
10767 case 4:
10768 /* oprec > iprec, outer: signed, inner: signed. */
10769 prec = iprec - 1;
10770 break;
10771 case 5:
10772 /* oprec > iprec, outer: signed, inner: unsigned. */
10773 prec = iprec;
10774 break;
10775 case 6:
10776 /* oprec > iprec, outer: unsigned, inner: signed. */
10777 prec = oprec;
10778 break;
10779 case 7:
10780 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10781 prec = iprec;
10782 break;
10783 default:
10784 gcc_unreachable ();
10787 return wide_int_to_tree (outer,
10788 wi::mask (prec, false, TYPE_PRECISION (outer)));
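/* For example, casting a 32-bit signed INNER to a 16-bit unsigned OUTER
   gives DET = 2, so PREC = 16 and the upper bound is 0xffff; casting it to
   a 64-bit signed OUTER gives DET = 4, so PREC = 31 and the bound is
   0x7fffffff.  */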
10791 /* Returns the smallest value obtainable by casting something in INNER type to
10792 OUTER type. */
10794 tree
10795 lower_bound_in_type (tree outer, tree inner)
10797 unsigned oprec = TYPE_PRECISION (outer);
10798 unsigned iprec = TYPE_PRECISION (inner);
10800 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10801 and obtain 0. */
10802 if (TYPE_UNSIGNED (outer)
10803 /* If we are widening something of an unsigned type, OUTER type
10804 contains all values of INNER type. In particular, both INNER
10805 and OUTER types have zero in common. */
10806 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10807 return build_int_cst (outer, 0);
10808 else
10810 /* If we are widening a signed type to another signed type, we
10811 want to obtain -2^^(iprec-1). If we are keeping the
10812 precision or narrowing to a signed type, we want to obtain
10813 -2^(oprec-1). */
10814 unsigned prec = oprec > iprec ? iprec : oprec;
10815 return wide_int_to_tree (outer,
10816 wi::mask (prec - 1, true,
10817 TYPE_PRECISION (outer)));
10821 /* Return nonzero if two operands that are suitable for PHI nodes are
10822 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10823 SSA_NAME or invariant. Note that this is strictly an optimization.
10824 That is, callers of this function can directly call operand_equal_p
10825 and get the same result, only slower. */
10827 int
10828 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10830 if (arg0 == arg1)
10831 return 1;
10832 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10833 return 0;
10834 return operand_equal_p (arg0, arg1, 0);
10837 /* Returns the number of zeros at the end of the binary representation of X. */
10839 tree
10840 num_ending_zeros (const_tree x)
10842 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
10846 #define WALK_SUBTREE(NODE) \
10847 do \
10849 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10850 if (result) \
10851 return result; \
10853 while (0)
10855 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10856 to be walked whenever a type is seen in the tree. The rest of the operands
10857 and the return value are as for walk_tree. */
10859 static tree
10860 walk_type_fields (tree type, walk_tree_fn func, void *data,
10861 hash_set<tree> *pset, walk_tree_lh lh)
10863 tree result = NULL_TREE;
10865 switch (TREE_CODE (type))
10867 case POINTER_TYPE:
10868 case REFERENCE_TYPE:
10869 case VECTOR_TYPE:
10870 /* We have to worry about mutually recursive pointers. These can't
10871 be written in C. They can in Ada. It's pathological, but
10872 there's an ACATS test (c38102a) that checks it. Deal with this
10873 by checking if we're pointing to another pointer, that one
10874 points to another pointer, that one does too, and we have no htab.
10875 If so, get a hash table. We check three levels deep to avoid
10876 the cost of the hash table if we don't need one. */
10877 if (POINTER_TYPE_P (TREE_TYPE (type))
10878 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10879 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10880 && !pset)
10882 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10883 func, data);
10884 if (result)
10885 return result;
10887 break;
10890 /* fall through */
10892 case COMPLEX_TYPE:
10893 WALK_SUBTREE (TREE_TYPE (type));
10894 break;
10896 case METHOD_TYPE:
10897 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10899 /* Fall through. */
10901 case FUNCTION_TYPE:
10902 WALK_SUBTREE (TREE_TYPE (type));
10904 tree arg;
10906 /* We never want to walk into default arguments. */
10907 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10908 WALK_SUBTREE (TREE_VALUE (arg));
10910 break;
10912 case ARRAY_TYPE:
10913 /* Don't follow this node's type if it is a pointer, for fear that
10914 we'll have infinite recursion. If we have a PSET, then we
10915 need not fear. */
10916 if (pset
10917 || (!POINTER_TYPE_P (TREE_TYPE (type))
10918 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10919 WALK_SUBTREE (TREE_TYPE (type));
10920 WALK_SUBTREE (TYPE_DOMAIN (type));
10921 break;
10923 case OFFSET_TYPE:
10924 WALK_SUBTREE (TREE_TYPE (type));
10925 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10926 break;
10928 default:
10929 break;
10932 return NULL_TREE;
10935 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10936 called with the DATA and the address of each sub-tree. If FUNC returns a
10937 non-NULL value, the traversal is stopped, and the value returned by FUNC
10938 is returned. If PSET is non-NULL it is used to record the nodes visited,
10939 and to avoid visiting a node more than once. */
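/* A minimal sketch of such a callback (the name find_ssa_name_r is purely
   illustrative and not defined in this file):

     static tree
     find_ssa_name_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
		      void *data ATTRIBUTE_UNUSED)
     {
       return TREE_CODE (*tp) == SSA_NAME ? *tp : NULL_TREE;
     }

   walk_tree (&expr, find_ssa_name_r, NULL, NULL) then returns the first
   SSA_NAME found in EXPR, or NULL_TREE if there is none.  */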
10941 tree
10942 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10943 hash_set<tree> *pset, walk_tree_lh lh)
10945 enum tree_code code;
10946 int walk_subtrees;
10947 tree result;
10949 #define WALK_SUBTREE_TAIL(NODE) \
10950 do \
10952 tp = & (NODE); \
10953 goto tail_recurse; \
10955 while (0)
10957 tail_recurse:
10958 /* Skip empty subtrees. */
10959 if (!*tp)
10960 return NULL_TREE;
10962 /* Don't walk the same tree twice, if the user has requested
10963 that we avoid doing so. */
10964 if (pset && pset->add (*tp))
10965 return NULL_TREE;
10967 /* Call the function. */
10968 walk_subtrees = 1;
10969 result = (*func) (tp, &walk_subtrees, data);
10971 /* If we found something, return it. */
10972 if (result)
10973 return result;
10975 code = TREE_CODE (*tp);
10977 /* Even if we didn't, FUNC may have decided that there was nothing
10978 interesting below this point in the tree. */
10979 if (!walk_subtrees)
10981 /* But we still need to check our siblings. */
10982 if (code == TREE_LIST)
10983 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10984 else if (code == OMP_CLAUSE)
10985 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10986 else
10987 return NULL_TREE;
10990 if (lh)
10992 result = (*lh) (tp, &walk_subtrees, func, data, pset);
10993 if (result || !walk_subtrees)
10994 return result;
10997 switch (code)
10999 case ERROR_MARK:
11000 case IDENTIFIER_NODE:
11001 case INTEGER_CST:
11002 case REAL_CST:
11003 case FIXED_CST:
11004 case STRING_CST:
11005 case BLOCK:
11006 case PLACEHOLDER_EXPR:
11007 case SSA_NAME:
11008 case FIELD_DECL:
11009 case RESULT_DECL:
11010 /* None of these have subtrees other than those already walked
11011 above. */
11012 break;
11014 case TREE_LIST:
11015 WALK_SUBTREE (TREE_VALUE (*tp));
11016 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11017 break;
11019 case TREE_VEC:
11021 int len = TREE_VEC_LENGTH (*tp);
11023 if (len == 0)
11024 break;
11026 /* Walk all elements but the first. */
11027 while (--len)
11028 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11030 /* Now walk the first one as a tail call. */
11031 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11034 case VECTOR_CST:
11036 unsigned len = vector_cst_encoded_nelts (*tp);
11037 if (len == 0)
11038 break;
11039 /* Walk all elements but the first. */
11040 while (--len)
11041 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
11042 /* Now walk the first one as a tail call. */
11043 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
11046 case COMPLEX_CST:
11047 WALK_SUBTREE (TREE_REALPART (*tp));
11048 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11050 case CONSTRUCTOR:
11052 unsigned HOST_WIDE_INT idx;
11053 constructor_elt *ce;
11055 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11056 idx++)
11057 WALK_SUBTREE (ce->value);
11059 break;
11061 case SAVE_EXPR:
11062 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11064 case BIND_EXPR:
11066 tree decl;
11067 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11069 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11070 into declarations that are just mentioned, rather than
11071 declared; they don't really belong to this part of the tree.
11072 And, we can see cycles: the initializer for a declaration
11073 can refer to the declaration itself. */
11074 WALK_SUBTREE (DECL_INITIAL (decl));
11075 WALK_SUBTREE (DECL_SIZE (decl));
11076 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11078 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11081 case STATEMENT_LIST:
11083 tree_stmt_iterator i;
11084 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11085 WALK_SUBTREE (*tsi_stmt_ptr (i));
11087 break;
11089 case OMP_CLAUSE:
11090 switch (OMP_CLAUSE_CODE (*tp))
11092 case OMP_CLAUSE_GANG:
11093 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11094 /* FALLTHRU */
11096 case OMP_CLAUSE_AFFINITY:
11097 case OMP_CLAUSE_ASYNC:
11098 case OMP_CLAUSE_WAIT:
11099 case OMP_CLAUSE_WORKER:
11100 case OMP_CLAUSE_VECTOR:
11101 case OMP_CLAUSE_NUM_GANGS:
11102 case OMP_CLAUSE_NUM_WORKERS:
11103 case OMP_CLAUSE_VECTOR_LENGTH:
11104 case OMP_CLAUSE_PRIVATE:
11105 case OMP_CLAUSE_SHARED:
11106 case OMP_CLAUSE_FIRSTPRIVATE:
11107 case OMP_CLAUSE_COPYIN:
11108 case OMP_CLAUSE_COPYPRIVATE:
11109 case OMP_CLAUSE_FINAL:
11110 case OMP_CLAUSE_IF:
11111 case OMP_CLAUSE_NUM_THREADS:
11112 case OMP_CLAUSE_SCHEDULE:
11113 case OMP_CLAUSE_UNIFORM:
11114 case OMP_CLAUSE_DEPEND:
11115 case OMP_CLAUSE_NONTEMPORAL:
11116 case OMP_CLAUSE_NUM_TEAMS:
11117 case OMP_CLAUSE_THREAD_LIMIT:
11118 case OMP_CLAUSE_DEVICE:
11119 case OMP_CLAUSE_DIST_SCHEDULE:
11120 case OMP_CLAUSE_SAFELEN:
11121 case OMP_CLAUSE_SIMDLEN:
11122 case OMP_CLAUSE_ORDERED:
11123 case OMP_CLAUSE_PRIORITY:
11124 case OMP_CLAUSE_GRAINSIZE:
11125 case OMP_CLAUSE_NUM_TASKS:
11126 case OMP_CLAUSE_HINT:
11127 case OMP_CLAUSE_TO_DECLARE:
11128 case OMP_CLAUSE_LINK:
11129 case OMP_CLAUSE_DETACH:
11130 case OMP_CLAUSE_USE_DEVICE_PTR:
11131 case OMP_CLAUSE_USE_DEVICE_ADDR:
11132 case OMP_CLAUSE_IS_DEVICE_PTR:
11133 case OMP_CLAUSE_INCLUSIVE:
11134 case OMP_CLAUSE_EXCLUSIVE:
11135 case OMP_CLAUSE__LOOPTEMP_:
11136 case OMP_CLAUSE__REDUCTEMP_:
11137 case OMP_CLAUSE__CONDTEMP_:
11138 case OMP_CLAUSE__SCANTEMP_:
11139 case OMP_CLAUSE__SIMDUID_:
11140 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11141 /* FALLTHRU */
11143 case OMP_CLAUSE_INDEPENDENT:
11144 case OMP_CLAUSE_NOWAIT:
11145 case OMP_CLAUSE_DEFAULT:
11146 case OMP_CLAUSE_UNTIED:
11147 case OMP_CLAUSE_MERGEABLE:
11148 case OMP_CLAUSE_PROC_BIND:
11149 case OMP_CLAUSE_DEVICE_TYPE:
11150 case OMP_CLAUSE_INBRANCH:
11151 case OMP_CLAUSE_NOTINBRANCH:
11152 case OMP_CLAUSE_FOR:
11153 case OMP_CLAUSE_PARALLEL:
11154 case OMP_CLAUSE_SECTIONS:
11155 case OMP_CLAUSE_TASKGROUP:
11156 case OMP_CLAUSE_NOGROUP:
11157 case OMP_CLAUSE_THREADS:
11158 case OMP_CLAUSE_SIMD:
11159 case OMP_CLAUSE_DEFAULTMAP:
11160 case OMP_CLAUSE_ORDER:
11161 case OMP_CLAUSE_BIND:
11162 case OMP_CLAUSE_AUTO:
11163 case OMP_CLAUSE_SEQ:
11164 case OMP_CLAUSE_TILE:
11165 case OMP_CLAUSE__SIMT_:
11166 case OMP_CLAUSE_IF_PRESENT:
11167 case OMP_CLAUSE_FINALIZE:
11168 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11170 case OMP_CLAUSE_LASTPRIVATE:
11171 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11172 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11173 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11175 case OMP_CLAUSE_COLLAPSE:
11177 int i;
11178 for (i = 0; i < 3; i++)
11179 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11180 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11183 case OMP_CLAUSE_LINEAR:
11184 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11185 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11186 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11187 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11189 case OMP_CLAUSE_ALIGNED:
11190 case OMP_CLAUSE_ALLOCATE:
11191 case OMP_CLAUSE_FROM:
11192 case OMP_CLAUSE_TO:
11193 case OMP_CLAUSE_MAP:
11194 case OMP_CLAUSE__CACHE_:
11195 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11196 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11197 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11199 case OMP_CLAUSE_REDUCTION:
11200 case OMP_CLAUSE_TASK_REDUCTION:
11201 case OMP_CLAUSE_IN_REDUCTION:
11203 int i;
11204 for (i = 0; i < 5; i++)
11205 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11206 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11209 default:
11210 gcc_unreachable ();
11212 break;
11214 case TARGET_EXPR:
11216 int i, len;
11218 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11219 But we only want to walk it once. */
11220 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11221 for (i = 0; i < len; ++i)
11222 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11223 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11226 case DECL_EXPR:
11227 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11228 defining. We only want to walk into these fields of a type in this
11229 case and not in the general case of a mere reference to the type.
11231 The criterion is as follows: if the field can be an expression, it
11232 must be walked only here. This should be in keeping with the fields
11233 that are directly gimplified in gimplify_type_sizes in order for the
11234 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11235 variable-sized types.
11237 Note that DECLs get walked as part of processing the BIND_EXPR. */
11238 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11240 /* Call the function for the decl so e.g. copy_tree_body_r can
11241 replace it with the remapped one. */
11242 result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
11243 if (result || !walk_subtrees)
11244 return result;
11246 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11247 if (TREE_CODE (*type_p) == ERROR_MARK)
11248 return NULL_TREE;
11250 /* Call the function for the type. See if it returns anything or
11251 doesn't want us to continue. If we are to continue, walk both
11252 the normal fields and those for the declaration case. */
11253 result = (*func) (type_p, &walk_subtrees, data);
11254 if (result || !walk_subtrees)
11255 return result;
11257 /* But do not walk a pointed-to type since it may itself need to
11258 be walked in the declaration case if it isn't anonymous. */
11259 if (!POINTER_TYPE_P (*type_p))
11261 result = walk_type_fields (*type_p, func, data, pset, lh);
11262 if (result)
11263 return result;
11266 /* If this is a record type, also walk the fields. */
11267 if (RECORD_OR_UNION_TYPE_P (*type_p))
11269 tree field;
11271 for (field = TYPE_FIELDS (*type_p); field;
11272 field = DECL_CHAIN (field))
11274 /* We'd like to look at the type of the field, but we can
11275 easily get infinite recursion. So assume it's pointed
11276 to elsewhere in the tree. Also, ignore things that
11277 aren't fields. */
11278 if (TREE_CODE (field) != FIELD_DECL)
11279 continue;
11281 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11282 WALK_SUBTREE (DECL_SIZE (field));
11283 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11284 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11285 WALK_SUBTREE (DECL_QUALIFIER (field));
11289 /* Same for scalar types. */
11290 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11291 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11292 || TREE_CODE (*type_p) == INTEGER_TYPE
11293 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11294 || TREE_CODE (*type_p) == REAL_TYPE)
11296 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11297 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11300 WALK_SUBTREE (TYPE_SIZE (*type_p));
11301 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11303 /* FALLTHRU */
11305 default:
11306 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11308 int i, len;
11310 /* Walk over all the sub-trees of this operand. */
11311 len = TREE_OPERAND_LENGTH (*tp);
11313 /* Go through the subtrees. We need to do this in forward order so
11314 that the scope of a FOR_EXPR is handled properly. */
11315 if (len)
11317 for (i = 0; i < len - 1; ++i)
11318 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11319 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11322 /* If this is a type, walk the needed fields in the type. */
11323 else if (TYPE_P (*tp))
11324 return walk_type_fields (*tp, func, data, pset, lh);
11325 break;
11328 /* We didn't find what we were looking for. */
11329 return NULL_TREE;
11331 #undef WALK_SUBTREE_TAIL
11333 #undef WALK_SUBTREE
11335 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11337 tree
11338 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11339 walk_tree_lh lh)
11341 tree result;
11343 hash_set<tree> pset;
11344 result = walk_tree_1 (tp, func, data, &pset, lh);
11345 return result;
11349 tree
11350 tree_block (tree t)
11352 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11354 if (IS_EXPR_CODE_CLASS (c))
11355 return LOCATION_BLOCK (t->exp.locus);
11356 gcc_unreachable ();
11357 return NULL;
11360 void
11361 tree_set_block (tree t, tree b)
11363 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11365 if (IS_EXPR_CODE_CLASS (c))
11367 t->exp.locus = set_block (t->exp.locus, b);
11369 else
11370 gcc_unreachable ();
11373 /* Create a nameless artificial label and put it in the current
11374 function context. The label has a location of LOC. Returns the
11375 newly created label. */
11377 tree
11378 create_artificial_label (location_t loc)
11380 tree lab = build_decl (loc,
11381 LABEL_DECL, NULL_TREE, void_type_node);
11383 DECL_ARTIFICIAL (lab) = 1;
11384 DECL_IGNORED_P (lab) = 1;
11385 DECL_CONTEXT (lab) = current_function_decl;
11386 return lab;
11389 /* Given a tree, try to return a useful variable name that we can use
11390 to prefix a temporary that is being assigned the value of the tree.
11391 I.E. given <temp> = &A, return A. */
11393 const char *
11394 get_name (tree t)
11396 tree stripped_decl;
11398 stripped_decl = t;
11399 STRIP_NOPS (stripped_decl);
11400 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11401 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11402 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11404 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11405 if (!name)
11406 return NULL;
11407 return IDENTIFIER_POINTER (name);
11409 else
11411 switch (TREE_CODE (stripped_decl))
11413 case ADDR_EXPR:
11414 return get_name (TREE_OPERAND (stripped_decl, 0));
11415 default:
11416 return NULL;
11421 /* Return true if FNTYPE has a variable argument list. */
11423 bool
11424 stdarg_p (const_tree fntype)
11426 function_args_iterator args_iter;
11427 tree n = NULL_TREE, t;
11429 if (!fntype)
11430 return false;
11432 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11434 n = t;
11437 return n != NULL_TREE && n != void_type_node;
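/* For example, for "int f (int, ...)" the argument list ends with the int
   type rather than void_type_node, so stdarg_p returns true; for
   "int g (int)" it ends with void_type_node and the result is false; and
   for an unprototyped "int h ()" the list is empty, so the result is also
   false.  */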
11440 /* Return true if FNTYPE has a prototype. */
11442 bool
11443 prototype_p (const_tree fntype)
11445 tree t;
11447 gcc_assert (fntype != NULL_TREE);
11449 t = TYPE_ARG_TYPES (fntype);
11450 return (t != NULL_TREE);
11453 /* If BLOCK is inlined from an __attribute__((__artificial__))
11454 routine, return a pointer to the location from which it has been
11455 called. */
11456 location_t *
11457 block_nonartificial_location (tree block)
11459 location_t *ret = NULL;
11461 while (block && TREE_CODE (block) == BLOCK
11462 && BLOCK_ABSTRACT_ORIGIN (block))
11464 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11465 if (TREE_CODE (ao) == FUNCTION_DECL)
11467 /* If AO is an artificial inline, point RET to the
11468 call site locus at which it has been inlined and continue
11469 the loop, in case AO's caller is also an artificial
11470 inline. */
11471 if (DECL_DECLARED_INLINE_P (ao)
11472 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11473 ret = &BLOCK_SOURCE_LOCATION (block);
11474 else
11475 break;
11477 else if (TREE_CODE (ao) != BLOCK)
11478 break;
11480 block = BLOCK_SUPERCONTEXT (block);
11482 return ret;
11486 /* If EXP is inlined from an __attribute__((__artificial__))
11487 function, return the location of the original call expression. */
11489 location_t
11490 tree_nonartificial_location (tree exp)
11492 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11494 if (loc)
11495 return *loc;
11496 else
11497 return EXPR_LOCATION (exp);
11500 /* Return the location into which EXP has been inlined. Analogous
11501 to tree_nonartificial_location() above but not limited to artificial
11502 functions declared inline. If SYSTEM_HEADER is true, return
11503 the macro expansion point of the location if it's in a system header. */
11505 location_t
11506 tree_inlined_location (tree exp, bool system_header /* = true */)
11508 location_t loc = UNKNOWN_LOCATION;
11510 tree block = TREE_BLOCK (exp);
11512 while (block && TREE_CODE (block) == BLOCK
11513 && BLOCK_ABSTRACT_ORIGIN (block))
11515 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11516 if (TREE_CODE (ao) == FUNCTION_DECL)
11517 loc = BLOCK_SOURCE_LOCATION (block);
11518 else if (TREE_CODE (ao) != BLOCK)
11519 break;
11521 block = BLOCK_SUPERCONTEXT (block);
11524 if (loc == UNKNOWN_LOCATION)
11526 loc = EXPR_LOCATION (exp);
11527 if (system_header)
11528 /* Only consider macro expansion when the block traversal failed
11529 to find a location. Otherwise it's not relevant. */
11530 return expansion_point_location_if_in_system_header (loc);
11533 return loc;
11536 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11537 nodes. */
11539 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11541 hashval_t
11542 cl_option_hasher::hash (tree x)
11542 cl_option_hasher::hash (tree x)
11544 const_tree const t = x;
11545 const char *p;
11546 size_t i;
11547 size_t len = 0;
11548 hashval_t hash = 0;
11550 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11552 p = (const char *)TREE_OPTIMIZATION (t);
11553 len = sizeof (struct cl_optimization);
11556 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11557 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11559 else
11560 gcc_unreachable ();
11562 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11563 something else. */
11564 for (i = 0; i < len; i++)
11565 if (p[i])
11566 hash = (hash << 4) ^ ((i << 2) | p[i]);
11568 return hash;
11571 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11572 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11573 same kind of node. */
11575 bool
11576 cl_option_hasher::equal (tree x, tree y)
11578 const_tree const xt = x;
11579 const_tree const yt = y;
11581 if (TREE_CODE (xt) != TREE_CODE (yt))
11582 return 0;
11584 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11585 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11586 TREE_OPTIMIZATION (yt));
11587 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11588 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11589 TREE_TARGET_OPTION (yt));
11590 else
11591 gcc_unreachable ();
11594 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11596 tree
11597 build_optimization_node (struct gcc_options *opts,
11598 struct gcc_options *opts_set)
11600 tree t;
11602 /* Use the cache of optimization nodes. */
11604 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11605 opts, opts_set);
11607 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11608 t = *slot;
11609 if (!t)
11611 /* Insert this one into the hash table. */
11612 t = cl_optimization_node;
11613 *slot = t;
11615 /* Make a new node for next time round. */
11616 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11619 return t;
11622 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11624 tree
11625 build_target_option_node (struct gcc_options *opts,
11626 struct gcc_options *opts_set)
11628 tree t;
11630 /* Use the cache of optimization nodes. */
11632 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11633 opts, opts_set);
11635 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11636 t = *slot;
11637 if (!t)
11639 /* Insert this one into the hash table. */
11640 t = cl_target_option_node;
11641 *slot = t;
11643 /* Make a new node for next time round. */
11644 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11647 return t;
11650 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11651 so that they aren't saved during PCH writing. */
11653 void
11654 prepare_target_option_nodes_for_pch (void)
11656 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11657 for (; iter != cl_option_hash_table->end (); ++iter)
11658 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11659 TREE_TARGET_GLOBALS (*iter) = NULL;
11662 /* Determine the "ultimate origin" of a block. */
11664 tree
11665 block_ultimate_origin (const_tree block)
11667 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11669 if (origin == NULL_TREE)
11670 return NULL_TREE;
11671 else
11673 gcc_checking_assert ((DECL_P (origin)
11674 && DECL_ORIGIN (origin) == origin)
11675 || BLOCK_ORIGIN (origin) == origin);
11676 return origin;
11680 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11681 no instruction. */
11683 bool
11684 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11686 /* Do not strip casts into or out of differing address spaces. */
11687 if (POINTER_TYPE_P (outer_type)
11688 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11690 if (!POINTER_TYPE_P (inner_type)
11691 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11692 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11693 return false;
11695 else if (POINTER_TYPE_P (inner_type)
11696 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11698 /* We already know that outer_type is not a pointer with
11699 a non-generic address space. */
11700 return false;
11703 /* Use precision rather than machine mode when we can, which gives
11704 the correct answer even for submode (bit-field) types. */
11705 if ((INTEGRAL_TYPE_P (outer_type)
11706 || POINTER_TYPE_P (outer_type)
11707 || TREE_CODE (outer_type) == OFFSET_TYPE)
11708 && (INTEGRAL_TYPE_P (inner_type)
11709 || POINTER_TYPE_P (inner_type)
11710 || TREE_CODE (inner_type) == OFFSET_TYPE))
11711 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11713 /* Otherwise fall back on comparing machine modes (e.g. for
11714 aggregate types, floats). */
11715 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
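/* For example, a conversion between int and unsigned int has equal
   precision and is a nop, whereas a conversion from int to a wider long
   is not, and a cast between pointers to different address spaces is
   never considered a nop.  */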
11718 /* Return true iff conversion in EXP generates no instruction. Mark
11719 it inline so that we fully inline into the stripping functions even
11720 though we have two uses of this function. */
11722 static inline bool
11723 tree_nop_conversion (const_tree exp)
11725 tree outer_type, inner_type;
11727 if (location_wrapper_p (exp))
11728 return true;
11729 if (!CONVERT_EXPR_P (exp)
11730 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11731 return false;
11733 outer_type = TREE_TYPE (exp);
11734 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11735 if (!inner_type || inner_type == error_mark_node)
11736 return false;
11738 return tree_nop_conversion_p (outer_type, inner_type);
11741 /* Return true iff conversion in EXP generates no instruction. Don't
11742 consider conversions changing the signedness. */
11744 static bool
11745 tree_sign_nop_conversion (const_tree exp)
11747 tree outer_type, inner_type;
11749 if (!tree_nop_conversion (exp))
11750 return false;
11752 outer_type = TREE_TYPE (exp);
11753 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11755 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11756 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11759 /* Strip conversions from EXP according to tree_nop_conversion and
11760 return the resulting expression. */
11762 tree
11763 tree_strip_nop_conversions (tree exp)
11765 while (tree_nop_conversion (exp))
11766 exp = TREE_OPERAND (exp, 0);
11767 return exp;
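/* For example, if EXP was built as (unsigned int) (int) x and all the
   types involved share the same precision, the loop above peels both
   casts and returns 'x' itself, while a widening cast such as (long) x
   on an LP64 target is left in place.  */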
11770 /* Strip conversions from EXP according to tree_sign_nop_conversion
11771 and return the resulting expression. */
11773 tree
11774 tree_strip_sign_nop_conversions (tree exp)
11776 while (tree_sign_nop_conversion (exp))
11777 exp = TREE_OPERAND (exp, 0);
11778 return exp;
11781 /* Avoid any floating point extensions from EXP. */
11782 tree
11783 strip_float_extensions (tree exp)
11785 tree sub, expt, subt;
11787 /* For a floating point constant, look up the narrowest type that can hold
11788 it properly and handle it like (type)(narrowest_type)constant.
11789 This way we can optimize for instance a=a*2.0 where "a" is float
11790 but 2.0 is a double constant. */
11791 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11793 REAL_VALUE_TYPE orig;
11794 tree type = NULL;
11796 orig = TREE_REAL_CST (exp);
11797 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11798 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11799 type = float_type_node;
11800 else if (TYPE_PRECISION (TREE_TYPE (exp))
11801 > TYPE_PRECISION (double_type_node)
11802 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11803 type = double_type_node;
11804 if (type)
11805 return build_real_truncate (type, orig);
11808 if (!CONVERT_EXPR_P (exp))
11809 return exp;
11811 sub = TREE_OPERAND (exp, 0);
11812 subt = TREE_TYPE (sub);
11813 expt = TREE_TYPE (exp);
11815 if (!FLOAT_TYPE_P (subt))
11816 return exp;
11818 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11819 return exp;
11821 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11822 return exp;
11824 return strip_float_extensions (sub);
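/* For instance, the double REAL_CST 2.0 truncates exactly to float and
   so comes back as a float constant, and (double) f with 'f' of type
   float comes back as plain 'f'; this lets a = a * 2.0 with float 'a'
   be evaluated entirely in single precision.  */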
11827 /* Strip out all handled components that produce invariant
11828 offsets. */
11830 const_tree
11831 strip_invariant_refs (const_tree op)
11833 while (handled_component_p (op))
11835 switch (TREE_CODE (op))
11837 case ARRAY_REF:
11838 case ARRAY_RANGE_REF:
11839 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11840 || TREE_OPERAND (op, 2) != NULL_TREE
11841 || TREE_OPERAND (op, 3) != NULL_TREE)
11842 return NULL;
11843 break;
11845 case COMPONENT_REF:
11846 if (TREE_OPERAND (op, 2) != NULL_TREE)
11847 return NULL;
11848 break;
11850 default:;
11852 op = TREE_OPERAND (op, 0);
11855 return op;
11858 static GTY(()) tree gcc_eh_personality_decl;
11860 /* Return the GCC personality function decl. */
11862 tree
11863 lhd_gcc_personality (void)
11865 if (!gcc_eh_personality_decl)
11866 gcc_eh_personality_decl = build_personality_function ("gcc");
11867 return gcc_eh_personality_decl;
11870 /* TARGET is the call target of a GIMPLE call statement
11871 (obtained by gimple_call_fn). Return true if it is an
11872 OBJ_TYPE_REF representing a virtual call of a C++ method.
11873 (As opposed to OBJ_TYPE_REF representing objc calls
11874 through a cast where middle-end devirtualization machinery
11875 can't apply.) FOR_DUMP_P is true when being called from
11876 the dump routines. */
11878 bool
11879 virtual_method_call_p (const_tree target, bool for_dump_p)
11881 if (TREE_CODE (target) != OBJ_TYPE_REF)
11882 return false;
11883 tree t = TREE_TYPE (target);
11884 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
11885 t = TREE_TYPE (t);
11886 if (TREE_CODE (t) == FUNCTION_TYPE)
11887 return false;
11888 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
11889 /* If we do not have a BINFO associated, it means that the type was built
11890 without devirtualization enabled. Do not consider this a virtual
11891 call. */
11892 if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
11893 return false;
11894 return true;
11897 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
11899 static tree
11900 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
11902 unsigned int i;
11903 tree base_binfo, b;
11905 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
11906 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
11907 && types_same_for_odr (TREE_TYPE (base_binfo), type))
11908 return base_binfo;
11909 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
11910 return b;
11911 return NULL;
11914 /* Try to find a base info of BINFO that would have its field decl at offset
11915 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11916 found, return it; otherwise return NULL_TREE. */
11918 tree
11919 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
11921 tree type = BINFO_TYPE (binfo);
11923 while (true)
11925 HOST_WIDE_INT pos, size;
11926 tree fld;
11927 int i;
11929 if (types_same_for_odr (type, expected_type))
11930 return binfo;
11931 if (maybe_lt (offset, 0))
11932 return NULL_TREE;
11934 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11936 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
11937 continue;
11939 pos = int_bit_position (fld);
11940 size = tree_to_uhwi (DECL_SIZE (fld));
11941 if (known_in_range_p (offset, pos, size))
11942 break;
11944 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11945 return NULL_TREE;
11947 /* Offset 0 indicates the primary base, whose vtable contents are
11948 represented in the binfo for the derived class. */
11949 else if (maybe_ne (offset, 0))
11951 tree found_binfo = NULL, base_binfo;
11952 /* Offsets in BINFO are in bytes relative to the whole structure
11953 while POS is in bits relative to the containing field. */
11954 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
11955 / BITS_PER_UNIT);
11957 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
11958 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
11959 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11961 found_binfo = base_binfo;
11962 break;
11964 if (found_binfo)
11965 binfo = found_binfo;
11966 else
11967 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
11968 binfo_offset);
11971 type = TREE_TYPE (fld);
11972 offset -= pos;
11976 /* Returns true if X is a typedef decl. */
11978 bool
11979 is_typedef_decl (const_tree x)
11981 return (x && TREE_CODE (x) == TYPE_DECL
11982 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11985 /* Returns true iff TYPE is a type variant created for a typedef. */
11987 bool
11988 typedef_variant_p (const_tree type)
11990 return is_typedef_decl (TYPE_NAME (type));
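/* E.g. for 'typedef int myint;' the C/C++ front ends create a variant
   of 'int' whose TYPE_NAME is the TYPE_DECL for 'myint' with
   DECL_ORIGINAL_TYPE set to 'int', so typedef_variant_p is true for
   that variant but false for plain 'int'.  */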
11993 /* PR 84195: Replace control characters in "unescaped" with their
11994 escaped equivalents. Allow newlines if -fmessage-length has
11995 been set to a non-zero value. This is done here, rather than
11996 where the attribute is recorded, as the message length can
11997 change between these two locations. */
11999 void
12000 escaped_string::escape (const char *unescaped)
12002 char *escaped;
12003 size_t i, new_i, len;
12005 if (m_owned)
12006 free (m_str);
12008 m_str = const_cast<char *> (unescaped);
12009 m_owned = false;
12011 if (unescaped == NULL || *unescaped == 0)
12012 return;
12014 len = strlen (unescaped);
12015 escaped = NULL;
12016 new_i = 0;
12018 for (i = 0; i < len; i++)
12020 char c = unescaped[i];
12022 if (!ISCNTRL (c))
12024 if (escaped)
12025 escaped[new_i++] = c;
12026 continue;
12029 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
12031 if (escaped == NULL)
12033 /* We only allocate space for a new string if we
12034 actually encounter a control character that
12035 needs replacing. */
12036 escaped = (char *) xmalloc (len * 2 + 1);
12037 strncpy (escaped, unescaped, i);
12038 new_i = i;
12041 escaped[new_i++] = '\\';
12043 switch (c)
12045 case '\a': escaped[new_i++] = 'a'; break;
12046 case '\b': escaped[new_i++] = 'b'; break;
12047 case '\f': escaped[new_i++] = 'f'; break;
12048 case '\n': escaped[new_i++] = 'n'; break;
12049 case '\r': escaped[new_i++] = 'r'; break;
12050 case '\t': escaped[new_i++] = 't'; break;
12051 case '\v': escaped[new_i++] = 'v'; break;
12052 default: escaped[new_i++] = '?'; break;
12055 else if (escaped)
12056 escaped[new_i++] = c;
12059 if (escaped)
12061 escaped[new_i] = 0;
12062 m_str = escaped;
12063 m_owned = true;
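/* As an example, escaping "two\nlines\tend" with line wrapping
   disabled yields "two\\nlines\\tend"; when the input contains no
   control characters at all, no copy is allocated and m_str simply
   aliases the original string.  */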
12067 /* Warn about a use of an identifier which was marked deprecated. Returns
12068 whether a warning was given. */
12070 bool
12071 warn_deprecated_use (tree node, tree attr)
12073 escaped_string msg;
12075 if (node == 0 || !warn_deprecated_decl)
12076 return false;
12078 if (!attr)
12080 if (DECL_P (node))
12081 attr = DECL_ATTRIBUTES (node);
12082 else if (TYPE_P (node))
12084 tree decl = TYPE_STUB_DECL (node);
12085 if (decl)
12086 attr = lookup_attribute ("deprecated",
12087 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12091 if (attr)
12092 attr = lookup_attribute ("deprecated", attr);
12094 if (attr)
12095 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12097 bool w = false;
12098 if (DECL_P (node))
12100 auto_diagnostic_group d;
12101 if (msg)
12102 w = warning (OPT_Wdeprecated_declarations,
12103 "%qD is deprecated: %s", node, (const char *) msg);
12104 else
12105 w = warning (OPT_Wdeprecated_declarations,
12106 "%qD is deprecated", node);
12107 if (w)
12108 inform (DECL_SOURCE_LOCATION (node), "declared here");
12110 else if (TYPE_P (node))
12112 tree what = NULL_TREE;
12113 tree decl = TYPE_STUB_DECL (node);
12115 if (TYPE_NAME (node))
12117 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12118 what = TYPE_NAME (node);
12119 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12120 && DECL_NAME (TYPE_NAME (node)))
12121 what = DECL_NAME (TYPE_NAME (node));
12124 auto_diagnostic_group d;
12125 if (what)
12127 if (msg)
12128 w = warning (OPT_Wdeprecated_declarations,
12129 "%qE is deprecated: %s", what, (const char *) msg);
12130 else
12131 w = warning (OPT_Wdeprecated_declarations,
12132 "%qE is deprecated", what);
12134 else
12136 if (msg)
12137 w = warning (OPT_Wdeprecated_declarations,
12138 "type is deprecated: %s", (const char *) msg);
12139 else
12140 w = warning (OPT_Wdeprecated_declarations,
12141 "type is deprecated");
12144 if (w && decl)
12145 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12148 return w;
12151 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12152 somewhere in it. */
12154 bool
12155 contains_bitfld_component_ref_p (const_tree ref)
12157 while (handled_component_p (ref))
12159 if (TREE_CODE (ref) == COMPONENT_REF
12160 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12161 return true;
12162 ref = TREE_OPERAND (ref, 0);
12165 return false;
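/* E.g. given 'struct S { int i : 3; int j; } s;', the reference 's.i'
   is a COMPONENT_REF whose FIELD_DECL has DECL_BIT_FIELD set, so this
   returns true, whereas 's.j' and references not involving bit-fields
   return false.  */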
12168 /* Try to determine whether a TRY_CATCH expression can fall through.
12169 This is a subroutine of block_may_fallthru. */
12171 static bool
12172 try_catch_may_fallthru (const_tree stmt)
12174 tree_stmt_iterator i;
12176 /* If the TRY block can fall through, the whole TRY_CATCH can
12177 fall through. */
12178 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12179 return true;
12181 i = tsi_start (TREE_OPERAND (stmt, 1));
12182 switch (TREE_CODE (tsi_stmt (i)))
12184 case CATCH_EXPR:
12185 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12186 catch expression and a body. The whole TRY_CATCH may fall
12187 through iff any of the catch bodies falls through. */
12188 for (; !tsi_end_p (i); tsi_next (&i))
12190 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12191 return true;
12193 return false;
12195 case EH_FILTER_EXPR:
12196 /* The exception filter expression only matters if there is an
12197 exception. If the exception does not match EH_FILTER_TYPES,
12198 we will execute EH_FILTER_FAILURE, and we will fall through
12199 if that falls through. If the exception does match
12200 EH_FILTER_TYPES, the stack unwinder will continue up the
12201 stack, so we will not fall through. We don't know whether we
12202 will throw an exception which matches EH_FILTER_TYPES or not,
12203 so we just ignore EH_FILTER_TYPES and assume that we might
12204 throw an exception which doesn't match. */
12205 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12207 default:
12208 /* This case represents statements to be executed when an
12209 exception occurs. Those statements are implicitly followed
12210 by a RESX statement to resume execution after the exception.
12211 So in this case the TRY_CATCH never falls through. */
12212 return false;
12216 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12217 need not be 100% accurate; simply be conservative and return true if we
12218 don't know. This is used only to avoid stupidly generating extra code.
12219 If we're wrong, we'll just delete the extra code later. */
12221 bool
12222 block_may_fallthru (const_tree block)
12224 /* This CONST_CAST is okay because expr_last returns its argument
12225 unmodified and we assign it to a const_tree. */
12226 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12228 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12230 case GOTO_EXPR:
12231 case RETURN_EXPR:
12232 /* Easy cases. If the last statement of the block implies
12233 control transfer, then we can't fall through. */
12234 return false;
12236 case SWITCH_EXPR:
12237 /* If there is a default: label or case labels cover all possible
12238 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12239 to some case label in all cases and all we care about is whether the
12240 SWITCH_BODY falls through. */
12241 if (SWITCH_ALL_CASES_P (stmt))
12242 return block_may_fallthru (SWITCH_BODY (stmt));
12243 return true;
12245 case COND_EXPR:
12246 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12247 return true;
12248 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12250 case BIND_EXPR:
12251 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12253 case TRY_CATCH_EXPR:
12254 return try_catch_may_fallthru (stmt);
12256 case TRY_FINALLY_EXPR:
12257 /* The finally clause is always executed after the try clause,
12258 so if it does not fall through, then the try-finally will not
12259 fall through. Otherwise, if the try clause does not fall
12260 through, then when the finally clause falls through it will
12261 resume execution wherever the try clause was going. So the
12262 whole try-finally will only fall through if both the try
12263 clause and the finally clause fall through. */
12264 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12265 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12267 case EH_ELSE_EXPR:
12268 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12270 case MODIFY_EXPR:
12271 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12272 stmt = TREE_OPERAND (stmt, 1);
12273 else
12274 return true;
12275 /* FALLTHRU */
12277 case CALL_EXPR:
12278 /* Functions that do not return do not fall through. */
12279 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12281 case CLEANUP_POINT_EXPR:
12282 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12284 case TARGET_EXPR:
12285 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12287 case ERROR_MARK:
12288 return true;
12290 default:
12291 return lang_hooks.block_may_fallthru (stmt);
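/* As a simple example, a statement list ending in 'return x;' or
   'goto l;' cannot fall through, nor can one ending in a call to a
   noreturn function such as abort (), while one ending in a plain
   assignment can; when in doubt the function errs on the side of
   returning true.  */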
12295 /* True if we are using EH to handle cleanups. */
12296 static bool using_eh_for_cleanups_flag = false;
12298 /* This routine is called from front ends to indicate eh should be used for
12299 cleanups. */
12300 void
12301 using_eh_for_cleanups (void)
12303 using_eh_for_cleanups_flag = true;
12306 /* Query whether EH is used for cleanups. */
12307 bool
12308 using_eh_for_cleanups_p (void)
12310 return using_eh_for_cleanups_flag;
12313 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12314 const char *
12315 get_tree_code_name (enum tree_code code)
12317 const char *invalid = "<invalid tree code>";
12319 /* The tree_code enum promotes to signed, but we could be getting
12320 invalid values, so force an unsigned comparison. */
12321 if (unsigned (code) >= MAX_TREE_CODES)
12323 if ((unsigned)code == 0xa5a5)
12324 return "ggc_freed";
12325 return invalid;
12328 return tree_code_name[code];
12331 /* Drops the TREE_OVERFLOW flag from T. */
12333 tree
12334 drop_tree_overflow (tree t)
12336 gcc_checking_assert (TREE_OVERFLOW (t));
12338 /* For tree codes with a sharing machinery re-build the result. */
12339 if (poly_int_tree_p (t))
12340 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12342 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12343 and canonicalize the result. */
12344 if (TREE_CODE (t) == VECTOR_CST)
12346 tree_vector_builder builder;
12347 builder.new_unary_operation (TREE_TYPE (t), t, true);
12348 unsigned int count = builder.encoded_nelts ();
12349 for (unsigned int i = 0; i < count; ++i)
12351 tree elt = VECTOR_CST_ELT (t, i);
12352 if (TREE_OVERFLOW (elt))
12353 elt = drop_tree_overflow (elt);
12354 builder.quick_push (elt);
12356 return builder.build ();
12359 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12360 and drop the flag. */
12361 t = copy_node (t);
12362 TREE_OVERFLOW (t) = 0;
12364 /* For constants that contain nested constants, drop the flag
12365 from those as well. */
12366 if (TREE_CODE (t) == COMPLEX_CST)
12368 if (TREE_OVERFLOW (TREE_REALPART (t)))
12369 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12370 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12371 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12374 return t;
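/* E.g. for an INTEGER_CST produced by folding INT_MAX + 1, which
   carries TREE_OVERFLOW, the wide_int_to_tree call above returns the
   shared constant of the same value without the flag; a COMPLEX_CST is
   copied and its parts cleaned recursively, and a VECTOR_CST is
   rebuilt element by element.  */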
12377 /* Given a memory reference expression T, return its base address.
12378 The base address of a memory reference expression is the main
12379 object being referenced. For instance, the base address for
12380 'array[i].fld[j]' is 'array'. You can think of this as stripping
12381 away the offset part from a memory address.
12383 This function calls handled_component_p to strip away all the inner
12384 parts of the memory reference until it reaches the base object. */
12386 tree
12387 get_base_address (tree t)
12389 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12390 t = TREE_OPERAND (t, 0);
12391 while (handled_component_p (t))
12392 t = TREE_OPERAND (t, 0);
12394 if ((TREE_CODE (t) == MEM_REF
12395 || TREE_CODE (t) == TARGET_MEM_REF)
12396 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12397 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12399 return t;
12402 /* Return a tree of sizetype representing the size, in bytes, of the element
12403 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12405 tree
12406 array_ref_element_size (tree exp)
12408 tree aligned_size = TREE_OPERAND (exp, 3);
12409 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12410 location_t loc = EXPR_LOCATION (exp);
12412 /* If a size was specified in the ARRAY_REF, it's the size measured
12413 in alignment units of the element type. So multiply by that value. */
12414 if (aligned_size)
12416 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12417 sizetype from another type of the same width and signedness. */
12418 if (TREE_TYPE (aligned_size) != sizetype)
12419 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12420 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12421 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12424 /* Otherwise, take the size from that of the element type. Substitute
12425 any PLACEHOLDER_EXPR that we have. */
12426 else
12427 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12430 /* Return a tree representing the lower bound of the array mentioned in
12431 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12433 tree
12434 array_ref_low_bound (tree exp)
12436 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12438 /* If a lower bound is specified in EXP, use it. */
12439 if (TREE_OPERAND (exp, 2))
12440 return TREE_OPERAND (exp, 2);
12442 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12443 substituting for a PLACEHOLDER_EXPR as needed. */
12444 if (domain_type && TYPE_MIN_VALUE (domain_type))
12445 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12447 /* Otherwise, return a zero of the appropriate type. */
12448 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12449 return (idxtype == error_mark_node
12450 ? integer_zero_node : build_int_cst (idxtype, 0));
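/* For a C array such as 'int a[10]' the domain minimum is 0, so 'a[i]'
   gets a zero low bound, while for an Ada or Fortran array declared
   over the range 5 .. 10 the TYPE_MIN_VALUE of the domain, here 5, is
   returned instead.  */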
12453 /* Return a tree representing the upper bound of the array mentioned in
12454 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12456 tree
12457 array_ref_up_bound (tree exp)
12459 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12461 /* If there is a domain type and it has an upper bound, use it, substituting
12462 for a PLACEHOLDER_EXPR as needed. */
12463 if (domain_type && TYPE_MAX_VALUE (domain_type))
12464 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12466 /* Otherwise fail. */
12467 return NULL_TREE;
12470 /* Returns true if REF is an array reference, component reference,
12471 or memory reference to an array at the end of a structure.
12472 If this is the case, the array may be allocated larger
12473 than its upper bound implies. */
12475 bool
12476 array_at_struct_end_p (tree ref)
12478 tree atype;
12480 if (TREE_CODE (ref) == ARRAY_REF
12481 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12483 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12484 ref = TREE_OPERAND (ref, 0);
12486 else if (TREE_CODE (ref) == COMPONENT_REF
12487 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12488 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12489 else if (TREE_CODE (ref) == MEM_REF)
12491 tree arg = TREE_OPERAND (ref, 0);
12492 if (TREE_CODE (arg) == ADDR_EXPR)
12493 arg = TREE_OPERAND (arg, 0);
12494 tree argtype = TREE_TYPE (arg);
12495 if (TREE_CODE (argtype) == RECORD_TYPE)
12497 if (tree fld = last_field (argtype))
12499 atype = TREE_TYPE (fld);
12500 if (TREE_CODE (atype) != ARRAY_TYPE)
12501 return false;
12502 if (VAR_P (arg) && DECL_SIZE (fld))
12503 return false;
12505 else
12506 return false;
12508 else
12509 return false;
12511 else
12512 return false;
12514 if (TREE_CODE (ref) == STRING_CST)
12515 return false;
12517 tree ref_to_array = ref;
12518 while (handled_component_p (ref))
12520 /* If the reference chain contains a component reference to a
12521 non-union type and another field follows, the reference
12522 is not at the end of a structure. */
12523 if (TREE_CODE (ref) == COMPONENT_REF)
12525 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12527 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12528 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12529 nextf = DECL_CHAIN (nextf);
12530 if (nextf)
12531 return false;
12534 /* If we have a multi-dimensional array we do not consider
12535 a non-innermost dimension as a flex array if the whole
12536 multi-dimensional array is at struct end.
12537 The same holds for an array of aggregates with a trailing array
12538 member. */
12539 else if (TREE_CODE (ref) == ARRAY_REF)
12540 return false;
12541 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12543 /* If we view an underlying object as something else, then what we
12544 have gathered up to now is what we have to rely on. */
12545 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12546 break;
12547 else
12548 gcc_unreachable ();
12550 ref = TREE_OPERAND (ref, 0);
12553 /* The array is now at struct end. Treat flexible arrays as
12554 always subject to extension, even into just the padding constrained by
12555 an underlying decl. */
12556 if (! TYPE_SIZE (atype)
12557 || ! TYPE_DOMAIN (atype)
12558 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12559 return true;
12561 /* If the reference is based on a declared entity, the size of the array
12562 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
12563 ref = get_base_address (ref);
12564 if (ref
12565 && DECL_P (ref)
12566 && !(flag_unconstrained_commons
12567 && VAR_P (ref) && DECL_COMMON (ref))
12568 && DECL_SIZE_UNIT (ref)
12569 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12571 /* Check whether the array domain covers all of the available
12572 padding. */
12573 poly_int64 offset;
12574 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12575 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12576 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12577 return true;
12578 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12579 return true;
12581 /* If at least one extra element fits, it is a flexarray. */
12582 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12583 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
12584 + 2)
12585 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
12586 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
12587 return true;
12589 return false;
12592 return true;
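/* For instance, given 'struct S { int n; char data[]; } *p;', an
   access like 'p->data[i]' refers to a flexible array member whose
   domain has no TYPE_MAX_VALUE, so the function returns true; the same
   array placed before another member, or an array member in the middle
   of the structure, makes it return false.  */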
12595 /* Return a tree representing the offset, in bytes, of the field referenced
12596 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12598 tree
12599 component_ref_field_offset (tree exp)
12601 tree aligned_offset = TREE_OPERAND (exp, 2);
12602 tree field = TREE_OPERAND (exp, 1);
12603 location_t loc = EXPR_LOCATION (exp);
12605 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12606 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12607 value. */
12608 if (aligned_offset)
12610 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12611 sizetype from another type of the same width and signedness. */
12612 if (TREE_TYPE (aligned_offset) != sizetype)
12613 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12614 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12615 size_int (DECL_OFFSET_ALIGN (field)
12616 / BITS_PER_UNIT));
12619 /* Otherwise, take the offset from that of the field. Substitute
12620 any PLACEHOLDER_EXPR that we have. */
12621 else
12622 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12625 /* Given the initializer INIT, return the initializer for the field
12626 DECL if it exists, otherwise null. Used to obtain the initializer
12627 for a flexible array member and determine its size. */
12629 static tree
12630 get_initializer_for (tree init, tree decl)
12632 STRIP_NOPS (init);
12634 tree fld, fld_init;
12635 unsigned HOST_WIDE_INT i;
12636 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
12638 if (decl == fld)
12639 return fld_init;
12641 if (TREE_CODE (fld) == CONSTRUCTOR)
12643 fld_init = get_initializer_for (fld_init, decl);
12644 if (fld_init)
12645 return fld_init;
12649 return NULL_TREE;
12652 /* Determines the size of the member referenced by the COMPONENT_REF
12653 REF, using its initializer expression if necessary in order to
12654 determine the size of an initialized flexible array member.
12655 If non-null, set *SAM when REF refers to an interior zero-length
12656 array or a trailing one-element array.
12657 Returns the size as sizetype (which might be zero for an object
12658 with an uninitialized flexible array member) or null if the size
12659 cannot be determined. */
12661 tree
12662 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
12664 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
12666 special_array_member sambuf;
12667 if (!sam)
12668 sam = &sambuf;
12669 *sam = special_array_member::none;
12671 /* The object/argument referenced by the COMPONENT_REF and its type. */
12672 tree arg = TREE_OPERAND (ref, 0);
12673 tree argtype = TREE_TYPE (arg);
12674 /* The referenced member. */
12675 tree member = TREE_OPERAND (ref, 1);
12677 tree memsize = DECL_SIZE_UNIT (member);
12678 if (memsize)
12680 tree memtype = TREE_TYPE (member);
12681 if (TREE_CODE (memtype) != ARRAY_TYPE)
12682 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
12683 to the type of a class with a virtual base which doesn't
12684 reflect the size of the virtual's members (see pr97595).
12685 If that's the case fail for now and implement something
12686 more robust in the future. */
12687 return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
12688 ? memsize : NULL_TREE);
12690 bool trailing = array_at_struct_end_p (ref);
12691 bool zero_length = integer_zerop (memsize);
12692 if (!trailing && !zero_length)
12693 /* MEMBER is either an interior array or an array with
12694 more than one element. */
12695 return memsize;
12697 if (zero_length)
12699 if (trailing)
12700 *sam = special_array_member::trail_0;
12701 else
12703 *sam = special_array_member::int_0;
12704 memsize = NULL_TREE;
12708 if (!zero_length)
12709 if (tree dom = TYPE_DOMAIN (memtype))
12710 if (tree min = TYPE_MIN_VALUE (dom))
12711 if (tree max = TYPE_MAX_VALUE (dom))
12712 if (TREE_CODE (min) == INTEGER_CST
12713 && TREE_CODE (max) == INTEGER_CST)
12715 offset_int minidx = wi::to_offset (min);
12716 offset_int maxidx = wi::to_offset (max);
12717 offset_int neltsm1 = maxidx - minidx;
12718 if (neltsm1 > 0)
12719 /* MEMBER is an array with more than one element. */
12720 return memsize;
12722 if (neltsm1 == 0)
12723 *sam = special_array_member::trail_1;
12726 /* For a reference to a zero- or one-element array member of a union
12727 use the size of the union instead of the size of the member. */
12728 if (TREE_CODE (argtype) == UNION_TYPE)
12729 memsize = TYPE_SIZE_UNIT (argtype);
12732 /* MEMBER is either a bona fide flexible array member, or a zero-length
12733 array member, or an array of length one treated as such. */
12735 /* If the reference is to a declared object and the member a true
12736 flexible array, try to determine its size from its initializer. */
12737 poly_int64 baseoff = 0;
12738 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
12739 if (!base || !VAR_P (base))
12741 if (*sam != special_array_member::int_0)
12742 return NULL_TREE;
12744 if (TREE_CODE (arg) != COMPONENT_REF)
12745 return NULL_TREE;
12747 base = arg;
12748 while (TREE_CODE (base) == COMPONENT_REF)
12749 base = TREE_OPERAND (base, 0);
12750 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
12753 /* BASE is the declared object of which MEMBER is either a member
12754 or that is cast to ARGTYPE (e.g., a char buffer used to store
12755 an ARGTYPE object). */
12756 tree basetype = TREE_TYPE (base);
12758 /* Determine the base type of the referenced object. If it's
12759 the same as ARGTYPE and MEMBER has a known size, return it. */
12760 tree bt = basetype;
12761 if (*sam != special_array_member::int_0)
12762 while (TREE_CODE (bt) == ARRAY_TYPE)
12763 bt = TREE_TYPE (bt);
12764 bool typematch = useless_type_conversion_p (argtype, bt);
12765 if (memsize && typematch)
12766 return memsize;
12768 memsize = NULL_TREE;
12770 if (typematch)
12771 /* MEMBER is a true flexible array member. Compute its size from
12772 the initializer of the BASE object if it has one. */
12773 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
12774 if (init != error_mark_node)
12776 init = get_initializer_for (init, member);
12777 if (init)
12779 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
12780 if (tree refsize = TYPE_SIZE_UNIT (argtype))
12782 /* Use the larger of the initializer size and the tail
12783 padding in the enclosing struct. */
12784 poly_int64 rsz = tree_to_poly_int64 (refsize);
12785 rsz -= baseoff;
12786 if (known_lt (tree_to_poly_int64 (memsize), rsz))
12787 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
12790 baseoff = 0;
12794 if (!memsize)
12796 if (typematch)
12798 if (DECL_P (base)
12799 && DECL_EXTERNAL (base)
12800 && bt == basetype
12801 && *sam != special_array_member::int_0)
12802 /* The size of a flexible array member of an extern struct
12803 with no initializer cannot be determined (it's defined
12804 in another translation unit and can have an initializer
12805 with an arbitrary number of elements). */
12806 return NULL_TREE;
12808 /* Use the size of the base struct or, for interior zero-length
12809 arrays, the size of the enclosing type. */
12810 memsize = TYPE_SIZE_UNIT (bt);
12812 else if (DECL_P (base))
12813 /* Use the size of the BASE object (possibly an array of some
12814 other type such as char used to store the struct). */
12815 memsize = DECL_SIZE_UNIT (base);
12816 else
12817 return NULL_TREE;
12820 /* If the flexible array member has a known size use the greater
12821 of it and the tail padding in the enclosing struct.
12822 Otherwise, when the size of the flexible array member is unknown
12823 and the referenced object is not a struct, use the size of its
12824 type when known. This detects sizes of array buffers when cast
12825 to struct types with flexible array members. */
12826 if (memsize)
12828 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
12829 if (known_lt (baseoff, memsz64))
12831 memsz64 -= baseoff;
12832 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
12834 return size_zero_node;
12837 /* Return "don't know" for an external non-array object since its
12838 flexible array member can be initialized to have any number of
12839 elements. Otherwise, return zero because the flexible array
12840 member has no elements. */
12841 return (DECL_P (base)
12842 && DECL_EXTERNAL (base)
12843 && (!typematch
12844 || TREE_CODE (basetype) != ARRAY_TYPE)
12845 ? NULL_TREE : size_zero_node);
12848 /* Return the machine mode of T. For vectors, returns the mode of the
12849 inner type. The main use case is to feed the result to HONOR_NANS,
12850 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12852 machine_mode
12853 element_mode (const_tree t)
12855 if (!TYPE_P (t))
12856 t = TREE_TYPE (t);
12857 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12858 t = TREE_TYPE (t);
12859 return TYPE_MODE (t);
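/* E.g. for a vector of four floats this returns SFmode and for a
   COMPLEX_TYPE of double it returns DFmode, rather than the V4SFmode,
   DCmode or BLKmode that TYPE_MODE would report for the whole
   aggregate, which is what callers such as HONOR_NANS expect.  */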
12862 /* Vector types need to re-check the target flags each time we report
12863 the machine mode. We need to do this because attribute target can
12864 change the result of vector_mode_supported_p and have_regs_of_mode
12865 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12866 change on a per-function basis. */
12867 /* ??? Possibly a better solution is to run through all the types
12868 referenced by a function and re-compute the TYPE_MODE once, rather
12869 than make the TYPE_MODE macro call a function. */
12871 machine_mode
12872 vector_type_mode (const_tree t)
12874 machine_mode mode;
12876 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
12878 mode = t->type_common.mode;
12879 if (VECTOR_MODE_P (mode)
12880 && (!targetm.vector_mode_supported_p (mode)
12881 || !have_regs_of_mode[mode]))
12883 scalar_int_mode innermode;
12885 /* For integers, try mapping it to a same-sized scalar mode. */
12886 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
12888 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
12889 * GET_MODE_BITSIZE (innermode));
12890 scalar_int_mode mode;
12891 if (int_mode_for_size (size, 0).exists (&mode)
12892 && have_regs_of_mode[mode])
12893 return mode;
12896 return BLKmode;
12899 return mode;
12902 /* Return the size in bits of each element of vector type TYPE. */
12904 unsigned int
12905 vector_element_bits (const_tree type)
12907 gcc_checking_assert (VECTOR_TYPE_P (type));
12908 if (VECTOR_BOOLEAN_TYPE_P (type))
12909 return TYPE_PRECISION (TREE_TYPE (type));
12910 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
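/* E.g. a vector of four 32-bit integers yields 32 here, while for a
   vector boolean type, whose elements may occupy only a few bits each,
   the element precision is used instead of the element TYPE_SIZE.  */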
12913 /* Calculate the size in bits of each element of vector type TYPE
12914 and return the result as a tree of type bitsizetype. */
12916 tree
12917 vector_element_bits_tree (const_tree type)
12919 gcc_checking_assert (VECTOR_TYPE_P (type));
12920 if (VECTOR_BOOLEAN_TYPE_P (type))
12921 return bitsize_int (vector_element_bits (type));
12922 return TYPE_SIZE (TREE_TYPE (type));
12925 /* Verify that basic properties of T match TV and thus T can be a variant of
12926 TV. TV should be the more specified variant (i.e. the main variant). */
12928 static bool
12929 verify_type_variant (const_tree t, tree tv)
12931 /* Type variant can differ by:
12933 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12934 ENCODE_QUAL_ADDR_SPACE.
12935 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P;
12936 in this case some values may not be set in the variant types
12937 (see TYPE_COMPLETE_P checks).
12938 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12939 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12940 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12941 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12942 - during LTO by TYPE_CONTEXT if the type is TYPE_FILE_SCOPE_P;
12943 this is necessary to make it possible to merge types from different TUs
12944 - arrays, pointers and references may have TREE_TYPE that is a variant
12945 of TREE_TYPE of their main variants.
12946 - aggregates may have a new TYPE_FIELDS list that lists variants of
12947 the main variant TYPE_FIELDS.
12948 - vector types may differ by TYPE_VECTOR_OPAQUE
12951 /* Convenience macro for matching individual fields. */
12952 #define verify_variant_match(flag) \
12953 do { \
12954 if (flag (tv) != flag (t)) \
12956 error ("type variant differs by %s", #flag); \
12957 debug_tree (tv); \
12958 return false; \
12960 } while (false)
12962 /* tree_base checks. */
12964 verify_variant_match (TREE_CODE);
12965 /* FIXME: Ada builds non-artificial variants of artificial types. */
12966 #if 0
12967 if (TYPE_ARTIFICIAL (tv))
12968 verify_variant_match (TYPE_ARTIFICIAL);
12969 #endif
12970 if (POINTER_TYPE_P (tv))
12971 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12972 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
12973 verify_variant_match (TYPE_UNSIGNED);
12974 verify_variant_match (TYPE_PACKED);
12975 if (TREE_CODE (t) == REFERENCE_TYPE)
12976 verify_variant_match (TYPE_REF_IS_RVALUE);
12977 if (AGGREGATE_TYPE_P (t))
12978 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
12979 else
12980 verify_variant_match (TYPE_SATURATING);
12981 /* FIXME: This check triggers during libstdc++ builds. */
12982 #if 0
12983 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
12984 verify_variant_match (TYPE_FINAL_P);
12985 #endif
12987 /* tree_type_common checks. */
12989 if (COMPLETE_TYPE_P (t))
12991 verify_variant_match (TYPE_MODE);
12992 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
12993 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
12994 verify_variant_match (TYPE_SIZE);
12995 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
12996 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
12997 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
12999 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13000 TYPE_SIZE_UNIT (tv), 0));
13001 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13002 debug_tree (tv);
13003 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13004 debug_tree (TYPE_SIZE_UNIT (tv));
13005 error ("type%'s %<TYPE_SIZE_UNIT%>");
13006 debug_tree (TYPE_SIZE_UNIT (t));
13007 return false;
13009 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13011 verify_variant_match (TYPE_PRECISION);
13012 if (RECORD_OR_UNION_TYPE_P (t))
13013 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13014 else if (TREE_CODE (t) == ARRAY_TYPE)
13015 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13016 /* During LTO we merge variant lists from different translation units
13017 that may differ by TYPE_CONTEXT, which in turn may point
13018 to TRANSLATION_UNIT_DECL.
13019 Ada also builds variants of types with different TYPE_CONTEXT. */
13020 #if 0
13021 if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13022 verify_variant_match (TYPE_CONTEXT);
13023 #endif
13024 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13025 verify_variant_match (TYPE_STRING_FLAG);
13026 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13027 verify_variant_match (TYPE_CXX_ODR_P);
13028 if (TYPE_ALIAS_SET_KNOWN_P (t))
13030 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13031 debug_tree (tv);
13032 return false;
13035 /* tree_type_non_common checks. */
13037 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13038 and dangles the pointer from time to time. */
13039 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13040 && (in_lto_p || !TYPE_VFIELD (tv)
13041 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13043 error ("type variant has different %<TYPE_VFIELD%>");
13044 debug_tree (tv);
13045 return false;
13047 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13048 || TREE_CODE (t) == INTEGER_TYPE
13049 || TREE_CODE (t) == BOOLEAN_TYPE
13050 || TREE_CODE (t) == REAL_TYPE
13051 || TREE_CODE (t) == FIXED_POINT_TYPE)
13053 verify_variant_match (TYPE_MAX_VALUE);
13054 verify_variant_match (TYPE_MIN_VALUE);
13056 if (TREE_CODE (t) == METHOD_TYPE)
13057 verify_variant_match (TYPE_METHOD_BASETYPE);
13058 if (TREE_CODE (t) == OFFSET_TYPE)
13059 verify_variant_match (TYPE_OFFSET_BASETYPE);
13060 if (TREE_CODE (t) == ARRAY_TYPE)
13061 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13062 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13063 or even the type's main variant. This is needed to make bootstrap pass
13064 and the bug seems new in GCC 5.
13065 C++ FE should be updated to make this consistent and we should check
13066 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13067 is a match with main variant.
13069 Also disable the check for Java for now because of a parser hack that builds
13070 first a dummy BINFO and then sometimes replaces it by a real BINFO in some
13071 of the copies. */
13072 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13073 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13074 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13075 Since there is no cheap way to tell a C++/Java type w/o LTO, do checking
13076 at LTO time only. */
13077 && (in_lto_p && odr_type_p (t)))
13079 error ("type variant has different %<TYPE_BINFO%>");
13080 debug_tree (tv);
13081 error ("type variant%'s %<TYPE_BINFO%>");
13082 debug_tree (TYPE_BINFO (tv));
13083 error ("type%'s %<TYPE_BINFO%>");
13084 debug_tree (TYPE_BINFO (t));
13085 return false;
13088 /* Check various uses of TYPE_VALUES_RAW. */
13089 if (TREE_CODE (t) == ENUMERAL_TYPE
13090 && TYPE_VALUES (t))
13091 verify_variant_match (TYPE_VALUES);
13092 else if (TREE_CODE (t) == ARRAY_TYPE)
13093 verify_variant_match (TYPE_DOMAIN);
13094 /* Permit incomplete variants of a complete type. While FEs may complete
13095 all variants, this does not happen for C++ templates in all cases. */
13096 else if (RECORD_OR_UNION_TYPE_P (t)
13097 && COMPLETE_TYPE_P (t)
13098 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13100 tree f1, f2;
13102 /* Fortran builds qualified variants as new records with items of
13103 qualified type. Verify that they look the same. */
13104 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13105 f1 && f2;
13106 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13107 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13108 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13109 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13110 /* FIXME: gfc_nonrestricted_type builds all types as variants
13111 with the exception of pointer types. It deeply copies the type,
13112 which means that we may end up with a variant type
13113 referring to a non-variant pointer. We may change it to
13114 produce types as variants, too, like
13115 objc_get_protocol_qualified_type does. */
13116 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13117 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13118 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13119 break;
13120 if (f1 || f2)
13122 error ("type variant has different %<TYPE_FIELDS%>");
13123 debug_tree (tv);
13124 error ("first mismatch is field");
13125 debug_tree (f1);
13126 error ("and field");
13127 debug_tree (f2);
13128 return false;
13131 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13132 verify_variant_match (TYPE_ARG_TYPES);
13133 /* For C++ the qualified variant of an array type is really an array type
13134 of the qualified TREE_TYPE.
13135 objc builds variants of pointer types where the pointed-to type is a
13136 variant, too, in objc_get_protocol_qualified_type. */
13137 if (TREE_TYPE (t) != TREE_TYPE (tv)
13138 && ((TREE_CODE (t) != ARRAY_TYPE
13139 && !POINTER_TYPE_P (t))
13140 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13141 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13143 error ("type variant has different %<TREE_TYPE%>");
13144 debug_tree (tv);
13145 error ("type variant%'s %<TREE_TYPE%>");
13146 debug_tree (TREE_TYPE (tv));
13147 error ("type%'s %<TREE_TYPE%>");
13148 debug_tree (TREE_TYPE (t));
13149 return false;
13151 if (type_with_alias_set_p (t)
13152 && !gimple_canonical_types_compatible_p (t, tv, false))
13154 error ("type is not compatible with its variant");
13155 debug_tree (tv);
13156 error ("type variant%'s %<TREE_TYPE%>");
13157 debug_tree (TREE_TYPE (tv));
13158 error ("type%'s %<TREE_TYPE%>");
13159 debug_tree (TREE_TYPE (t));
13160 return false;
13162 return true;
13163 #undef verify_variant_match
13167 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13168 the middle-end types_compatible_p function. It needs to avoid
13169 claiming types are different for types that should be treated
13170 the same with respect to TBAA. Canonical types are also used
13171 for IL consistency checks via the useless_type_conversion_p
13172 predicate which does not handle all type kinds itself but falls
13173 back to pointer-comparison of TYPE_CANONICAL for aggregates
13174 for example. */
13176 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13177 type calculation because we need to allow inter-operability between signed
13178 and unsigned variants. */
13180 bool
13181 type_with_interoperable_signedness (const_tree type)
13183 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13184 signed char and unsigned char. Similarly the Fortran FE builds
13185 C_SIZE_T as a signed type, while C defines it unsigned. */
13187 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13188 == INTEGER_TYPE
13189 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13190 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
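/* E.g. an integer type whose precision matches that of signed char, or
   that of size_t, answers true, allowing Fortran's C_SIGNED_CHAR and
   C_SIZE_T kinds to share canonical types with their differently
   signed C counterparts.  */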
13193 /* Return true iff T1 and T2 are structurally identical as far as
13194 TBAA is concerned.
13195 This function is used both by lto.c canonical type merging and by the
13196 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13197 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13198 only for LTO because only in these cases TYPE_CANONICAL equivalence
13199 corresponds to the one defined by gimple_canonical_types_compatible_p. */
13201 bool
13202 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13203 bool trust_type_canonical)
13205 /* Type variants should be the same as the main variant. When not doing sanity
13206 checking to verify this fact, go to main variants and save some work. */
13207 if (trust_type_canonical)
13209 t1 = TYPE_MAIN_VARIANT (t1);
13210 t2 = TYPE_MAIN_VARIANT (t2);
13213 /* Check first for the obvious case of pointer identity. */
13214 if (t1 == t2)
13215 return true;
13217 /* Check that we have two types to compare. */
13218 if (t1 == NULL_TREE || t2 == NULL_TREE)
13219 return false;
13221 /* We consider complete types always compatible with incomplete types.
13222 This does not make sense for canonical type calculation and thus we
13223 need to ensure that we are never called on it.
13225 FIXME: For more correctness the function probably should have three modes
13226 1) mode assuming that types are complete, matching their structure
13227 2) mode allowing incomplete types but producing equivalence classes
13228 and thus ignoring all info from complete types
13229 3) mode allowing incomplete types to match complete but checking
13230 compatibility between complete types.
13232 1 and 2 can be used for canonical type calculation. 3 is the real
13233 definition of type compatibility that can be used e.g. for warnings during
13234 declaration merging. */
13236 gcc_assert (!trust_type_canonical
13237 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13239 /* If the types have been previously registered and found equal
13240 they still are. */
13242 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13243 && trust_type_canonical)
13245 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13246 they are always NULL, but they are set to non-NULL for types
13247 constructed by build_pointer_type and variants. In this case the
13248 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13249 all pointers are considered equal). Be sure not to return false
13250 negatives. */
13251 gcc_checking_assert (canonical_type_used_p (t1)
13252 && canonical_type_used_p (t2));
13253 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13256 /* For types where we do ODR based TBAA the canonical type is always
13257 set correctly, so we know that types are different if their
13258 canonical types do not match. */
13259 if (trust_type_canonical
13260 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13261 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13262 return false;
13264 /* Can't be the same type if the types don't have the same code. */
13265 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13266 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13267 return false;
13269 /* Qualifiers do not matter for canonical type comparison purposes. */
13271 /* Void types and nullptr types are always the same. */
13272 if (TREE_CODE (t1) == VOID_TYPE
13273 || TREE_CODE (t1) == NULLPTR_TYPE)
13274 return true;
13276 /* Can't be the same type if they have different modes. */
13277 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13278 return false;
13280 /* Non-aggregate types can be handled cheaply. */
13281 if (INTEGRAL_TYPE_P (t1)
13282 || SCALAR_FLOAT_TYPE_P (t1)
13283 || FIXED_POINT_TYPE_P (t1)
13284 || TREE_CODE (t1) == VECTOR_TYPE
13285 || TREE_CODE (t1) == COMPLEX_TYPE
13286 || TREE_CODE (t1) == OFFSET_TYPE
13287 || POINTER_TYPE_P (t1))
13289 /* Can't be the same type if they have different precision. */
13290 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13291 return false;
13293 /* In some cases the signed and unsigned types are required to be
13294 inter-operable. */
13295 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13296 && !type_with_interoperable_signedness (t1))
13297 return false;
13299 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13300 interoperable with "signed char". Unless all frontends are revisited
13301 to agree on these types, we must ignore the flag completely. */
13303 /* The Fortran standard defines a C_PTR type that is compatible with every
13304 C pointer. For this reason we need to glob all pointers into one.
13305 Still, pointers in different address spaces are not compatible. */
13306 if (POINTER_TYPE_P (t1))
13308 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13309 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13310 return false;
13313 /* Tail-recurse to components. */
13314 if (TREE_CODE (t1) == VECTOR_TYPE
13315 || TREE_CODE (t1) == COMPLEX_TYPE)
13316 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13317 TREE_TYPE (t2),
13318 trust_type_canonical);
13320 return true;
13323 /* Do type-specific comparisons. */
13324 switch (TREE_CODE (t1))
13326 case ARRAY_TYPE:
13327 /* Array types are the same if the element types are the same and
13328 the number of elements is the same. */
13329 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13330 trust_type_canonical)
13331 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13332 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13333 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13334 return false;
13335 else
13337 tree i1 = TYPE_DOMAIN (t1);
13338 tree i2 = TYPE_DOMAIN (t2);
13340 /* For an incomplete external array, the type domain can be
13341 NULL_TREE. Check this condition also. */
13342 if (i1 == NULL_TREE && i2 == NULL_TREE)
13343 return true;
13344 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13345 return false;
13346 else
13348 tree min1 = TYPE_MIN_VALUE (i1);
13349 tree min2 = TYPE_MIN_VALUE (i2);
13350 tree max1 = TYPE_MAX_VALUE (i1);
13351 tree max2 = TYPE_MAX_VALUE (i2);
13353 /* The minimum/maximum values have to be the same. */
13354 if ((min1 == min2
13355 || (min1 && min2
13356 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13357 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13358 || operand_equal_p (min1, min2, 0))))
13359 && (max1 == max2
13360 || (max1 && max2
13361 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13362 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13363 || operand_equal_p (max1, max2, 0)))))
13364 return true;
13365 else
13366 return false;
13370 case METHOD_TYPE:
13371 case FUNCTION_TYPE:
13372 /* Function types are the same if the return type and argument types
13373 are the same. */
13374 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13375 trust_type_canonical))
13376 return false;
13378 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13379 return true;
13380 else
13382 tree parms1, parms2;
13384 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13385 parms1 && parms2;
13386 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13388 if (!gimple_canonical_types_compatible_p
13389 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13390 trust_type_canonical))
13391 return false;
13394 if (parms1 || parms2)
13395 return false;
13397 return true;
13400 case RECORD_TYPE:
13401 case UNION_TYPE:
13402 case QUAL_UNION_TYPE:
13404 tree f1, f2;
13406 /* Don't try to compare variants of an incomplete type, before
13407 TYPE_FIELDS has been copied around. */
13408 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13409 return true;
13412 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13413 return false;
13415 /* For aggregate types, all the fields must be the same. */
13416 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13417 f1 || f2;
13418 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13420 /* Skip non-fields and zero-sized fields. */
13421 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13422 || (DECL_SIZE (f1)
13423 && integer_zerop (DECL_SIZE (f1)))))
13424 f1 = TREE_CHAIN (f1);
13425 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13426 || (DECL_SIZE (f2)
13427 && integer_zerop (DECL_SIZE (f2)))))
13428 f2 = TREE_CHAIN (f2);
13429 if (!f1 || !f2)
13430 break;
13431 /* The fields must have the same name, offset and type. */
13432 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13433 || !gimple_compare_field_offset (f1, f2)
13434 || !gimple_canonical_types_compatible_p
13435 (TREE_TYPE (f1), TREE_TYPE (f2),
13436 trust_type_canonical))
13437 return false;
13440 /* If one aggregate has more fields than the other, they
13441 are not the same. */
13442 if (f1 || f2)
13443 return false;
13445 return true;
13448 default:
13449 /* Consider all types with language specific trees in them mutually
13450 compatible. This is executed only from verify_type and false
13451 positives can be tolerated. */
13452 gcc_assert (!in_lto_p);
13453 return true;
13457 /* Verify type T. */
13459 void
13460 verify_type (const_tree t)
13462 bool error_found = false;
13463 tree mv = TYPE_MAIN_VARIANT (t);
13464 if (!mv)
13466 error ("main variant is not defined");
13467 error_found = true;
13469 else if (mv != TYPE_MAIN_VARIANT (mv))
13471 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13472 debug_tree (mv);
13473 error_found = true;
13475 else if (t != mv && !verify_type_variant (t, mv))
13476 error_found = true;
13478 tree ct = TYPE_CANONICAL (t);
13479 if (!ct)
13481 else if (TYPE_CANONICAL (t) != ct)
13483 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13484 debug_tree (ct);
13485 error_found = true;
13487 /* Method and function types cannot be used to address memory and thus
13488 TYPE_CANONICAL really matters only for determining useless conversions.
13490 FIXME: C++ FE produces declarations of builtin functions that are not
13491 compatible with main variants. */
13492 else if (TREE_CODE (t) == FUNCTION_TYPE)
13494 else if (t != ct
13495 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13496 with variably sized arrays because their sizes are possibly
13497 gimplified to different variables. */
13498 && !variably_modified_type_p (ct, NULL)
13499 && !gimple_canonical_types_compatible_p (t, ct, false)
13500 && COMPLETE_TYPE_P (t))
13502 error ("%<TYPE_CANONICAL%> is not compatible");
13503 debug_tree (ct);
13504 error_found = true;
13507 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13508 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13510 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
13511 debug_tree (ct);
13512 error_found = true;
13514 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13516 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
13517 debug_tree (ct);
13518 debug_tree (TYPE_MAIN_VARIANT (ct));
13519 error_found = true;
13523 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13524 if (RECORD_OR_UNION_TYPE_P (t))
13526 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13527 and dangle the pointer from time to time. */
13528 if (TYPE_VFIELD (t)
13529 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13530 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13532 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
13533 debug_tree (TYPE_VFIELD (t));
13534 error_found = true;
13537 else if (TREE_CODE (t) == POINTER_TYPE)
13539 if (TYPE_NEXT_PTR_TO (t)
13540 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13542 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
13543 debug_tree (TYPE_NEXT_PTR_TO (t));
13544 error_found = true;
13547 else if (TREE_CODE (t) == REFERENCE_TYPE)
13549 if (TYPE_NEXT_REF_TO (t)
13550 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13552 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
13553 debug_tree (TYPE_NEXT_REF_TO (t));
13554 error_found = true;
13557 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13558 || TREE_CODE (t) == FIXED_POINT_TYPE)
13560 /* FIXME: The following check should pass:
13561 useless_type_conversion_p (const_cast <tree> (t),
13562 TREE_TYPE (TYPE_MIN_VALUE (t))
13563 but does not for C sizetypes in LTO. */
13566 /* Check various uses of TYPE_MAX_VALUE_RAW. */
13567 if (RECORD_OR_UNION_TYPE_P (t))
13569 if (!TYPE_BINFO (t))
13571 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13573 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
13574 debug_tree (TYPE_BINFO (t));
13575 error_found = true;
13577 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
13579 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
13580 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13581 error_found = true;
13584 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13586 if (TYPE_METHOD_BASETYPE (t)
13587 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13588 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13590 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
13591 debug_tree (TYPE_METHOD_BASETYPE (t));
13592 error_found = true;
13595 else if (TREE_CODE (t) == OFFSET_TYPE)
13597 if (TYPE_OFFSET_BASETYPE (t)
13598 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13599 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13601 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
13602 debug_tree (TYPE_OFFSET_BASETYPE (t));
13603 error_found = true;
13606 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13607 || TREE_CODE (t) == FIXED_POINT_TYPE)
13609 /* FIXME: The following check should pass:
13610 useless_type_conversion_p (const_cast <tree> (t),
13611 TREE_TYPE (TYPE_MAX_VALUE (t))
13612 but does not for C sizetypes in LTO. */
13614 else if (TREE_CODE (t) == ARRAY_TYPE)
13616 if (TYPE_ARRAY_MAX_SIZE (t)
13617 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13619 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
13620 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13621 error_found = true;
13624 else if (TYPE_MAX_VALUE_RAW (t))
13626 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
13627 debug_tree (TYPE_MAX_VALUE_RAW (t));
13628 error_found = true;
13631 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13633 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
13634 debug_tree (TYPE_LANG_SLOT_1 (t));
13635 error_found = true;
13638 /* Check various uses of TYPE_VALUES_RAW. */
13639 if (TREE_CODE (t) == ENUMERAL_TYPE)
13640 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13642 tree value = TREE_VALUE (l);
13643 tree name = TREE_PURPOSE (l);
13645 /* The C FE produces INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
13646 CONST_DECL of ENUMERAL_TYPE. */
13647 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13649 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
13650 debug_tree (value);
13651 debug_tree (name);
13652 error_found = true;
13654 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13655 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13657 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
13658 "to the enum");
13659 debug_tree (value);
13660 debug_tree (name);
13661 error_found = true;
13663 if (TREE_CODE (name) != IDENTIFIER_NODE)
13665 error ("enum value name is not %<IDENTIFIER_NODE%>");
13666 debug_tree (value);
13667 debug_tree (name);
13668 error_found = true;
13671 else if (TREE_CODE (t) == ARRAY_TYPE)
13673 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13675 error ("array %<TYPE_DOMAIN%> is not integer type");
13676 debug_tree (TYPE_DOMAIN (t));
13677 error_found = true;
13680 else if (RECORD_OR_UNION_TYPE_P (t))
13682 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13684 error ("%<TYPE_FIELDS%> defined in incomplete type");
13685 error_found = true;
13687 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13689 /* TODO: verify properties of decls. */
13690 if (TREE_CODE (fld) == FIELD_DECL)
13692 else if (TREE_CODE (fld) == TYPE_DECL)
13694 else if (TREE_CODE (fld) == CONST_DECL)
13696 else if (VAR_P (fld))
13698 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13700 else if (TREE_CODE (fld) == USING_DECL)
13702 else if (TREE_CODE (fld) == FUNCTION_DECL)
13704 else
13706 error ("wrong tree in %<TYPE_FIELDS%> list");
13707 debug_tree (fld);
13708 error_found = true;
13712 else if (TREE_CODE (t) == INTEGER_TYPE
13713 || TREE_CODE (t) == BOOLEAN_TYPE
13714 || TREE_CODE (t) == OFFSET_TYPE
13715 || TREE_CODE (t) == REFERENCE_TYPE
13716 || TREE_CODE (t) == NULLPTR_TYPE
13717 || TREE_CODE (t) == POINTER_TYPE)
13719 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13721 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
13722 "is %p",
13723 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13724 error_found = true;
13726 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13728 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
13729 debug_tree (TYPE_CACHED_VALUES (t));
13730 error_found = true;
13732 /* Verify just enough of the cache to ensure that no one copied it to a new type.
13733 All copying should go through copy_node, which should clear it. */
13734 else if (TYPE_CACHED_VALUES_P (t))
13736 int i;
13737 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13738 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13739 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13741 error ("wrong %<TYPE_CACHED_VALUES%> entry");
13742 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13743 error_found = true;
13744 break;
13748 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13749 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13751 /* C++ FE uses TREE_PURPOSE to store initial values. */
13752 if (TREE_PURPOSE (l) && in_lto_p)
13754 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
13755 debug_tree (l);
13756 error_found = true;
13758 if (!TYPE_P (TREE_VALUE (l)))
13760 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
13761 debug_tree (l);
13762 error_found = true;
13765 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13767 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
13768 debug_tree (TYPE_VALUES_RAW (t));
13769 error_found = true;
13771 if (TREE_CODE (t) != INTEGER_TYPE
13772 && TREE_CODE (t) != BOOLEAN_TYPE
13773 && TREE_CODE (t) != OFFSET_TYPE
13774 && TREE_CODE (t) != REFERENCE_TYPE
13775 && TREE_CODE (t) != NULLPTR_TYPE
13776 && TREE_CODE (t) != POINTER_TYPE
13777 && TYPE_CACHED_VALUES_P (t))
13779 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
13780 error_found = true;
13783 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13784 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
13785 of a type. */
13786 if (TREE_CODE (t) == METHOD_TYPE
13787 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13789 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
13790 error_found = true;
13793 if (error_found)
13795 debug_tree (const_cast <tree> (t));
13796 internal_error ("%qs failed", __func__);
13801 /* Return 1 if ARG, interpreted as signed in its precision, is known to be
13802 always non-negative, 2 if ARG is known to be always negative, or 3 if
13803 ARG may be either. */
13805 int
13806 get_range_pos_neg (tree arg)
13808 if (arg == error_mark_node)
13809 return 3;
13811 int prec = TYPE_PRECISION (TREE_TYPE (arg));
13812 int cnt = 0;
13813 if (TREE_CODE (arg) == INTEGER_CST)
13815 wide_int w = wi::sext (wi::to_wide (arg), prec);
13816 if (wi::neg_p (w))
13817 return 2;
13818 else
13819 return 1;
13821 while (CONVERT_EXPR_P (arg)
13822 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
13823 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
13825 arg = TREE_OPERAND (arg, 0);
13826 /* A narrower value zero-extended into a wider type
13827 always results in a non-negative value. */
13828 if (TYPE_UNSIGNED (TREE_TYPE (arg))
13829 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
13830 return 1;
13831 prec = TYPE_PRECISION (TREE_TYPE (arg));
13832 if (++cnt > 30)
13833 return 3;
13836 if (TREE_CODE (arg) != SSA_NAME)
13837 return 3;
13838 value_range r;
13839 while (!get_global_range_query ()->range_of_expr (r, arg) || r.kind () != VR_RANGE)
13841 gimple *g = SSA_NAME_DEF_STMT (arg);
13842 if (is_gimple_assign (g)
13843 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
13845 tree t = gimple_assign_rhs1 (g);
13846 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
13847 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
13849 if (TYPE_UNSIGNED (TREE_TYPE (t))
13850 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
13851 return 1;
13852 prec = TYPE_PRECISION (TREE_TYPE (t));
13853 arg = t;
13854 if (++cnt > 30)
13855 return 3;
13856 continue;
13859 return 3;
13861 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
13863 /* For unsigned values, the "positive" range comes
13864 below the "negative" range. */
13865 if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
13866 return 1;
13867 if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
13868 return 2;
13870 else
13872 if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
13873 return 1;
13874 if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
13875 return 2;
13877 return 3;
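/* Illustrative examples for get_range_pos_neg (an editorial sketch, not
   part of the original file).  Given

     tree m5 = build_int_cst (integer_type_node, -5);
     tree p7 = build_int_cst (integer_type_node, 7);

   get_range_pos_neg (m5) returns 2 (always negative when read as signed)
   and get_range_pos_neg (p7) returns 1 (never negative).  For an SSA name
   whose recorded value range straddles zero, or when nothing useful is
   known, the function returns 3.  */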
13883 /* Return true if ARG is marked with the nonnull attribute in the
13884 current function signature. */
13886 bool
13887 nonnull_arg_p (const_tree arg)
13889 tree t, attrs, fntype;
13890 unsigned HOST_WIDE_INT arg_num;
13892 gcc_assert (TREE_CODE (arg) == PARM_DECL
13893 && (POINTER_TYPE_P (TREE_TYPE (arg))
13894 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
13896 /* The static chain decl is always non-null. */
13897 if (arg == cfun->static_chain_decl)
13898 return true;
13900 /* The THIS argument of a method is always non-NULL. */
13901 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13902 && arg == DECL_ARGUMENTS (cfun->decl)
13903 && flag_delete_null_pointer_checks)
13904 return true;
13906 /* Values passed by reference are always non-NULL. */
13907 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13908 && flag_delete_null_pointer_checks)
13909 return true;
13911 fntype = TREE_TYPE (cfun->decl);
13912 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13914 attrs = lookup_attribute ("nonnull", attrs);
13916 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13917 if (attrs == NULL_TREE)
13918 return false;
13920 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13921 if (TREE_VALUE (attrs) == NULL_TREE)
13922 return true;
13924 /* Get the position number for ARG in the function signature. */
13925 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13927 t = DECL_CHAIN (t), arg_num++)
13929 if (t == arg)
13930 break;
13933 gcc_assert (t == arg);
13935 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13936 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13938 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13939 return true;
13943 return false;
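/* Worked example for nonnull_arg_p (an editorial sketch, not part of the
   original file).  If the current function was declared as

     void f (char *p, char *q) __attribute__ ((nonnull (2)));

   then, with cfun pointing at f, nonnull_arg_p on the PARM_DECL of q
   returns true, because argument 2 is listed in the "nonnull" attribute
   on f's type, while nonnull_arg_p on the PARM_DECL of p returns false.
   A bare __attribute__ ((nonnull)) with no argument list makes it return
   true for every pointer parameter.  */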
13946 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
13947 information. */
13949 location_t
13950 set_block (location_t loc, tree block)
13952 location_t pure_loc = get_pure_location (loc);
13953 source_range src_range = get_range_from_loc (line_table, loc);
13954 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
13957 location_t
13958 set_source_range (tree expr, location_t start, location_t finish)
13960 source_range src_range;
13961 src_range.m_start = start;
13962 src_range.m_finish = finish;
13963 return set_source_range (expr, src_range);
13966 location_t
13967 set_source_range (tree expr, source_range src_range)
13969 if (!EXPR_P (expr))
13970 return UNKNOWN_LOCATION;
13972 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
13973 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
13974 pure_loc,
13975 src_range,
13976 NULL);
13977 SET_EXPR_LOCATION (expr, adhoc);
13978 return adhoc;
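/* Usage sketch for the two set_source_range overloads above (an editorial
   addition).  Given an expression EXPR and locations START and FINISH
   obtained from the parser, either of

     set_source_range (expr, start, finish);

     source_range r;
     r.m_start = start;
     r.m_finish = finish;
     set_source_range (expr, r);

   rewrites EXPR's location to an ad-hoc location carrying the range and
   returns that location.  Non-expression trees are left untouched and
   UNKNOWN_LOCATION is returned for them.  */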
13981 /* Return EXPR, potentially wrapped in a wrapper node carrying location LOC,
13982 if !CAN_HAVE_LOCATION_P (expr).
13984 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
13985 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
13987 Wrapper nodes can be identified using location_wrapper_p. */
13989 tree
13990 maybe_wrap_with_location (tree expr, location_t loc)
13992 if (expr == NULL)
13993 return NULL;
13994 if (loc == UNKNOWN_LOCATION)
13995 return expr;
13996 if (CAN_HAVE_LOCATION_P (expr))
13997 return expr;
13998 /* We should only be adding wrappers for constants and for decls,
13999 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14000 gcc_assert (CONSTANT_CLASS_P (expr)
14001 || DECL_P (expr)
14002 || EXCEPTIONAL_CLASS_P (expr));
14004 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14005 any impact of the wrapper nodes. */
14006 if (EXCEPTIONAL_CLASS_P (expr))
14007 return expr;
14009 /* Compiler-generated temporary variables don't need a wrapper. */
14010 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14011 return expr;
14013 /* If any auto_suppress_location_wrappers are active, don't create
14014 wrappers. */
14015 if (suppress_location_wrappers > 0)
14016 return expr;
14018 tree_code code
14019 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14020 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14021 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14022 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14023 /* Mark this node as being a wrapper. */
14024 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14025 return wrapper;
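/* Worked example for maybe_wrap_with_location (an editorial sketch; see
   also test_location_wrappers below).  Given

     tree cst = build_int_cst (integer_type_node, 42);
     tree wrapped = maybe_wrap_with_location (cst, loc);

   with LOC a real location, WRAPPED is a NON_LVALUE_EXPR whose
   EXPR_LOCATION is LOC, whose type is integer_type_node, whose operand is
   CST, and for which EXPR_LOCATION_WRAPPER_P is set.  A STRING_CST or a
   VAR_DECL would be wrapped in a VIEW_CONVERT_EXPR instead, and the
   original node is returned unchanged for UNKNOWN_LOCATION or for nodes
   that can carry a location themselves.  */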
14028 int suppress_location_wrappers;
14030 /* Return the name of combined function FN, for debugging purposes. */
14032 const char *
14033 combined_fn_name (combined_fn fn)
14035 if (builtin_fn_p (fn))
14037 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14038 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14040 else
14041 return internal_fn_name (as_internal_fn (fn));
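/* Example for combined_fn_name (an editorial sketch; the CFN_* spellings
   below are assumptions based on the combined_fn enumeration).  For a
   built-in such as CFN_BUILT_IN_MEMCPY the result is the DECL_NAME of the
   declaration returned by builtin_decl_explicit, while for an internal
   function such as CFN_ADD_OVERFLOW the result is the name taken from
   internal-fn.def, e.g. "ADD_OVERFLOW".  */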
14044 /* Return a bitmap with a bit set corresponding to each argument in
14045 a function call type FNTYPE declared with attribute nonnull,
14046 or null if none of the function's arguments are nonnull. The caller
14047 must free the bitmap. */
14049 bitmap
14050 get_nonnull_args (const_tree fntype)
14052 if (fntype == NULL_TREE)
14053 return NULL;
14055 bitmap argmap = NULL;
14056 if (TREE_CODE (fntype) == METHOD_TYPE)
14058 /* The this pointer in C++ non-static member functions is
14059 implicitly nonnull whether or not it's declared as such. */
14060 argmap = BITMAP_ALLOC (NULL);
14061 bitmap_set_bit (argmap, 0);
14064 tree attrs = TYPE_ATTRIBUTES (fntype);
14065 if (!attrs)
14066 return argmap;
14068 /* A function declaration can specify multiple attribute nonnull,
14069 each with zero or more arguments. The loop below creates a bitmap
14070 representing a union of all the arguments. An empty (but non-null)
14071 bitmap means that all arguments have been declared nonnull. */
14072 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14074 attrs = lookup_attribute ("nonnull", attrs);
14075 if (!attrs)
14076 break;
14078 if (!argmap)
14079 argmap = BITMAP_ALLOC (NULL);
14081 if (!TREE_VALUE (attrs))
14083 /* Clear the bitmap in case a previous attribute nonnull
14084 set it and this one overrides it for all arguments. */
14085 bitmap_clear (argmap);
14086 return argmap;
14089 /* Iterate over the indices of the arguments declared nonnull
14090 and set a bit for each. */
14091 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14093 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14094 bitmap_set_bit (argmap, val);
14098 return argmap;
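/* Usage sketch for get_nonnull_args (an editorial addition; FNTYPE and
   ARGNO stand for a function type tree and a zero-based argument index
   obtained elsewhere):

     bitmap nonnull = get_nonnull_args (fntype);
     bool arg_nonnull = false;
     if (nonnull)
       {
         /* An empty map means every argument was declared nonnull.  */
         arg_nonnull = (bitmap_empty_p (nonnull)
                        || bitmap_bit_p (nonnull, argno));
         BITMAP_FREE (nonnull);
       }

   For a METHOD_TYPE, bit 0 (the implicit this pointer) is always set.  */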
14101 /* Returns true if TYPE is an empty type, i.e. it and all of its subobjects
14102 (recursively, ignoring padding fields) are of structure, union, or array type. */
14104 bool
14105 is_empty_type (const_tree type)
14107 if (RECORD_OR_UNION_TYPE_P (type))
14109 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14110 if (TREE_CODE (field) == FIELD_DECL
14111 && !DECL_PADDING_P (field)
14112 && !is_empty_type (TREE_TYPE (field)))
14113 return false;
14114 return true;
14116 else if (TREE_CODE (type) == ARRAY_TYPE)
14117 return (integer_minus_onep (array_type_nelts (type))
14118 || TYPE_DOMAIN (type) == NULL_TREE
14119 || is_empty_type (TREE_TYPE (type)));
14120 return false;
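/* Worked example for is_empty_type (an editorial sketch).  With

     struct empty {};
     struct holder { struct empty e; struct empty a[2]; };
     struct payload { int i; };

   the function returns true for 'empty' and 'holder', since every field
   is itself of empty structure or array type, and false for 'payload',
   whose int member carries data.  Fields marked DECL_PADDING_P are
   ignored, and arrays with no elements count as empty.  */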
14123 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14124 that shouldn't be passed via stack. */
14126 bool
14127 default_is_empty_record (const_tree type)
14129 if (!abi_version_at_least (12))
14130 return false;
14132 if (type == error_mark_node)
14133 return false;
14135 if (TREE_ADDRESSABLE (type))
14136 return false;
14138 return is_empty_type (TYPE_MAIN_VARIANT (type));
14141 /* Determine whether TYPE is a structure with a flexible array member,
14142 or a union containing such a structure (possibly recursively). */
14144 bool
14145 flexible_array_type_p (const_tree type)
14147 tree x, last;
14148 switch (TREE_CODE (type))
14150 case RECORD_TYPE:
14151 last = NULL_TREE;
14152 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14153 if (TREE_CODE (x) == FIELD_DECL)
14154 last = x;
14155 if (last == NULL_TREE)
14156 return false;
14157 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14158 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14159 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14160 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14161 return true;
14162 return false;
14163 case UNION_TYPE:
14164 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14166 if (TREE_CODE (x) == FIELD_DECL
14167 && flexible_array_type_p (TREE_TYPE (x)))
14168 return true;
14170 return false;
14171 default:
14172 return false;
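/* Worked example for flexible_array_type_p (an editorial sketch).  For

     struct s { int n; char data[]; };
     union u { struct s s; int i; };
     struct t { int n; char data[1]; };

   the function returns true for 's' (its last field is an incomplete
   array) and for 'u' (a member recursively contains such a structure),
   but false for 't', whose trailing array has a known size, and false
   for any non-aggregate type.  */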
14176 /* Like int_size_in_bytes, but handle empty records specially. */
14178 HOST_WIDE_INT
14179 arg_int_size_in_bytes (const_tree type)
14181 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14184 /* Like size_in_bytes, but handle empty records specially. */
14186 tree
14187 arg_size_in_bytes (const_tree type)
14189 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14192 /* Return true if an expression with CODE has to have the same result type as
14193 its first operand. */
14195 bool
14196 expr_type_first_operand_type_p (tree_code code)
14198 switch (code)
14200 case NEGATE_EXPR:
14201 case ABS_EXPR:
14202 case BIT_NOT_EXPR:
14203 case PAREN_EXPR:
14204 case CONJ_EXPR:
14206 case PLUS_EXPR:
14207 case MINUS_EXPR:
14208 case MULT_EXPR:
14209 case TRUNC_DIV_EXPR:
14210 case CEIL_DIV_EXPR:
14211 case FLOOR_DIV_EXPR:
14212 case ROUND_DIV_EXPR:
14213 case TRUNC_MOD_EXPR:
14214 case CEIL_MOD_EXPR:
14215 case FLOOR_MOD_EXPR:
14216 case ROUND_MOD_EXPR:
14217 case RDIV_EXPR:
14218 case EXACT_DIV_EXPR:
14219 case MIN_EXPR:
14220 case MAX_EXPR:
14221 case BIT_IOR_EXPR:
14222 case BIT_XOR_EXPR:
14223 case BIT_AND_EXPR:
14225 case LSHIFT_EXPR:
14226 case RSHIFT_EXPR:
14227 case LROTATE_EXPR:
14228 case RROTATE_EXPR:
14229 return true;
14231 default:
14232 return false;
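/* Examples for expr_type_first_operand_type_p (an editorial addition):
   for codes such as PLUS_EXPR, BIT_AND_EXPR or LSHIFT_EXPR the result
   type must match the type of operand 0, so the function returns true;
   for codes such as EQ_EXPR or ARRAY_REF, whose result type generally
   differs from the type of their first operand, it returns false.  */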
14236 /* Return a typenode for the "standard" C type with a given name. */
14237 tree
14238 get_typenode_from_name (const char *name)
14240 if (name == NULL || *name == '\0')
14241 return NULL_TREE;
14243 if (strcmp (name, "char") == 0)
14244 return char_type_node;
14245 if (strcmp (name, "unsigned char") == 0)
14246 return unsigned_char_type_node;
14247 if (strcmp (name, "signed char") == 0)
14248 return signed_char_type_node;
14250 if (strcmp (name, "short int") == 0)
14251 return short_integer_type_node;
14252 if (strcmp (name, "short unsigned int") == 0)
14253 return short_unsigned_type_node;
14255 if (strcmp (name, "int") == 0)
14256 return integer_type_node;
14257 if (strcmp (name, "unsigned int") == 0)
14258 return unsigned_type_node;
14260 if (strcmp (name, "long int") == 0)
14261 return long_integer_type_node;
14262 if (strcmp (name, "long unsigned int") == 0)
14263 return long_unsigned_type_node;
14265 if (strcmp (name, "long long int") == 0)
14266 return long_long_integer_type_node;
14267 if (strcmp (name, "long long unsigned int") == 0)
14268 return long_long_unsigned_type_node;
14270 gcc_unreachable ();
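/* Usage sketch for get_typenode_from_name (an editorial addition):

     tree t = get_typenode_from_name ("long unsigned int");

   yields long_unsigned_type_node.  Only the spellings listed above are
   accepted; any other non-empty name reaches gcc_unreachable, while a
   NULL or empty string yields NULL_TREE.  */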
14273 /* List of pointer types used to declare builtins before we have seen their
14274 real declaration.
14276 Keep the size up to date in tree.h ! */
14277 const builtin_structptr_type builtin_structptr_types[6] =
14279 { fileptr_type_node, ptr_type_node, "FILE" },
14280 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14281 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14282 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14283 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14284 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14287 /* Return the maximum object size. */
14289 tree
14290 max_object_size (void)
14292 /* To do: Make this a configurable parameter. */
14293 return TYPE_MAX_VALUE (ptrdiff_type_node);
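/* Example for max_object_size (an editorial note): on a typical LP64
   target ptrdiff_type_node is a 64-bit signed type, so the returned
   INTEGER_CST equals PTRDIFF_MAX, i.e. 2^63 - 1.  Callers can compare a
   candidate size against it with, e.g., tree_int_cst_lt.  */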
14296 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14297 parameter default to false and that weeds out error_mark_node. */
14299 bool
14300 verify_type_context (location_t loc, type_context_kind context,
14301 const_tree type, bool silent_p)
14303 if (type == error_mark_node)
14304 return true;
14306 gcc_assert (TYPE_P (type));
14307 return (!targetm.verify_type_context
14308 || targetm.verify_type_context (loc, context, type, silent_p));
14311 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14312 delete operators. */
14314 bool
14315 valid_new_delete_pair_p (tree new_asm, tree delete_asm)
14317 const char *new_name = IDENTIFIER_POINTER (new_asm);
14318 const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14319 unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14320 unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14322 if (new_len < 5 || delete_len < 6)
14323 return false;
14324 if (new_name[0] == '_')
14325 ++new_name, --new_len;
14326 if (new_name[0] == '_')
14327 ++new_name, --new_len;
14328 if (delete_name[0] == '_')
14329 ++delete_name, --delete_len;
14330 if (delete_name[0] == '_')
14331 ++delete_name, --delete_len;
14332 if (new_len < 4 || delete_len < 5)
14333 return false;
14334 /* *_len is now just the length after initial underscores. */
14335 if (new_name[0] != 'Z' || new_name[1] != 'n')
14336 return false;
14337 if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14338 return false;
14339 /* _Znw must match _Zdl, _Zna must match _Zda. */
14340 if ((new_name[2] != 'w' || delete_name[2] != 'l')
14341 && (new_name[2] != 'a' || delete_name[2] != 'a'))
14342 return false;
14343 /* 'j', 'm' and 'y' correspond to size_t. */
14344 if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14345 return false;
14346 if (delete_name[3] != 'P' || delete_name[4] != 'v')
14347 return false;
14348 if (new_len == 4
14349 || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14351 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14352 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14353 if (delete_len == 5)
14354 return true;
14355 if (delete_len == 6 && delete_name[5] == new_name[3])
14356 return true;
14357 if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14358 return true;
14360 else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14361 || (new_len == 33
14362 && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14364 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14365 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
14366 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14367 if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
14368 return true;
14369 if (delete_len == 21
14370 && delete_name[5] == new_name[3]
14371 && !memcmp (delete_name + 6, "St11align_val_t", 15))
14372 return true;
14373 if (delete_len == 34
14374 && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
14375 return true;
14377 return false;
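/* Worked example for valid_new_delete_pair_p (an editorial sketch).  On
   an LP64 target "operator new (unsigned long)" mangles to _Znwm and
   "operator delete (void *)" to _ZdlPv; the pair (_Znwm, _ZdlPv) is
   accepted because 'w' pairs with 'l' and 'm' encodes size_t.  Likewise
   the array forms _Znam/_ZdaPv and the sized form _ZdlPvm are accepted,
   while mixing scalar and array forms, e.g. (_Znwm, _ZdaPv), is
   rejected.  */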
14380 #if CHECKING_P
14382 namespace selftest {
14384 /* Selftests for tree. */
14386 /* Verify that integer constants are sane. */
14388 static void
14389 test_integer_constants ()
14391 ASSERT_TRUE (integer_type_node != NULL);
14392 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14394 tree type = integer_type_node;
14396 tree zero = build_zero_cst (type);
14397 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14398 ASSERT_EQ (type, TREE_TYPE (zero));
14400 tree one = build_int_cst (type, 1);
14401 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14402 ASSERT_EQ (type, TREE_TYPE (zero));
14405 /* Verify identifiers. */
14407 static void
14408 test_identifiers ()
14410 tree identifier = get_identifier ("foo");
14411 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14412 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14415 /* Verify LABEL_DECL. */
14417 static void
14418 test_labels ()
14420 tree identifier = get_identifier ("err");
14421 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14422 identifier, void_type_node);
14423 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14424 ASSERT_FALSE (FORCED_LABEL (label_decl));
14427 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14428 are given by VALS. */
14430 static tree
14431 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
14433 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
14434 tree_vector_builder builder (type, vals.length (), 1);
14435 builder.splice (vals);
14436 return builder.build ();
14439 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14441 static void
14442 check_vector_cst (vec<tree> expected, tree actual)
14444 ASSERT_KNOWN_EQ (expected.length (),
14445 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
14446 for (unsigned int i = 0; i < expected.length (); ++i)
14447 ASSERT_EQ (wi::to_wide (expected[i]),
14448 wi::to_wide (vector_cst_elt (actual, i)));
14451 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14452 and that its elements match EXPECTED. */
14454 static void
14455 check_vector_cst_duplicate (vec<tree> expected, tree actual,
14456 unsigned int npatterns)
14458 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14459 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
14460 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
14461 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
14462 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14463 check_vector_cst (expected, actual);
14466 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14467 and NPATTERNS background elements, and that its elements match
14468 EXPECTED. */
14470 static void
14471 check_vector_cst_fill (vec<tree> expected, tree actual,
14472 unsigned int npatterns)
14474 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14475 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
14476 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
14477 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14478 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14479 check_vector_cst (expected, actual);
14482 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14483 and that its elements match EXPECTED. */
14485 static void
14486 check_vector_cst_stepped (vec<tree> expected, tree actual,
14487 unsigned int npatterns)
14489 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14490 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
14491 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
14492 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14493 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
14494 check_vector_cst (expected, actual);
14497 /* Test the creation of VECTOR_CSTs. */
14499 static void
14500 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
14502 auto_vec<tree, 8> elements (8);
14503 elements.quick_grow (8);
14504 tree element_type = build_nonstandard_integer_type (16, true);
14505 tree vector_type = build_vector_type (element_type, 8);
14507 /* Test a simple linear series with a base of 0 and a step of 1:
14508 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
14509 for (unsigned int i = 0; i < 8; ++i)
14510 elements[i] = build_int_cst (element_type, i);
14511 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
14512 check_vector_cst_stepped (elements, vector, 1);
14514 /* Try the same with the first element replaced by 100:
14515 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
14516 elements[0] = build_int_cst (element_type, 100);
14517 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14518 check_vector_cst_stepped (elements, vector, 1);
14520 /* Try a series that wraps around.
14521 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
14522 for (unsigned int i = 1; i < 8; ++i)
14523 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
14524 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14525 check_vector_cst_stepped (elements, vector, 1);
14527 /* Try a downward series:
14528 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
14529 for (unsigned int i = 1; i < 8; ++i)
14530 elements[i] = build_int_cst (element_type, 80 - i);
14531 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14532 check_vector_cst_stepped (elements, vector, 1);
14534 /* Try two interleaved series with different bases and steps:
14535 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
14536 elements[1] = build_int_cst (element_type, 53);
14537 for (unsigned int i = 2; i < 8; i += 2)
14539 elements[i] = build_int_cst (element_type, 70 - i * 2);
14540 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
14542 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14543 check_vector_cst_stepped (elements, vector, 2);
14545 /* Try a duplicated value:
14546 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
14547 for (unsigned int i = 1; i < 8; ++i)
14548 elements[i] = elements[0];
14549 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14550 check_vector_cst_duplicate (elements, vector, 1);
14552 /* Try an interleaved duplicated value:
14553 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
14554 elements[1] = build_int_cst (element_type, 55);
14555 for (unsigned int i = 2; i < 8; ++i)
14556 elements[i] = elements[i - 2];
14557 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14558 check_vector_cst_duplicate (elements, vector, 2);
14560 /* Try a duplicated value with 2 exceptions
14561 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
14562 elements[0] = build_int_cst (element_type, 41);
14563 elements[1] = build_int_cst (element_type, 97);
14564 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14565 check_vector_cst_fill (elements, vector, 2);
14567 /* Try with and without a step
14568 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
14569 for (unsigned int i = 3; i < 8; i += 2)
14570 elements[i] = build_int_cst (element_type, i * 7);
14571 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14572 check_vector_cst_stepped (elements, vector, 2);
14574 /* Try a fully-general constant:
14575 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
14576 elements[5] = build_int_cst (element_type, 9990);
14577 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14578 check_vector_cst_fill (elements, vector, 4);
14581 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
14582 Helper function for test_location_wrappers, to deal with STRIP_NOPS
14583 modifying its argument in-place. */
14585 static void
14586 check_strip_nops (tree node, tree expected)
14588 STRIP_NOPS (node);
14589 ASSERT_EQ (expected, node);
14592 /* Verify location wrappers. */
14594 static void
14595 test_location_wrappers ()
14597 location_t loc = BUILTINS_LOCATION;
14599 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
14601 /* Wrapping a constant. */
14602 tree int_cst = build_int_cst (integer_type_node, 42);
14603 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
14604 ASSERT_FALSE (location_wrapper_p (int_cst));
14606 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
14607 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
14608 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
14609 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
14611 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
14612 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
14614 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
14615 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
14616 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
14617 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
14619 /* Wrapping a STRING_CST. */
14620 tree string_cst = build_string (4, "foo");
14621 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
14622 ASSERT_FALSE (location_wrapper_p (string_cst));
14624 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
14625 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
14626 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
14627 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
14628 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
14631 /* Wrapping a variable. */
14632 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
14633 get_identifier ("some_int_var"),
14634 integer_type_node);
14635 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
14636 ASSERT_FALSE (location_wrapper_p (int_var));
14638 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
14639 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
14640 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
14641 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
14643 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
14644 wrapper. */
14645 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
14646 ASSERT_FALSE (location_wrapper_p (r_cast));
14647 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
14649 /* Verify that STRIP_NOPS removes wrappers. */
14650 check_strip_nops (wrapped_int_cst, int_cst);
14651 check_strip_nops (wrapped_string_cst, string_cst);
14652 check_strip_nops (wrapped_int_var, int_var);
14655 /* Test various tree predicates. Verify that location wrappers don't
14656 affect the results. */
14658 static void
14659 test_predicates ()
14661 /* Build various constants and wrappers around them. */
14663 location_t loc = BUILTINS_LOCATION;
14665 tree i_0 = build_int_cst (integer_type_node, 0);
14666 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
14668 tree i_1 = build_int_cst (integer_type_node, 1);
14669 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
14671 tree i_m1 = build_int_cst (integer_type_node, -1);
14672 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
14674 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
14675 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
14676 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
14677 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
14678 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
14679 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
14681 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
14682 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
14683 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
14685 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
14686 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
14687 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
14689 /* TODO: vector constants. */
14691 /* Test integer_onep. */
14692 ASSERT_FALSE (integer_onep (i_0));
14693 ASSERT_FALSE (integer_onep (wr_i_0));
14694 ASSERT_TRUE (integer_onep (i_1));
14695 ASSERT_TRUE (integer_onep (wr_i_1));
14696 ASSERT_FALSE (integer_onep (i_m1));
14697 ASSERT_FALSE (integer_onep (wr_i_m1));
14698 ASSERT_FALSE (integer_onep (f_0));
14699 ASSERT_FALSE (integer_onep (wr_f_0));
14700 ASSERT_FALSE (integer_onep (f_1));
14701 ASSERT_FALSE (integer_onep (wr_f_1));
14702 ASSERT_FALSE (integer_onep (f_m1));
14703 ASSERT_FALSE (integer_onep (wr_f_m1));
14704 ASSERT_FALSE (integer_onep (c_i_0));
14705 ASSERT_TRUE (integer_onep (c_i_1));
14706 ASSERT_FALSE (integer_onep (c_i_m1));
14707 ASSERT_FALSE (integer_onep (c_f_0));
14708 ASSERT_FALSE (integer_onep (c_f_1));
14709 ASSERT_FALSE (integer_onep (c_f_m1));
14711 /* Test integer_zerop. */
14712 ASSERT_TRUE (integer_zerop (i_0));
14713 ASSERT_TRUE (integer_zerop (wr_i_0));
14714 ASSERT_FALSE (integer_zerop (i_1));
14715 ASSERT_FALSE (integer_zerop (wr_i_1));
14716 ASSERT_FALSE (integer_zerop (i_m1));
14717 ASSERT_FALSE (integer_zerop (wr_i_m1));
14718 ASSERT_FALSE (integer_zerop (f_0));
14719 ASSERT_FALSE (integer_zerop (wr_f_0));
14720 ASSERT_FALSE (integer_zerop (f_1));
14721 ASSERT_FALSE (integer_zerop (wr_f_1));
14722 ASSERT_FALSE (integer_zerop (f_m1));
14723 ASSERT_FALSE (integer_zerop (wr_f_m1));
14724 ASSERT_TRUE (integer_zerop (c_i_0));
14725 ASSERT_FALSE (integer_zerop (c_i_1));
14726 ASSERT_FALSE (integer_zerop (c_i_m1));
14727 ASSERT_FALSE (integer_zerop (c_f_0));
14728 ASSERT_FALSE (integer_zerop (c_f_1));
14729 ASSERT_FALSE (integer_zerop (c_f_m1));
14731 /* Test integer_all_onesp. */
14732 ASSERT_FALSE (integer_all_onesp (i_0));
14733 ASSERT_FALSE (integer_all_onesp (wr_i_0));
14734 ASSERT_FALSE (integer_all_onesp (i_1));
14735 ASSERT_FALSE (integer_all_onesp (wr_i_1));
14736 ASSERT_TRUE (integer_all_onesp (i_m1));
14737 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
14738 ASSERT_FALSE (integer_all_onesp (f_0));
14739 ASSERT_FALSE (integer_all_onesp (wr_f_0));
14740 ASSERT_FALSE (integer_all_onesp (f_1));
14741 ASSERT_FALSE (integer_all_onesp (wr_f_1));
14742 ASSERT_FALSE (integer_all_onesp (f_m1));
14743 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
14744 ASSERT_FALSE (integer_all_onesp (c_i_0));
14745 ASSERT_FALSE (integer_all_onesp (c_i_1));
14746 ASSERT_FALSE (integer_all_onesp (c_i_m1));
14747 ASSERT_FALSE (integer_all_onesp (c_f_0));
14748 ASSERT_FALSE (integer_all_onesp (c_f_1));
14749 ASSERT_FALSE (integer_all_onesp (c_f_m1));
14751 /* Test integer_minus_onep. */
14752 ASSERT_FALSE (integer_minus_onep (i_0));
14753 ASSERT_FALSE (integer_minus_onep (wr_i_0));
14754 ASSERT_FALSE (integer_minus_onep (i_1));
14755 ASSERT_FALSE (integer_minus_onep (wr_i_1));
14756 ASSERT_TRUE (integer_minus_onep (i_m1));
14757 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
14758 ASSERT_FALSE (integer_minus_onep (f_0));
14759 ASSERT_FALSE (integer_minus_onep (wr_f_0));
14760 ASSERT_FALSE (integer_minus_onep (f_1));
14761 ASSERT_FALSE (integer_minus_onep (wr_f_1));
14762 ASSERT_FALSE (integer_minus_onep (f_m1));
14763 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
14764 ASSERT_FALSE (integer_minus_onep (c_i_0));
14765 ASSERT_FALSE (integer_minus_onep (c_i_1));
14766 ASSERT_TRUE (integer_minus_onep (c_i_m1));
14767 ASSERT_FALSE (integer_minus_onep (c_f_0));
14768 ASSERT_FALSE (integer_minus_onep (c_f_1));
14769 ASSERT_FALSE (integer_minus_onep (c_f_m1));
14771 /* Test integer_each_onep. */
14772 ASSERT_FALSE (integer_each_onep (i_0));
14773 ASSERT_FALSE (integer_each_onep (wr_i_0));
14774 ASSERT_TRUE (integer_each_onep (i_1));
14775 ASSERT_TRUE (integer_each_onep (wr_i_1));
14776 ASSERT_FALSE (integer_each_onep (i_m1));
14777 ASSERT_FALSE (integer_each_onep (wr_i_m1));
14778 ASSERT_FALSE (integer_each_onep (f_0));
14779 ASSERT_FALSE (integer_each_onep (wr_f_0));
14780 ASSERT_FALSE (integer_each_onep (f_1));
14781 ASSERT_FALSE (integer_each_onep (wr_f_1));
14782 ASSERT_FALSE (integer_each_onep (f_m1));
14783 ASSERT_FALSE (integer_each_onep (wr_f_m1));
14784 ASSERT_FALSE (integer_each_onep (c_i_0));
14785 ASSERT_FALSE (integer_each_onep (c_i_1));
14786 ASSERT_FALSE (integer_each_onep (c_i_m1));
14787 ASSERT_FALSE (integer_each_onep (c_f_0));
14788 ASSERT_FALSE (integer_each_onep (c_f_1));
14789 ASSERT_FALSE (integer_each_onep (c_f_m1));
14791 /* Test integer_truep. */
14792 ASSERT_FALSE (integer_truep (i_0));
14793 ASSERT_FALSE (integer_truep (wr_i_0));
14794 ASSERT_TRUE (integer_truep (i_1));
14795 ASSERT_TRUE (integer_truep (wr_i_1));
14796 ASSERT_FALSE (integer_truep (i_m1));
14797 ASSERT_FALSE (integer_truep (wr_i_m1));
14798 ASSERT_FALSE (integer_truep (f_0));
14799 ASSERT_FALSE (integer_truep (wr_f_0));
14800 ASSERT_FALSE (integer_truep (f_1));
14801 ASSERT_FALSE (integer_truep (wr_f_1));
14802 ASSERT_FALSE (integer_truep (f_m1));
14803 ASSERT_FALSE (integer_truep (wr_f_m1));
14804 ASSERT_FALSE (integer_truep (c_i_0));
14805 ASSERT_TRUE (integer_truep (c_i_1));
14806 ASSERT_FALSE (integer_truep (c_i_m1));
14807 ASSERT_FALSE (integer_truep (c_f_0));
14808 ASSERT_FALSE (integer_truep (c_f_1));
14809 ASSERT_FALSE (integer_truep (c_f_m1));
14811 /* Test integer_nonzerop. */
14812 ASSERT_FALSE (integer_nonzerop (i_0));
14813 ASSERT_FALSE (integer_nonzerop (wr_i_0));
14814 ASSERT_TRUE (integer_nonzerop (i_1));
14815 ASSERT_TRUE (integer_nonzerop (wr_i_1));
14816 ASSERT_TRUE (integer_nonzerop (i_m1));
14817 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
14818 ASSERT_FALSE (integer_nonzerop (f_0));
14819 ASSERT_FALSE (integer_nonzerop (wr_f_0));
14820 ASSERT_FALSE (integer_nonzerop (f_1));
14821 ASSERT_FALSE (integer_nonzerop (wr_f_1));
14822 ASSERT_FALSE (integer_nonzerop (f_m1));
14823 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
14824 ASSERT_FALSE (integer_nonzerop (c_i_0));
14825 ASSERT_TRUE (integer_nonzerop (c_i_1));
14826 ASSERT_TRUE (integer_nonzerop (c_i_m1));
14827 ASSERT_FALSE (integer_nonzerop (c_f_0));
14828 ASSERT_FALSE (integer_nonzerop (c_f_1));
14829 ASSERT_FALSE (integer_nonzerop (c_f_m1));
14831 /* Test real_zerop. */
14832 ASSERT_FALSE (real_zerop (i_0));
14833 ASSERT_FALSE (real_zerop (wr_i_0));
14834 ASSERT_FALSE (real_zerop (i_1));
14835 ASSERT_FALSE (real_zerop (wr_i_1));
14836 ASSERT_FALSE (real_zerop (i_m1));
14837 ASSERT_FALSE (real_zerop (wr_i_m1));
14838 ASSERT_TRUE (real_zerop (f_0));
14839 ASSERT_TRUE (real_zerop (wr_f_0));
14840 ASSERT_FALSE (real_zerop (f_1));
14841 ASSERT_FALSE (real_zerop (wr_f_1));
14842 ASSERT_FALSE (real_zerop (f_m1));
14843 ASSERT_FALSE (real_zerop (wr_f_m1));
14844 ASSERT_FALSE (real_zerop (c_i_0));
14845 ASSERT_FALSE (real_zerop (c_i_1));
14846 ASSERT_FALSE (real_zerop (c_i_m1));
14847 ASSERT_TRUE (real_zerop (c_f_0));
14848 ASSERT_FALSE (real_zerop (c_f_1));
14849 ASSERT_FALSE (real_zerop (c_f_m1));
14851 /* Test real_onep. */
14852 ASSERT_FALSE (real_onep (i_0));
14853 ASSERT_FALSE (real_onep (wr_i_0));
14854 ASSERT_FALSE (real_onep (i_1));
14855 ASSERT_FALSE (real_onep (wr_i_1));
14856 ASSERT_FALSE (real_onep (i_m1));
14857 ASSERT_FALSE (real_onep (wr_i_m1));
14858 ASSERT_FALSE (real_onep (f_0));
14859 ASSERT_FALSE (real_onep (wr_f_0));
14860 ASSERT_TRUE (real_onep (f_1));
14861 ASSERT_TRUE (real_onep (wr_f_1));
14862 ASSERT_FALSE (real_onep (f_m1));
14863 ASSERT_FALSE (real_onep (wr_f_m1));
14864 ASSERT_FALSE (real_onep (c_i_0));
14865 ASSERT_FALSE (real_onep (c_i_1));
14866 ASSERT_FALSE (real_onep (c_i_m1));
14867 ASSERT_FALSE (real_onep (c_f_0));
14868 ASSERT_TRUE (real_onep (c_f_1));
14869 ASSERT_FALSE (real_onep (c_f_m1));
14871 /* Test real_minus_onep. */
14872 ASSERT_FALSE (real_minus_onep (i_0));
14873 ASSERT_FALSE (real_minus_onep (wr_i_0));
14874 ASSERT_FALSE (real_minus_onep (i_1));
14875 ASSERT_FALSE (real_minus_onep (wr_i_1));
14876 ASSERT_FALSE (real_minus_onep (i_m1));
14877 ASSERT_FALSE (real_minus_onep (wr_i_m1));
14878 ASSERT_FALSE (real_minus_onep (f_0));
14879 ASSERT_FALSE (real_minus_onep (wr_f_0));
14880 ASSERT_FALSE (real_minus_onep (f_1));
14881 ASSERT_FALSE (real_minus_onep (wr_f_1));
14882 ASSERT_TRUE (real_minus_onep (f_m1));
14883 ASSERT_TRUE (real_minus_onep (wr_f_m1));
14884 ASSERT_FALSE (real_minus_onep (c_i_0));
14885 ASSERT_FALSE (real_minus_onep (c_i_1));
14886 ASSERT_FALSE (real_minus_onep (c_i_m1));
14887 ASSERT_FALSE (real_minus_onep (c_f_0));
14888 ASSERT_FALSE (real_minus_onep (c_f_1));
14889 ASSERT_TRUE (real_minus_onep (c_f_m1));
14891 /* Test zerop. */
14892 ASSERT_TRUE (zerop (i_0));
14893 ASSERT_TRUE (zerop (wr_i_0));
14894 ASSERT_FALSE (zerop (i_1));
14895 ASSERT_FALSE (zerop (wr_i_1));
14896 ASSERT_FALSE (zerop (i_m1));
14897 ASSERT_FALSE (zerop (wr_i_m1));
14898 ASSERT_TRUE (zerop (f_0));
14899 ASSERT_TRUE (zerop (wr_f_0));
14900 ASSERT_FALSE (zerop (f_1));
14901 ASSERT_FALSE (zerop (wr_f_1));
14902 ASSERT_FALSE (zerop (f_m1));
14903 ASSERT_FALSE (zerop (wr_f_m1));
14904 ASSERT_TRUE (zerop (c_i_0));
14905 ASSERT_FALSE (zerop (c_i_1));
14906 ASSERT_FALSE (zerop (c_i_m1));
14907 ASSERT_TRUE (zerop (c_f_0));
14908 ASSERT_FALSE (zerop (c_f_1));
14909 ASSERT_FALSE (zerop (c_f_m1));
14911 /* Test tree_expr_nonnegative_p. */
14912 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
14913 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
14914 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
14915 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
14916 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
14917 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
14918 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
14919 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
14920 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
14921 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
14922 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
14923 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
14924 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
14925 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
14926 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
14927 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
14928 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
14929 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
14931 /* Test tree_expr_nonzero_p. */
14932 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
14933 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
14934 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
14935 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
14936 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
14937 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
14939 /* Test integer_valued_real_p. */
14940 ASSERT_FALSE (integer_valued_real_p (i_0));
14941 ASSERT_TRUE (integer_valued_real_p (f_0));
14942 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
14943 ASSERT_TRUE (integer_valued_real_p (f_1));
14944 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
14946 /* Test integer_pow2p. */
14947 ASSERT_FALSE (integer_pow2p (i_0));
14948 ASSERT_TRUE (integer_pow2p (i_1));
14949 ASSERT_TRUE (integer_pow2p (wr_i_1));
14951 /* Test uniform_integer_cst_p. */
14952 ASSERT_TRUE (uniform_integer_cst_p (i_0));
14953 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
14954 ASSERT_TRUE (uniform_integer_cst_p (i_1));
14955 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
14956 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
14957 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
14958 ASSERT_FALSE (uniform_integer_cst_p (f_0));
14959 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
14960 ASSERT_FALSE (uniform_integer_cst_p (f_1));
14961 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
14962 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
14963 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
14964 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
14965 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
14966 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
14967 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
14968 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
14969 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
14972 /* Check that string escaping works correctly. */
14974 static void
14975 test_escaped_strings (void)
14977 int saved_cutoff;
14978 escaped_string msg;
14980 msg.escape (NULL);
14981 /* ASSERT_STREQ does not accept NULL as a valid test
14982 result, so we have to use ASSERT_EQ instead. */
14983 ASSERT_EQ (NULL, (const char *) msg);
14985 msg.escape ("");
14986 ASSERT_STREQ ("", (const char *) msg);
14988 msg.escape ("foobar");
14989 ASSERT_STREQ ("foobar", (const char *) msg);
14991 /* Ensure that we have -fmessage-length set to 0. */
14992 saved_cutoff = pp_line_cutoff (global_dc->printer);
14993 pp_line_cutoff (global_dc->printer) = 0;
14995 msg.escape ("foo\nbar");
14996 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
14998 msg.escape ("\a\b\f\n\r\t\v");
14999 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15001 /* Now repeat the tests with -fmessage-length set to 5. */
15002 pp_line_cutoff (global_dc->printer) = 5;
15004 /* Note that the newline is not translated into an escape. */
15005 msg.escape ("foo\nbar");
15006 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15008 msg.escape ("\a\b\f\n\r\t\v");
15009 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15011 /* Restore the original message length setting. */
15012 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15015 /* Run all of the selftests within this file. */
15017 void
15018 tree_c_tests ()
15020 test_integer_constants ();
15021 test_identifiers ();
15022 test_labels ();
15023 test_vector_cst_patterns ();
15024 test_location_wrappers ();
15025 test_predicates ();
15026 test_escaped_strings ();
15029 } // namespace selftest
15031 #endif /* CHECKING_P */
15033 #include "gt-tree.h"