gcc/tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
73 /* Tree code classes. */
75 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
76 #define END_OF_BASE_TREE_CODES tcc_exceptional,
78 const enum tree_code_class tree_code_type[] = {
79 #include "all-tree.def"
82 #undef DEFTREECODE
83 #undef END_OF_BASE_TREE_CODES
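/* The table above (and tree_code_length and tree_code_name below) are built
   by expanding each DEFTREECODE (SYM, NAME, TYPE, LENGTH) entry of
   all-tree.def into the field of interest. For instance, the tree.def entry
   DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2) contributes tcc_binary
   here, 2 to tree_code_length and "plus_expr" to tree_code_name, all indexed
   by the PLUS_EXPR tree code.  */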
85 /* Table indexed by tree code giving number of expression
86 operands beyond the fixed part of the node structure.
87 Not used for types or decls. */
89 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
90 #define END_OF_BASE_TREE_CODES 0,
92 const unsigned char tree_code_length[] = {
93 #include "all-tree.def"
96 #undef DEFTREECODE
97 #undef END_OF_BASE_TREE_CODES
99 /* Names of tree components.
100 Used for printing out the tree and error messages. */
101 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
102 #define END_OF_BASE_TREE_CODES "@dummy",
104 static const char *const tree_code_name[] = {
105 #include "all-tree.def"
108 #undef DEFTREECODE
109 #undef END_OF_BASE_TREE_CODES
111 /* Each tree code class has an associated string representation.
112 These must correspond to the tree_code_class entries. */
114 const char *const tree_code_class_strings[] =
116 "exceptional",
117 "constant",
118 "type",
119 "declaration",
120 "reference",
121 "comparison",
122 "unary",
123 "binary",
124 "statement",
125 "vl_exp",
126 "expression"
129 /* obstack.[ch] explicitly declined to prototype this. */
130 extern int _obstack_allocated_p (struct obstack *h, void *obj);
132 /* Statistics-gathering stuff. */
134 static uint64_t tree_code_counts[MAX_TREE_CODES];
135 uint64_t tree_node_counts[(int) all_kinds];
136 uint64_t tree_node_sizes[(int) all_kinds];
138 /* Keep in sync with tree.h:enum tree_node_kind. */
139 static const char * const tree_node_kind_names[] = {
140 "decls",
141 "types",
142 "blocks",
143 "stmts",
144 "refs",
145 "exprs",
146 "constants",
147 "identifiers",
148 "vecs",
149 "binfos",
150 "ssa names",
151 "constructors",
152 "random kinds",
153 "lang_decl kinds",
154 "lang_type kinds",
155 "omp clauses",
158 /* Unique id for next decl created. */
159 static GTY(()) int next_decl_uid;
160 /* Unique id for next type created. */
161 static GTY(()) unsigned next_type_uid = 1;
162 /* Unique id for next debug decl created. Use negative numbers,
163 to catch erroneous uses. */
164 static GTY(()) int next_debug_decl_uid;
166 /* Since we cannot rehash a type after it is in the table, we have to
167 keep the hash code. */
169 struct GTY((for_user)) type_hash {
170 unsigned long hash;
171 tree type;
174 /* Initial size of the hash table (rounded to next prime). */
175 #define TYPE_HASH_INITIAL_SIZE 1000
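/* Hasher for the type hash table. The hash value is the one cached in the
   type_hash entry; an entry survives a collection only while its type is
   still marked by the garbage collector (see keep_cache_entry).  */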
177 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
179 static hashval_t hash (type_hash *t) { return t->hash; }
180 static bool equal (type_hash *a, type_hash *b);
182 static int
183 keep_cache_entry (type_hash *&t)
185 return ggc_marked_p (t->type);
189 /* Now here is the hash table. When recording a type, it is added to
190 the slot whose index is the hash code. Note that the hash table is
191 used for several kinds of types (function types, array types and
192 array index range types, for now). While all these live in the
193 same table, they are completely independent, and the hash code is
194 computed differently for each of these. */
196 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
198 /* Hash table and temporary node for larger integer const values. */
199 static GTY (()) tree int_cst_node;
201 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
203 static hashval_t hash (tree t);
204 static bool equal (tree x, tree y);
207 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
209 /* Class and variable for making sure that there is a single POLY_INT_CST
210 for a given value. */
211 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
213 typedef std::pair<tree, const poly_wide_int *> compare_type;
214 static hashval_t hash (tree t);
215 static bool equal (tree x, const compare_type &y);
218 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
220 /* Hash table for optimization flags and target option flags. Use the same
221 hash table for both sets of options. Nodes for building the current
222 optimization and target option nodes. The assumption is that most of the time
223 the options created will already be in the hash table, so we avoid
224 allocating and freeing a node repeatedly. */
225 static GTY (()) tree cl_optimization_node;
226 static GTY (()) tree cl_target_option_node;
228 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
230 static hashval_t hash (tree t);
231 static bool equal (tree x, tree y);
234 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
236 /* General tree->tree mapping structure for use in hash tables. */
239 static GTY ((cache))
240 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
242 static GTY ((cache))
243 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
245 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
247 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
249 static bool
250 equal (tree_vec_map *a, tree_vec_map *b)
252 return a->base.from == b->base.from;
255 static int
256 keep_cache_entry (tree_vec_map *&m)
258 return ggc_marked_p (m->base.from);
262 static GTY ((cache))
263 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
265 static void set_type_quals (tree, int);
266 static void print_type_hash_statistics (void);
267 static void print_debug_expr_statistics (void);
268 static void print_value_expr_statistics (void);
270 tree global_trees[TI_MAX];
271 tree integer_types[itk_none];
273 bool int_n_enabled_p[NUM_INT_N_ENTS];
274 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
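/* tree_contains_struct[CODE][TS] is nonzero if a node with tree code CODE
   contains the tree structure identified by the tree_node_structure_enum
   value TS; it is filled in by initialize_tree_contains_struct below.  */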
276 bool tree_contains_struct[MAX_TREE_CODES][64];
278 /* Number of operands for each OMP clause. */
279 unsigned const char omp_clause_num_ops[] =
281 0, /* OMP_CLAUSE_ERROR */
282 1, /* OMP_CLAUSE_PRIVATE */
283 1, /* OMP_CLAUSE_SHARED */
284 1, /* OMP_CLAUSE_FIRSTPRIVATE */
285 2, /* OMP_CLAUSE_LASTPRIVATE */
286 5, /* OMP_CLAUSE_REDUCTION */
287 5, /* OMP_CLAUSE_TASK_REDUCTION */
288 5, /* OMP_CLAUSE_IN_REDUCTION */
289 1, /* OMP_CLAUSE_COPYIN */
290 1, /* OMP_CLAUSE_COPYPRIVATE */
291 3, /* OMP_CLAUSE_LINEAR */
292 1, /* OMP_CLAUSE_AFFINITY */
293 2, /* OMP_CLAUSE_ALIGNED */
294 3, /* OMP_CLAUSE_ALLOCATE */
295 1, /* OMP_CLAUSE_DEPEND */
296 1, /* OMP_CLAUSE_NONTEMPORAL */
297 1, /* OMP_CLAUSE_UNIFORM */
298 1, /* OMP_CLAUSE_TO_DECLARE */
299 1, /* OMP_CLAUSE_LINK */
300 1, /* OMP_CLAUSE_DETACH */
301 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
302 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
303 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
304 1, /* OMP_CLAUSE_INCLUSIVE */
305 1, /* OMP_CLAUSE_EXCLUSIVE */
306 2, /* OMP_CLAUSE_FROM */
307 2, /* OMP_CLAUSE_TO */
308 2, /* OMP_CLAUSE_MAP */
309 2, /* OMP_CLAUSE__CACHE_ */
310 2, /* OMP_CLAUSE_GANG */
311 1, /* OMP_CLAUSE_ASYNC */
312 1, /* OMP_CLAUSE_WAIT */
313 0, /* OMP_CLAUSE_AUTO */
314 0, /* OMP_CLAUSE_SEQ */
315 1, /* OMP_CLAUSE__LOOPTEMP_ */
316 1, /* OMP_CLAUSE__REDUCTEMP_ */
317 1, /* OMP_CLAUSE__CONDTEMP_ */
318 1, /* OMP_CLAUSE__SCANTEMP_ */
319 1, /* OMP_CLAUSE_IF */
320 1, /* OMP_CLAUSE_NUM_THREADS */
321 1, /* OMP_CLAUSE_SCHEDULE */
322 0, /* OMP_CLAUSE_NOWAIT */
323 1, /* OMP_CLAUSE_ORDERED */
324 0, /* OMP_CLAUSE_DEFAULT */
325 3, /* OMP_CLAUSE_COLLAPSE */
326 0, /* OMP_CLAUSE_UNTIED */
327 1, /* OMP_CLAUSE_FINAL */
328 0, /* OMP_CLAUSE_MERGEABLE */
329 1, /* OMP_CLAUSE_DEVICE */
330 1, /* OMP_CLAUSE_DIST_SCHEDULE */
331 0, /* OMP_CLAUSE_INBRANCH */
332 0, /* OMP_CLAUSE_NOTINBRANCH */
333 1, /* OMP_CLAUSE_NUM_TEAMS */
334 1, /* OMP_CLAUSE_THREAD_LIMIT */
335 0, /* OMP_CLAUSE_PROC_BIND */
336 1, /* OMP_CLAUSE_SAFELEN */
337 1, /* OMP_CLAUSE_SIMDLEN */
338 0, /* OMP_CLAUSE_DEVICE_TYPE */
339 0, /* OMP_CLAUSE_FOR */
340 0, /* OMP_CLAUSE_PARALLEL */
341 0, /* OMP_CLAUSE_SECTIONS */
342 0, /* OMP_CLAUSE_TASKGROUP */
343 1, /* OMP_CLAUSE_PRIORITY */
344 1, /* OMP_CLAUSE_GRAINSIZE */
345 1, /* OMP_CLAUSE_NUM_TASKS */
346 0, /* OMP_CLAUSE_NOGROUP */
347 0, /* OMP_CLAUSE_THREADS */
348 0, /* OMP_CLAUSE_SIMD */
349 1, /* OMP_CLAUSE_HINT */
350 0, /* OMP_CLAUSE_DEFAULTMAP */
351 0, /* OMP_CLAUSE_ORDER */
352 0, /* OMP_CLAUSE_BIND */
353 1, /* OMP_CLAUSE_FILTER */
354 1, /* OMP_CLAUSE__SIMDUID_ */
355 0, /* OMP_CLAUSE__SIMT_ */
356 0, /* OMP_CLAUSE_INDEPENDENT */
357 1, /* OMP_CLAUSE_WORKER */
358 1, /* OMP_CLAUSE_VECTOR */
359 1, /* OMP_CLAUSE_NUM_GANGS */
360 1, /* OMP_CLAUSE_NUM_WORKERS */
361 1, /* OMP_CLAUSE_VECTOR_LENGTH */
362 3, /* OMP_CLAUSE_TILE */
363 0, /* OMP_CLAUSE_IF_PRESENT */
364 0, /* OMP_CLAUSE_FINALIZE */
365 0, /* OMP_CLAUSE_NOHOST */
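/* Printable name of each OMP clause, indexed by enum omp_clause_code like
   omp_clause_num_ops above; both tables must be kept in sync with the order
   of the OMP_CLAUSE_* codes.  */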
368 const char * const omp_clause_code_name[] =
370 "error_clause",
371 "private",
372 "shared",
373 "firstprivate",
374 "lastprivate",
375 "reduction",
376 "task_reduction",
377 "in_reduction",
378 "copyin",
379 "copyprivate",
380 "linear",
381 "affinity",
382 "aligned",
383 "allocate",
384 "depend",
385 "nontemporal",
386 "uniform",
387 "to",
388 "link",
389 "detach",
390 "use_device_ptr",
391 "use_device_addr",
392 "is_device_ptr",
393 "inclusive",
394 "exclusive",
395 "from",
396 "to",
397 "map",
398 "_cache_",
399 "gang",
400 "async",
401 "wait",
402 "auto",
403 "seq",
404 "_looptemp_",
405 "_reductemp_",
406 "_condtemp_",
407 "_scantemp_",
408 "if",
409 "num_threads",
410 "schedule",
411 "nowait",
412 "ordered",
413 "default",
414 "collapse",
415 "untied",
416 "final",
417 "mergeable",
418 "device",
419 "dist_schedule",
420 "inbranch",
421 "notinbranch",
422 "num_teams",
423 "thread_limit",
424 "proc_bind",
425 "safelen",
426 "simdlen",
427 "device_type",
428 "for",
429 "parallel",
430 "sections",
431 "taskgroup",
432 "priority",
433 "grainsize",
434 "num_tasks",
435 "nogroup",
436 "threads",
437 "simd",
438 "hint",
439 "defaultmap",
440 "order",
441 "bind",
442 "filter",
443 "_simduid_",
444 "_simt_",
445 "independent",
446 "worker",
447 "vector",
448 "num_gangs",
449 "num_workers",
450 "vector_length",
451 "tile",
452 "if_present",
453 "finalize",
454 "nohost",
458 /* Return the tree node structure used by tree code CODE. */
460 static inline enum tree_node_structure_enum
461 tree_node_structure_for_code (enum tree_code code)
463 switch (TREE_CODE_CLASS (code))
465 case tcc_declaration:
466 switch (code)
468 case CONST_DECL: return TS_CONST_DECL;
469 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
470 case FIELD_DECL: return TS_FIELD_DECL;
471 case FUNCTION_DECL: return TS_FUNCTION_DECL;
472 case LABEL_DECL: return TS_LABEL_DECL;
473 case PARM_DECL: return TS_PARM_DECL;
474 case RESULT_DECL: return TS_RESULT_DECL;
475 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
476 case TYPE_DECL: return TS_TYPE_DECL;
477 case VAR_DECL: return TS_VAR_DECL;
478 default: return TS_DECL_NON_COMMON;
481 case tcc_type: return TS_TYPE_NON_COMMON;
483 case tcc_binary:
484 case tcc_comparison:
485 case tcc_expression:
486 case tcc_reference:
487 case tcc_statement:
488 case tcc_unary:
489 case tcc_vl_exp: return TS_EXP;
491 default: /* tcc_constant and tcc_exceptional */
492 break;
495 switch (code)
497 /* tcc_constant cases. */
498 case COMPLEX_CST: return TS_COMPLEX;
499 case FIXED_CST: return TS_FIXED_CST;
500 case INTEGER_CST: return TS_INT_CST;
501 case POLY_INT_CST: return TS_POLY_INT_CST;
502 case REAL_CST: return TS_REAL_CST;
503 case STRING_CST: return TS_STRING;
504 case VECTOR_CST: return TS_VECTOR;
505 case VOID_CST: return TS_TYPED;
507 /* tcc_exceptional cases. */
508 case BLOCK: return TS_BLOCK;
509 case CONSTRUCTOR: return TS_CONSTRUCTOR;
510 case ERROR_MARK: return TS_COMMON;
511 case IDENTIFIER_NODE: return TS_IDENTIFIER;
512 case OMP_CLAUSE: return TS_OMP_CLAUSE;
513 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
514 case PLACEHOLDER_EXPR: return TS_COMMON;
515 case SSA_NAME: return TS_SSA_NAME;
516 case STATEMENT_LIST: return TS_STATEMENT_LIST;
517 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
518 case TREE_BINFO: return TS_BINFO;
519 case TREE_LIST: return TS_LIST;
520 case TREE_VEC: return TS_VEC;
522 default:
523 gcc_unreachable ();
528 /* Initialize tree_contains_struct to describe the hierarchy of tree
529 nodes. */
531 static void
532 initialize_tree_contains_struct (void)
534 unsigned i;
536 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
538 enum tree_code code;
539 enum tree_node_structure_enum ts_code;
541 code = (enum tree_code) i;
542 ts_code = tree_node_structure_for_code (code);
544 /* Mark the TS structure itself. */
545 tree_contains_struct[code][ts_code] = 1;
547 /* Mark all the structures that TS is derived from. */
548 switch (ts_code)
550 case TS_TYPED:
551 case TS_BLOCK:
552 case TS_OPTIMIZATION:
553 case TS_TARGET_OPTION:
554 MARK_TS_BASE (code);
555 break;
557 case TS_COMMON:
558 case TS_INT_CST:
559 case TS_POLY_INT_CST:
560 case TS_REAL_CST:
561 case TS_FIXED_CST:
562 case TS_VECTOR:
563 case TS_STRING:
564 case TS_COMPLEX:
565 case TS_SSA_NAME:
566 case TS_CONSTRUCTOR:
567 case TS_EXP:
568 case TS_STATEMENT_LIST:
569 MARK_TS_TYPED (code);
570 break;
572 case TS_IDENTIFIER:
573 case TS_DECL_MINIMAL:
574 case TS_TYPE_COMMON:
575 case TS_LIST:
576 case TS_VEC:
577 case TS_BINFO:
578 case TS_OMP_CLAUSE:
579 MARK_TS_COMMON (code);
580 break;
582 case TS_TYPE_WITH_LANG_SPECIFIC:
583 MARK_TS_TYPE_COMMON (code);
584 break;
586 case TS_TYPE_NON_COMMON:
587 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
588 break;
590 case TS_DECL_COMMON:
591 MARK_TS_DECL_MINIMAL (code);
592 break;
594 case TS_DECL_WRTL:
595 case TS_CONST_DECL:
596 MARK_TS_DECL_COMMON (code);
597 break;
599 case TS_DECL_NON_COMMON:
600 MARK_TS_DECL_WITH_VIS (code);
601 break;
603 case TS_DECL_WITH_VIS:
604 case TS_PARM_DECL:
605 case TS_LABEL_DECL:
606 case TS_RESULT_DECL:
607 MARK_TS_DECL_WRTL (code);
608 break;
610 case TS_FIELD_DECL:
611 MARK_TS_DECL_COMMON (code);
612 break;
614 case TS_VAR_DECL:
615 MARK_TS_DECL_WITH_VIS (code);
616 break;
618 case TS_TYPE_DECL:
619 case TS_FUNCTION_DECL:
620 MARK_TS_DECL_NON_COMMON (code);
621 break;
623 case TS_TRANSLATION_UNIT_DECL:
624 MARK_TS_DECL_COMMON (code);
625 break;
627 default:
628 gcc_unreachable ();
632 /* Basic consistency checks for attributes used in fold. */
633 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
634 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
635 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
636 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
637 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
638 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
639 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
641 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
642 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
643 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
644 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
645 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
646 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
647 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
648 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
649 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
650 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
651 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
652 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
653 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
655 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
657 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
659 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
660 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
661 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
662 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
663 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
664 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
665 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
666 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
667 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
668 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
669 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
670 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
671 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
672 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
676 /* Init tree.c. */
678 void
679 init_ttree (void)
681 /* Initialize the hash table of types. */
682 type_hash_table
683 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
685 debug_expr_for_decl
686 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
688 value_expr_for_decl
689 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
691 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
693 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
695 int_cst_node = make_int_cst (1, 1);
697 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
699 cl_optimization_node = make_node (OPTIMIZATION_NODE);
700 cl_target_option_node = make_node (TARGET_OPTION_NODE);
702 /* Initialize the tree_contains_struct array. */
703 initialize_tree_contains_struct ();
704 lang_hooks.init_ts ();
708 /* The name of the object as the assembler will see it (but before any
709 translations made by ASM_OUTPUT_LABELREF). Often this is the same
710 as DECL_NAME. It is an IDENTIFIER_NODE. */
711 tree
712 decl_assembler_name (tree decl)
714 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
715 lang_hooks.set_decl_assembler_name (decl);
716 return DECL_ASSEMBLER_NAME_RAW (decl);
719 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
720 (either of which may be NULL). Inform the FE if this changes the
721 name. */
723 void
724 overwrite_decl_assembler_name (tree decl, tree name)
726 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
727 lang_hooks.overwrite_decl_assembler_name (decl, name);
730 /* Return true if DECL may need an assembler name to be set. */
732 static inline bool
733 need_assembler_name_p (tree decl)
735 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
736 Rule merging. This makes type_odr_p return true on those types during
737 LTO, and by comparing the mangled names we can tell which types are intended
738 to be equivalent across compilation units.
740 We do not store names of type_in_anonymous_namespace_p.
742 Record, union and enumeration types have linkage that allows us
743 to check type_in_anonymous_namespace_p. We do not mangle compound types
744 that can always be compared structurally.
746 Similarly for builtin types, we compare properties of their main variant.
747 A special case is integer types, where mangling does distinguish
748 between char/signed char/unsigned char etc. Storing names for these allows
749 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
750 See cp/mangle.c:write_builtin_type for details. */
752 if (TREE_CODE (decl) == TYPE_DECL)
754 if (DECL_NAME (decl)
755 && decl == TYPE_NAME (TREE_TYPE (decl))
756 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
757 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
758 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
759 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
760 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
761 && (type_with_linkage_p (TREE_TYPE (decl))
762 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
763 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
764 return !DECL_ASSEMBLER_NAME_SET_P (decl);
765 return false;
767 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
768 if (!VAR_OR_FUNCTION_DECL_P (decl))
769 return false;
771 /* If DECL already has its assembler name set, it does not need a
772 new one. */
773 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
774 || DECL_ASSEMBLER_NAME_SET_P (decl))
775 return false;
777 /* Abstract decls do not need an assembler name. */
778 if (DECL_ABSTRACT_P (decl))
779 return false;
781 /* For VAR_DECLs, only static, public and external symbols need an
782 assembler name. */
783 if (VAR_P (decl)
784 && !TREE_STATIC (decl)
785 && !TREE_PUBLIC (decl)
786 && !DECL_EXTERNAL (decl))
787 return false;
789 if (TREE_CODE (decl) == FUNCTION_DECL)
791 /* Do not set assembler name on builtins. Allow RTL expansion to
792 decide whether to expand inline or via a regular call. */
793 if (fndecl_built_in_p (decl)
794 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
795 return false;
797 /* Functions represented in the callgraph need an assembler name. */
798 if (cgraph_node::get (decl) != NULL)
799 return true;
801 /* Unused and not public functions don't need an assembler name. */
802 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
803 return false;
806 return true;
809 /* If T needs an assembler name, have one created for it. */
811 void
812 assign_assembler_name_if_needed (tree t)
814 if (need_assembler_name_p (t))
816 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
817 diagnostics that use input_location to show locus
818 information. The problem here is that, at this point,
819 input_location is generally anchored to the end of the file
820 (since the parser is long gone), so we don't have a good
821 position to pin it to.
823 To alleviate this problem, this uses the location of T's
824 declaration. Examples of this are
825 testsuite/g++.dg/template/cond2.C and
826 testsuite/g++.dg/template/pr35240.C. */
827 location_t saved_location = input_location;
828 input_location = DECL_SOURCE_LOCATION (t);
830 decl_assembler_name (t);
832 input_location = saved_location;
836 /* When the target supports COMDAT groups, this indicates which group the
837 DECL is associated with. This can be either an IDENTIFIER_NODE or a
838 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
839 tree
840 decl_comdat_group (const_tree node)
842 struct symtab_node *snode = symtab_node::get (node);
843 if (!snode)
844 return NULL;
845 return snode->get_comdat_group ();
848 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
849 tree
850 decl_comdat_group_id (const_tree node)
852 struct symtab_node *snode = symtab_node::get (node);
853 if (!snode)
854 return NULL;
855 return snode->get_comdat_group_id ();
858 /* When the target supports named sections, return the section name of NODE
859 as a string, or NULL if it is in no section. */
860 const char *
861 decl_section_name (const_tree node)
863 struct symtab_node *snode = symtab_node::get (node);
864 if (!snode)
865 return NULL;
866 return snode->get_section ();
869 /* Set section name of NODE to VALUE (a section name string); if VALUE
870 is NULL, clear the section name. */
871 void
872 set_decl_section_name (tree node, const char *value)
874 struct symtab_node *snode;
876 if (value == NULL)
878 snode = symtab_node::get (node);
879 if (!snode)
880 return;
882 else if (VAR_P (node))
883 snode = varpool_node::get_create (node);
884 else
885 snode = cgraph_node::get_create (node);
886 snode->set_section (value);
889 /* Set section name of NODE to match the section name of OTHER.
891 set_decl_section_name (decl, other) is equivalent to
892 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
893 efficient. */
894 void
895 set_decl_section_name (tree decl, const_tree other)
897 struct symtab_node *other_node = symtab_node::get (other);
898 if (other_node)
900 struct symtab_node *decl_node;
901 if (VAR_P (decl))
902 decl_node = varpool_node::get_create (decl);
903 else
904 decl_node = cgraph_node::get_create (decl);
905 decl_node->set_section (*other_node);
907 else
909 struct symtab_node *decl_node = symtab_node::get (decl);
910 if (!decl_node)
911 return;
912 decl_node->set_section (NULL);
916 /* Return TLS model of a variable NODE. */
917 enum tls_model
918 decl_tls_model (const_tree node)
920 struct varpool_node *snode = varpool_node::get (node);
921 if (!snode)
922 return TLS_MODEL_NONE;
923 return snode->tls_model;
926 /* Set TLS model of variable NODE to MODEL. */
927 void
928 set_decl_tls_model (tree node, enum tls_model model)
930 struct varpool_node *vnode;
932 if (model == TLS_MODEL_NONE)
934 vnode = varpool_node::get (node);
935 if (!vnode)
936 return;
938 else
939 vnode = varpool_node::get_create (node);
940 vnode->tls_model = model;
943 /* Compute the number of bytes occupied by a tree with code CODE.
944 This function cannot be used for nodes that have variable sizes,
945 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
946 size_t
947 tree_code_size (enum tree_code code)
949 switch (TREE_CODE_CLASS (code))
951 case tcc_declaration: /* A decl node */
952 switch (code)
954 case FIELD_DECL: return sizeof (tree_field_decl);
955 case PARM_DECL: return sizeof (tree_parm_decl);
956 case VAR_DECL: return sizeof (tree_var_decl);
957 case LABEL_DECL: return sizeof (tree_label_decl);
958 case RESULT_DECL: return sizeof (tree_result_decl);
959 case CONST_DECL: return sizeof (tree_const_decl);
960 case TYPE_DECL: return sizeof (tree_type_decl);
961 case FUNCTION_DECL: return sizeof (tree_function_decl);
962 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
963 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
964 case NAMESPACE_DECL:
965 case IMPORTED_DECL:
966 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
967 default:
968 gcc_checking_assert (code >= NUM_TREE_CODES);
969 return lang_hooks.tree_size (code);
972 case tcc_type: /* a type node */
973 switch (code)
975 case OFFSET_TYPE:
976 case ENUMERAL_TYPE:
977 case BOOLEAN_TYPE:
978 case INTEGER_TYPE:
979 case REAL_TYPE:
980 case OPAQUE_TYPE:
981 case POINTER_TYPE:
982 case REFERENCE_TYPE:
983 case NULLPTR_TYPE:
984 case FIXED_POINT_TYPE:
985 case COMPLEX_TYPE:
986 case VECTOR_TYPE:
987 case ARRAY_TYPE:
988 case RECORD_TYPE:
989 case UNION_TYPE:
990 case QUAL_UNION_TYPE:
991 case VOID_TYPE:
992 case FUNCTION_TYPE:
993 case METHOD_TYPE:
994 case LANG_TYPE: return sizeof (tree_type_non_common);
995 default:
996 gcc_checking_assert (code >= NUM_TREE_CODES);
997 return lang_hooks.tree_size (code);
1000 case tcc_reference: /* a reference */
1001 case tcc_expression: /* an expression */
1002 case tcc_statement: /* an expression with side effects */
1003 case tcc_comparison: /* a comparison expression */
1004 case tcc_unary: /* a unary arithmetic expression */
1005 case tcc_binary: /* a binary arithmetic expression */
1006 return (sizeof (struct tree_exp)
1007 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
1009 case tcc_constant: /* a constant */
1010 switch (code)
1012 case VOID_CST: return sizeof (tree_typed);
1013 case INTEGER_CST: gcc_unreachable ();
1014 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
1015 case REAL_CST: return sizeof (tree_real_cst);
1016 case FIXED_CST: return sizeof (tree_fixed_cst);
1017 case COMPLEX_CST: return sizeof (tree_complex);
1018 case VECTOR_CST: gcc_unreachable ();
1019 case STRING_CST: gcc_unreachable ();
1020 default:
1021 gcc_checking_assert (code >= NUM_TREE_CODES);
1022 return lang_hooks.tree_size (code);
1025 case tcc_exceptional: /* something random, like an identifier. */
1026 switch (code)
1028 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
1029 case TREE_LIST: return sizeof (tree_list);
1031 case ERROR_MARK:
1032 case PLACEHOLDER_EXPR: return sizeof (tree_common);
1034 case TREE_VEC: gcc_unreachable ();
1035 case OMP_CLAUSE: gcc_unreachable ();
1037 case SSA_NAME: return sizeof (tree_ssa_name);
1039 case STATEMENT_LIST: return sizeof (tree_statement_list);
1040 case BLOCK: return sizeof (struct tree_block);
1041 case CONSTRUCTOR: return sizeof (tree_constructor);
1042 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
1043 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
1045 default:
1046 gcc_checking_assert (code >= NUM_TREE_CODES);
1047 return lang_hooks.tree_size (code);
1050 default:
1051 gcc_unreachable ();
1055 /* Compute the number of bytes occupied by NODE. This routine only
1056 looks at TREE_CODE, except for those nodes that have variable sizes. */
1057 size_t
1058 tree_size (const_tree node)
1060 const enum tree_code code = TREE_CODE (node);
1061 switch (code)
1063 case INTEGER_CST:
1064 return (sizeof (struct tree_int_cst)
1065 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1067 case TREE_BINFO:
1068 return (offsetof (struct tree_binfo, base_binfos)
1069 + vec<tree, va_gc>
1070 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1072 case TREE_VEC:
1073 return (sizeof (struct tree_vec)
1074 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1076 case VECTOR_CST:
1077 return (sizeof (struct tree_vector)
1078 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1080 case STRING_CST:
1081 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1083 case OMP_CLAUSE:
1084 return (sizeof (struct tree_omp_clause)
1085 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1086 * sizeof (tree));
1088 default:
1089 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1090 return (sizeof (struct tree_exp)
1091 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1092 else
1093 return tree_code_size (code);
1097 /* Return tree node kind based on tree CODE. */
1099 static tree_node_kind
1100 get_stats_node_kind (enum tree_code code)
1102 enum tree_code_class type = TREE_CODE_CLASS (code);
1104 switch (type)
1106 case tcc_declaration: /* A decl node */
1107 return d_kind;
1108 case tcc_type: /* a type node */
1109 return t_kind;
1110 case tcc_statement: /* an expression with side effects */
1111 return s_kind;
1112 case tcc_reference: /* a reference */
1113 return r_kind;
1114 case tcc_expression: /* an expression */
1115 case tcc_comparison: /* a comparison expression */
1116 case tcc_unary: /* a unary arithmetic expression */
1117 case tcc_binary: /* a binary arithmetic expression */
1118 return e_kind;
1119 case tcc_constant: /* a constant */
1120 return c_kind;
1121 case tcc_exceptional: /* something random, like an identifier. */
1122 switch (code)
1124 case IDENTIFIER_NODE:
1125 return id_kind;
1126 case TREE_VEC:
1127 return vec_kind;
1128 case TREE_BINFO:
1129 return binfo_kind;
1130 case SSA_NAME:
1131 return ssa_name_kind;
1132 case BLOCK:
1133 return b_kind;
1134 case CONSTRUCTOR:
1135 return constr_kind;
1136 case OMP_CLAUSE:
1137 return omp_clause_kind;
1138 default:
1139 return x_kind;
1141 break;
1142 case tcc_vl_exp:
1143 return e_kind;
1144 default:
1145 gcc_unreachable ();
1149 /* Record interesting allocation statistics for a tree node with CODE
1150 and LENGTH. */
1152 static void
1153 record_node_allocation_statistics (enum tree_code code, size_t length)
1155 if (!GATHER_STATISTICS)
1156 return;
1158 tree_node_kind kind = get_stats_node_kind (code);
1160 tree_code_counts[(int) code]++;
1161 tree_node_counts[(int) kind]++;
1162 tree_node_sizes[(int) kind] += length;
1165 /* Allocate and return a new UID from the DECL_UID namespace. */
1167 int
1168 allocate_decl_uid (void)
1170 return next_decl_uid++;
1173 /* Return a newly allocated node of code CODE. For decl and type
1174 nodes, some other fields are initialized. The rest of the node is
1175 initialized to zero. This function cannot be used for TREE_VEC,
1176 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1177 tree_code_size.
1179 Achoo! I got a code in the node. */
1181 tree
1182 make_node (enum tree_code code MEM_STAT_DECL)
1184 tree t;
1185 enum tree_code_class type = TREE_CODE_CLASS (code);
1186 size_t length = tree_code_size (code);
1188 record_node_allocation_statistics (code, length);
1190 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1191 TREE_SET_CODE (t, code);
1193 switch (type)
1195 case tcc_statement:
1196 if (code != DEBUG_BEGIN_STMT)
1197 TREE_SIDE_EFFECTS (t) = 1;
1198 break;
1200 case tcc_declaration:
1201 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1203 if (code == FUNCTION_DECL)
1205 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1206 SET_DECL_MODE (t, FUNCTION_MODE);
1208 else
1209 SET_DECL_ALIGN (t, 1);
1211 DECL_SOURCE_LOCATION (t) = input_location;
1212 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1213 DECL_UID (t) = --next_debug_decl_uid;
1214 else
1216 DECL_UID (t) = allocate_decl_uid ();
1217 SET_DECL_PT_UID (t, -1);
1219 if (TREE_CODE (t) == LABEL_DECL)
1220 LABEL_DECL_UID (t) = -1;
1222 break;
1224 case tcc_type:
1225 TYPE_UID (t) = next_type_uid++;
1226 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1227 TYPE_USER_ALIGN (t) = 0;
1228 TYPE_MAIN_VARIANT (t) = t;
1229 TYPE_CANONICAL (t) = t;
1231 /* Default to no attributes for type, but let target change that. */
1232 TYPE_ATTRIBUTES (t) = NULL_TREE;
1233 targetm.set_default_type_attributes (t);
1235 /* We have not yet computed the alias set for this type. */
1236 TYPE_ALIAS_SET (t) = -1;
1237 break;
1239 case tcc_constant:
1240 TREE_CONSTANT (t) = 1;
1241 break;
1243 case tcc_expression:
1244 switch (code)
1246 case INIT_EXPR:
1247 case MODIFY_EXPR:
1248 case VA_ARG_EXPR:
1249 case PREDECREMENT_EXPR:
1250 case PREINCREMENT_EXPR:
1251 case POSTDECREMENT_EXPR:
1252 case POSTINCREMENT_EXPR:
1253 /* All of these have side-effects, no matter what their
1254 operands are. */
1255 TREE_SIDE_EFFECTS (t) = 1;
1256 break;
1258 default:
1259 break;
1261 break;
1263 case tcc_exceptional:
1264 switch (code)
1266 case TARGET_OPTION_NODE:
1267 TREE_TARGET_OPTION(t)
1268 = ggc_cleared_alloc<struct cl_target_option> ();
1269 break;
1271 case OPTIMIZATION_NODE:
1272 TREE_OPTIMIZATION (t)
1273 = ggc_cleared_alloc<struct cl_optimization> ();
1274 break;
1276 default:
1277 break;
1279 break;
1281 default:
1282 /* Other classes need no special treatment. */
1283 break;
1286 return t;
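/* For example, make_node (INTEGER_TYPE) returns a fresh, zeroed type node
   whose TYPE_MAIN_VARIANT and TYPE_CANONICAL point to itself, aligned to
   BITS_PER_UNIT and with TYPE_ALIAS_SET still -1 (not yet computed);
   callers are expected to fill in the remaining fields.  */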
1289 /* Free tree node. */
1291 void
1292 free_node (tree node)
1294 enum tree_code code = TREE_CODE (node);
1295 if (GATHER_STATISTICS)
1297 enum tree_node_kind kind = get_stats_node_kind (code);
1299 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1300 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1301 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1303 tree_code_counts[(int) TREE_CODE (node)]--;
1304 tree_node_counts[(int) kind]--;
1305 tree_node_sizes[(int) kind] -= tree_size (node);
1307 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1308 vec_free (CONSTRUCTOR_ELTS (node));
1309 else if (code == BLOCK)
1310 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1311 else if (code == TREE_BINFO)
1312 vec_free (BINFO_BASE_ACCESSES (node));
1313 else if (code == OPTIMIZATION_NODE)
1314 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1315 else if (code == TARGET_OPTION_NODE)
1316 cl_target_option_free (TREE_TARGET_OPTION (node));
1317 ggc_free (node);
1320 /* Return a new node with the same contents as NODE except that its
1321 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1323 tree
1324 copy_node (tree node MEM_STAT_DECL)
1326 tree t;
1327 enum tree_code code = TREE_CODE (node);
1328 size_t length;
1330 gcc_assert (code != STATEMENT_LIST);
1332 length = tree_size (node);
1333 record_node_allocation_statistics (code, length);
1334 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1335 memcpy (t, node, length);
1337 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1338 TREE_CHAIN (t) = 0;
1339 TREE_ASM_WRITTEN (t) = 0;
1340 TREE_VISITED (t) = 0;
1342 if (TREE_CODE_CLASS (code) == tcc_declaration)
1344 if (code == DEBUG_EXPR_DECL)
1345 DECL_UID (t) = --next_debug_decl_uid;
1346 else
1348 DECL_UID (t) = allocate_decl_uid ();
1349 if (DECL_PT_UID_SET_P (node))
1350 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1352 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1353 && DECL_HAS_VALUE_EXPR_P (node))
1355 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1356 DECL_HAS_VALUE_EXPR_P (t) = 1;
1358 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1359 if (VAR_P (node))
1361 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1362 t->decl_with_vis.symtab_node = NULL;
1364 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1366 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1367 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1369 if (TREE_CODE (node) == FUNCTION_DECL)
1371 DECL_STRUCT_FUNCTION (t) = NULL;
1372 t->decl_with_vis.symtab_node = NULL;
1375 else if (TREE_CODE_CLASS (code) == tcc_type)
1377 TYPE_UID (t) = next_type_uid++;
1378 /* The following is so that the debug code for
1379 the copy is different from the original type.
1380 The two statements usually duplicate each other
1381 (because they clear fields of the same union),
1382 but the optimizer should catch that. */
1383 TYPE_SYMTAB_ADDRESS (t) = 0;
1384 TYPE_SYMTAB_DIE (t) = 0;
1386 /* Do not copy the values cache. */
1387 if (TYPE_CACHED_VALUES_P (t))
1389 TYPE_CACHED_VALUES_P (t) = 0;
1390 TYPE_CACHED_VALUES (t) = NULL_TREE;
1393 else if (code == TARGET_OPTION_NODE)
1395 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1396 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1397 sizeof (struct cl_target_option));
1399 else if (code == OPTIMIZATION_NODE)
1401 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1402 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1403 sizeof (struct cl_optimization));
1406 return t;
1409 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1410 For example, this can copy a list made of TREE_LIST nodes. */
1412 tree
1413 copy_list (tree list)
1415 tree head;
1416 tree prev, next;
1418 if (list == 0)
1419 return 0;
1421 head = prev = copy_node (list);
1422 next = TREE_CHAIN (list);
1423 while (next)
1425 TREE_CHAIN (prev) = copy_node (next);
1426 prev = TREE_CHAIN (prev);
1427 next = TREE_CHAIN (next);
1429 return head;
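/* Note that copy_list makes a shallow copy: the TREE_PURPOSE and TREE_VALUE
   fields of the copied TREE_LIST nodes still point at the original operands;
   only the spine of the chain is duplicated.  */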
1433 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1434 INTEGER_CST with value CST and type TYPE. */
1436 static unsigned int
1437 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1439 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1440 /* We need extra HWIs if CST is an unsigned integer with its
1441 upper bit set. */
1442 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1443 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1444 return cst.get_len ();
1447 /* Return a new INTEGER_CST with value CST and type TYPE. */
1449 static tree
1450 build_new_int_cst (tree type, const wide_int &cst)
1452 unsigned int len = cst.get_len ();
1453 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1454 tree nt = make_int_cst (len, ext_len);
1456 if (len < ext_len)
1458 --ext_len;
1459 TREE_INT_CST_ELT (nt, ext_len)
1460 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1461 for (unsigned int i = len; i < ext_len; ++i)
1462 TREE_INT_CST_ELT (nt, i) = -1;
1464 else if (TYPE_UNSIGNED (type)
1465 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1467 len--;
1468 TREE_INT_CST_ELT (nt, len)
1469 = zext_hwi (cst.elt (len),
1470 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1473 for (unsigned int i = 0; i < len; i++)
1474 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1475 TREE_TYPE (nt) = type;
1476 return nt;
1479 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1481 static tree
1482 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1483 CXX_MEM_STAT_INFO)
1485 size_t length = sizeof (struct tree_poly_int_cst);
1486 record_node_allocation_statistics (POLY_INT_CST, length);
1488 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1490 TREE_SET_CODE (t, POLY_INT_CST);
1491 TREE_CONSTANT (t) = 1;
1492 TREE_TYPE (t) = type;
1493 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1494 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1495 return t;
1498 /* Create a constant tree that contains CST sign-extended to TYPE. */
1500 tree
1501 build_int_cst (tree type, poly_int64 cst)
1503 /* Support legacy code. */
1504 if (!type)
1505 type = integer_type_node;
1507 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1510 /* Create a constant tree that contains CST zero-extended to TYPE. */
1512 tree
1513 build_int_cstu (tree type, poly_uint64 cst)
1515 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1518 /* Create a constant tree that contains CST sign-extended to TYPE. */
1520 tree
1521 build_int_cst_type (tree type, poly_int64 cst)
1523 gcc_assert (type);
1524 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1527 /* Construct a tree of type TYPE with the value given by CST. The signedness
1528 of CST is assumed to be the same as the signedness of TYPE. */
1530 tree
1531 double_int_to_tree (tree type, double_int cst)
1533 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1536 /* We force the wide_int CST to the range of the type TYPE by sign or
1537 zero extending it. OVERFLOWABLE indicates whether we are interested in
1538 overflow of the value: when > 0 we are only interested in signed
1539 overflow, when < 0 we are interested in any overflow. OVERFLOWED
1540 indicates whether overflow has already occurred. We force the value
1541 to be within the range of TYPE (by setting to 0 or 1 all the bits
1542 outside the type's range). We set TREE_OVERFLOW if
1543 OVERFLOWED is nonzero,
1544 or OVERFLOWABLE is > 0 and signed overflow occurs,
1545 or OVERFLOWABLE is < 0 and any overflow occurs.
1546 We return a new tree node for the extended wide_int. The node
1547 is shared if no overflow flags are set. */
1551 tree
1552 force_fit_type (tree type, const poly_wide_int_ref &cst,
1553 int overflowable, bool overflowed)
1555 signop sign = TYPE_SIGN (type);
1557 /* If we need to set overflow flags, return a new unshared node. */
1558 if (overflowed || !wi::fits_to_tree_p (cst, type))
1560 if (overflowed
1561 || overflowable < 0
1562 || (overflowable > 0 && sign == SIGNED))
1564 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1565 sign);
1566 tree t;
1567 if (tmp.is_constant ())
1568 t = build_new_int_cst (type, tmp.coeffs[0]);
1569 else
1571 tree coeffs[NUM_POLY_INT_COEFFS];
1572 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1574 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1575 TREE_OVERFLOW (coeffs[i]) = 1;
1577 t = build_new_poly_int_cst (type, coeffs);
1579 TREE_OVERFLOW (t) = 1;
1580 return t;
1584 /* Else build a shared node. */
1585 return wide_int_to_tree (type, cst);
1588 /* These are the hash table functions for the hash table of larger shared
1589 INTEGER_CST nodes. */
1591 /* Return the hash code for X, an INTEGER_CST. */
1593 hashval_t
1594 int_cst_hasher::hash (tree x)
1596 const_tree const t = x;
1597 hashval_t code = TYPE_UID (TREE_TYPE (t));
1598 int i;
1600 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1601 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1603 return code;
1606 /* Return true if the value represented by X (an INTEGER_CST tree node)
1607 is the same as that represented by Y (also an INTEGER_CST). */
1609 bool
1610 int_cst_hasher::equal (tree x, tree y)
1612 const_tree const xt = x;
1613 const_tree const yt = y;
1615 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1616 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1617 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1618 return false;
1620 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1621 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1622 return false;
1624 return true;
1627 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1628 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1629 number of slots that can be cached for the type. */
1631 static inline tree
1632 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1633 int slot, int max_slots)
1635 gcc_checking_assert (slot >= 0);
1636 /* Initialize cache. */
1637 if (!TYPE_CACHED_VALUES_P (type))
1639 TYPE_CACHED_VALUES_P (type) = 1;
1640 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1642 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1643 if (!t)
1645 /* Create a new shared int. */
1646 t = build_new_int_cst (type, cst);
1647 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1649 return t;
1652 /* Create an INTEGER_CST node of TYPE and value CST.
1653 The returned node is always shared. For small integers we use a
1654 per-type vector cache, for larger ones we use a single hash table.
1655 The value is extended from its precision according to the sign of
1656 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1657 the upper bits and ensures that hashing and value equality based
1658 upon the underlying HOST_WIDE_INTs works without masking. */
1660 static tree
1661 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1663 tree t;
1664 int ix = -1;
1665 int limit = 0;
1667 gcc_assert (type);
1668 unsigned int prec = TYPE_PRECISION (type);
1669 signop sgn = TYPE_SIGN (type);
1671 /* Verify that everything is canonical. */
1672 int l = pcst.get_len ();
1673 if (l > 1)
1675 if (pcst.elt (l - 1) == 0)
1676 gcc_checking_assert (pcst.elt (l - 2) < 0);
1677 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1678 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1681 wide_int cst = wide_int::from (pcst, prec, sgn);
1682 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1684 enum tree_code code = TREE_CODE (type);
1685 if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1687 /* Cache NULL pointer and zero bounds. */
1688 if (cst == 0)
1689 ix = 0;
1690 /* Cache upper bounds of pointers. */
1691 else if (cst == wi::max_value (prec, sgn))
1692 ix = 1;
1693 /* Cache 1 which is used for a non-zero range. */
1694 else if (cst == 1)
1695 ix = 2;
1697 if (ix >= 0)
1699 t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1700 /* Make sure no one is clobbering the shared constant. */
1701 gcc_checking_assert (TREE_TYPE (t) == type
1702 && cst == wi::to_wide (t));
1703 return t;
1706 if (ext_len == 1)
1708 /* We just need to store a single HOST_WIDE_INT. */
1709 HOST_WIDE_INT hwi;
1710 if (TYPE_UNSIGNED (type))
1711 hwi = cst.to_uhwi ();
1712 else
1713 hwi = cst.to_shwi ();
1715 switch (code)
1717 case NULLPTR_TYPE:
1718 gcc_assert (hwi == 0);
1719 /* Fallthru. */
1721 case POINTER_TYPE:
1722 case REFERENCE_TYPE:
1723 /* Ignore pointers, as they were already handled above. */
1724 break;
1726 case BOOLEAN_TYPE:
1727 /* Cache false or true. */
1728 limit = 2;
1729 if (IN_RANGE (hwi, 0, 1))
1730 ix = hwi;
1731 break;
1733 case INTEGER_TYPE:
1734 case OFFSET_TYPE:
1735 if (TYPE_SIGN (type) == UNSIGNED)
1737 /* Cache [0, N). */
1738 limit = param_integer_share_limit;
1739 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1740 ix = hwi;
1742 else
1744 /* Cache [-1, N). */
1745 limit = param_integer_share_limit + 1;
1746 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1747 ix = hwi + 1;
1749 break;
1751 case ENUMERAL_TYPE:
1752 break;
1754 default:
1755 gcc_unreachable ();
1758 if (ix >= 0)
1760 t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1761 /* Make sure no one is clobbering the shared constant. */
1762 gcc_checking_assert (TREE_TYPE (t) == type
1763 && TREE_INT_CST_NUNITS (t) == 1
1764 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1765 && TREE_INT_CST_EXT_NUNITS (t) == 1
1766 && TREE_INT_CST_ELT (t, 0) == hwi);
1767 return t;
1769 else
1771 /* Use the cache of larger shared ints, using int_cst_node as
1772 a temporary. */
1774 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1775 TREE_TYPE (int_cst_node) = type;
1777 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1778 t = *slot;
1779 if (!t)
1781 /* Insert this one into the hash table. */
1782 t = int_cst_node;
1783 *slot = t;
1784 /* Make a new node for next time round. */
1785 int_cst_node = make_int_cst (1, 1);
1789 else
1791 /* The value either hashes properly or we drop it on the floor
1792 for the gc to take care of. There will not be enough of them
1793 to worry about. */
1795 tree nt = build_new_int_cst (type, cst);
1796 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1797 t = *slot;
1798 if (!t)
1800 /* Insert this one into the hash table. */
1801 t = nt;
1802 *slot = t;
1804 else
1805 ggc_free (nt);
1808 return t;
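/* Return a hash value for the POLY_INT_CST node T, combining the UID of
   its type with each of its coefficients.  */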
1811 hashval_t
1812 poly_int_cst_hasher::hash (tree t)
1814 inchash::hash hstate;
1816 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1817 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1818 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1820 return hstate.end ();
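/* Return true if the POLY_INT_CST node X matches Y, i.e. has Y's type and
   coefficient values.  */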
1823 bool
1824 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1826 if (TREE_TYPE (x) != y.first)
1827 return false;
1828 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1829 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1830 return false;
1831 return true;
1834 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1835 The elements must also have type TYPE. */
1837 tree
1838 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1840 unsigned int prec = TYPE_PRECISION (type);
1841 gcc_assert (prec <= values.coeffs[0].get_precision ());
1842 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1844 inchash::hash h;
1845 h.add_int (TYPE_UID (type));
1846 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1847 h.add_wide_int (c.coeffs[i]);
1848 poly_int_cst_hasher::compare_type comp (type, &c);
1849 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1850 INSERT);
1851 if (*slot == NULL_TREE)
1853 tree coeffs[NUM_POLY_INT_COEFFS];
1854 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1855 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1856 *slot = build_new_poly_int_cst (type, coeffs);
1858 return *slot;
1861 /* Create a constant tree with value VALUE in type TYPE. */
1863 tree
1864 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1866 if (value.is_constant ())
1867 return wide_int_to_tree_1 (type, value.coeffs[0]);
1868 return build_poly_int_cst (type, value);
1871 /* Insert INTEGER_CST T into a cache of integer constants, and return
1872 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1873 is false, and T falls into the type's 'smaller values' range, there
1874 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1875 or the value is large, should an existing entry exist, it is
1876 returned (rather than inserting T). */
1878 tree
1879 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1881 tree type = TREE_TYPE (t);
1882 int ix = -1;
1883 int limit = 0;
1884 int prec = TYPE_PRECISION (type);
1886 gcc_assert (!TREE_OVERFLOW (t));
1888 /* The caching indices here must match those in
1889 wide_int_to_tree_1. */
1890 switch (TREE_CODE (type))
1892 case NULLPTR_TYPE:
1893 gcc_checking_assert (integer_zerop (t));
1894 /* Fallthru. */
1896 case POINTER_TYPE:
1897 case REFERENCE_TYPE:
1899 if (integer_zerop (t))
1900 ix = 0;
1901 else if (integer_onep (t))
1902 ix = 2;
1904 if (ix >= 0)
1905 limit = 3;
1907 break;
1909 case BOOLEAN_TYPE:
1910 /* Cache false or true. */
1911 limit = 2;
1912 if (wi::ltu_p (wi::to_wide (t), 2))
1913 ix = TREE_INT_CST_ELT (t, 0);
1914 break;
1916 case INTEGER_TYPE:
1917 case OFFSET_TYPE:
1918 if (TYPE_UNSIGNED (type))
1920 /* Cache 0..N */
1921 limit = param_integer_share_limit;
1923 /* This is a little hokey, but if the precision is smaller than
1924 what is necessary to hold param_integer_share_limit, then the
1925 obvious test will not get the correct answer. */
1926 if (prec < HOST_BITS_PER_WIDE_INT)
1928 if (tree_to_uhwi (t)
1929 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1930 ix = tree_to_uhwi (t);
1932 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1933 ix = tree_to_uhwi (t);
1935 else
1937 /* Cache -1..N */
1938 limit = param_integer_share_limit + 1;
1940 if (integer_minus_onep (t))
1941 ix = 0;
1942 else if (!wi::neg_p (wi::to_wide (t)))
1944 if (prec < HOST_BITS_PER_WIDE_INT)
1946 if (tree_to_shwi (t) < param_integer_share_limit)
1947 ix = tree_to_shwi (t) + 1;
1949 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1950 ix = tree_to_shwi (t) + 1;
1953 break;
1955 case ENUMERAL_TYPE:
1956 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1957 members. */
1958 break;
1960 default:
1961 gcc_unreachable ();
1964 if (ix >= 0)
1966 /* Look for it in the type's vector of small shared ints. */
1967 if (!TYPE_CACHED_VALUES_P (type))
1969 TYPE_CACHED_VALUES_P (type) = 1;
1970 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1973 if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
1975 gcc_checking_assert (might_duplicate);
1976 t = r;
1978 else
1979 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1981 else
1983 /* Use the cache of larger shared ints. */
1984 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1985 if (tree r = *slot)
1987 /* If there is already an entry for the number, verify it's the
1988 same value. */
1989 gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
1990 /* And return the cached value. */
1991 t = r;
1993 else
1994 /* Otherwise insert this one into the hash table. */
1995 *slot = t;
1998 return t;
2002 /* Build an integer constant in TYPE such that the lowest BITS bits are ones
2003 and the rest are zeros. */
2005 tree
2006 build_low_bits_mask (tree type, unsigned bits)
2008 gcc_assert (bits <= TYPE_PRECISION (type));
2010 return wide_int_to_tree (type, wi::mask (bits, false,
2011 TYPE_PRECISION (type)));
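/* For instance, build_low_bits_mask (integer_type_node, 3) returns the
   shared INTEGER_CST 7 (binary 111).  */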
2014 /* Return true if X is an integer constant that can be expressed in an
2015 (unsigned) HOST_WIDE_INT without loss of precision. */
2017 bool
2018 cst_and_fits_in_hwi (const_tree x)
2020 return (TREE_CODE (x) == INTEGER_CST
2021 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2024 /* Build a newly constructed VECTOR_CST with the given values of
2025 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2027 tree
2028 make_vector (unsigned log2_npatterns,
2029 unsigned int nelts_per_pattern MEM_STAT_DECL)
2031 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2032 tree t;
2033 unsigned npatterns = 1 << log2_npatterns;
2034 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2035 unsigned length = (sizeof (struct tree_vector)
2036 + (encoded_nelts - 1) * sizeof (tree));
2038 record_node_allocation_statistics (VECTOR_CST, length);
2040 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2042 TREE_SET_CODE (t, VECTOR_CST);
2043 TREE_CONSTANT (t) = 1;
2044 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2045 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2047 return t;
2050 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2051 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2053 tree
2054 build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
2056 if (vec_safe_length (v) == 0)
2057 return build_zero_cst (type);
2059 unsigned HOST_WIDE_INT idx, nelts;
2060 tree value;
2062 /* We can't construct a VECTOR_CST for a variable number of elements. */
2063 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2064 tree_vector_builder vec (type, nelts, 1);
2065 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2067 if (TREE_CODE (value) == VECTOR_CST)
2069 /* If NELTS is constant then this must be too. */
2070 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2071 for (unsigned i = 0; i < sub_nelts; ++i)
2072 vec.quick_push (VECTOR_CST_ELT (value, i));
2074 else
2075 vec.quick_push (value);
2077 while (vec.length () < nelts)
2078 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2080 return vec.build ();
2083 /* Build a vector of type VECTYPE where all the elements are SCs. */
2084 tree
2085 build_vector_from_val (tree vectype, tree sc)
2087 unsigned HOST_WIDE_INT i, nunits;
2089 if (sc == error_mark_node)
2090 return sc;
2092 /* Verify that the vector type is suitable for SC. Note that there
2093 is some inconsistency in the type-system with respect to restrict
2094 qualifications of pointers. Vector types always have a main-variant
2095 element type and the qualification is applied to the vector-type.
2096 So TREE_TYPE (vector-type) does not return a properly qualified
2097 vector element-type. */
2098 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
2099 TREE_TYPE (vectype)));
2101 if (CONSTANT_CLASS_P (sc))
2103 tree_vector_builder v (vectype, 1, 1);
2104 v.quick_push (sc);
2105 return v.build ();
2107 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
2108 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
2109 else
2111 vec<constructor_elt, va_gc> *v;
2112 vec_alloc (v, nunits);
2113 for (i = 0; i < nunits; ++i)
2114 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2115 return build_constructor (vectype, v);
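/* Usage sketch (illustrative; V4SI stands for a hypothetical vector type of
   four ints):

     tree ones = build_vector_from_val (V4SI, integer_one_node);

   returns { 1, 1, 1, 1 } as a single duplicated VECTOR_CST pattern; a
   non-constant SC instead yields a CONSTRUCTOR, or a VEC_DUPLICATE_EXPR
   when the number of elements is not compile-time constant.  */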
2119 /* If TYPE is not a vector type, just return SC, otherwise return
2120 build_vector_from_val (TYPE, SC). */
2122 tree
2123 build_uniform_cst (tree type, tree sc)
2125 if (!VECTOR_TYPE_P (type))
2126 return sc;
2128 return build_vector_from_val (type, sc);
2131 /* Build a vector series of type TYPE in which element I has the value
2132 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2133 and a VEC_SERIES_EXPR otherwise. */
2135 tree
2136 build_vec_series (tree type, tree base, tree step)
2138 if (integer_zerop (step))
2139 return build_vector_from_val (type, base);
2140 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2142 tree_vector_builder builder (type, 1, 3);
2143 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2144 wi::to_wide (base) + wi::to_wide (step));
2145 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2146 wi::to_wide (elt1) + wi::to_wide (step));
2147 builder.quick_push (base);
2148 builder.quick_push (elt1);
2149 builder.quick_push (elt2);
2150 return builder.build ();
2152 return build2 (VEC_SERIES_EXPR, type, base, step);
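/* For illustration: with INTEGER_CST BASE == 0 and STEP == 4 the builder
   above encodes just { 0, 4, 8 }; the single 3-element pattern lets the
   rest of the series (12, 16, ...) be extrapolated, so the same encoding
   also works for variable-length vector types.  */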
2155 /* Return a vector with the same number of units and number of bits
2156 as VEC_TYPE, but in which the elements are a linear series of unsigned
2157 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2159 tree
2160 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2162 tree index_vec_type = vec_type;
2163 tree index_elt_type = TREE_TYPE (vec_type);
2164 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2165 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2167 index_elt_type = build_nonstandard_integer_type
2168 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2169 index_vec_type = build_vector_type (index_elt_type, nunits);
2172 tree_vector_builder v (index_vec_type, 1, 3);
2173 for (unsigned int i = 0; i < 3; ++i)
2174 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2175 return v.build ();
2178 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2179 elements are A and the rest are B. */
2181 tree
2182 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2184 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2185 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2186 /* Optimize the constant case. */
2187 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2188 count /= 2;
2189 tree_vector_builder builder (vec_type, count, 2);
2190 for (unsigned int i = 0; i < count * 2; ++i)
2191 builder.quick_push (i < num_a ? a : b);
2192 return builder.build ();
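/* Worked example (illustrative): for a constant 8-element vector type with
   NUM_A == 3, COUNT is halved from 8 to 4 and the builder receives the
   eight values A,A,A,B,B,B,B,B as four 2-element patterns, which decode
   back to { A, A, A, B, B, B, B, B }.  */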
2195 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2196 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2198 void
2199 recompute_constructor_flags (tree c)
2201 unsigned int i;
2202 tree val;
2203 bool constant_p = true;
2204 bool side_effects_p = false;
2205 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2207 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2209 /* Mostly ctors will have elts that don't have side-effects, so
2210 the usual case is to scan all the elements. Hence a single
2211 loop for both const and side effects, rather than one loop
2212 each (with early outs). */
2213 if (!TREE_CONSTANT (val))
2214 constant_p = false;
2215 if (TREE_SIDE_EFFECTS (val))
2216 side_effects_p = true;
2219 TREE_SIDE_EFFECTS (c) = side_effects_p;
2220 TREE_CONSTANT (c) = constant_p;
2223 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2224 CONSTRUCTOR C. */
2226 void
2227 verify_constructor_flags (tree c)
2229 unsigned int i;
2230 tree val;
2231 bool constant_p = TREE_CONSTANT (c);
2232 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2233 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2235 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2237 if (constant_p && !TREE_CONSTANT (val))
2238 internal_error ("non-constant element in constant CONSTRUCTOR");
2239 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2240 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2244 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2245 are in the vec pointed to by VALS. */
2246 tree
2247 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2249 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2251 TREE_TYPE (c) = type;
2252 CONSTRUCTOR_ELTS (c) = vals;
2254 recompute_constructor_flags (c);
2256 return c;
2259 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2260 INDEX and VALUE. */
2261 tree
2262 build_constructor_single (tree type, tree index, tree value)
2264 vec<constructor_elt, va_gc> *v;
2265 constructor_elt elt = {index, value};
2267 vec_alloc (v, 1);
2268 v->quick_push (elt);
2270 return build_constructor (type, v);
2274 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2275 are in a list pointed to by VALS. */
2276 tree
2277 build_constructor_from_list (tree type, tree vals)
2279 tree t;
2280 vec<constructor_elt, va_gc> *v = NULL;
2282 if (vals)
2284 vec_alloc (v, list_length (vals));
2285 for (t = vals; t; t = TREE_CHAIN (t))
2286 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2289 return build_constructor (type, v);
2292 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2293 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2294 fields in the constructor remain null. */
2296 tree
2297 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2299 vec<constructor_elt, va_gc> *v = NULL;
2301 for (tree t : vals)
2302 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2304 return build_constructor (type, v);
2307 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2308 of elements, provided as index/value pairs. */
2310 tree
2311 build_constructor_va (tree type, int nelts, ...)
2313 vec<constructor_elt, va_gc> *v = NULL;
2314 va_list p;
2316 va_start (p, nelts);
2317 vec_alloc (v, nelts);
2318 while (nelts--)
2320 tree index = va_arg (p, tree);
2321 tree value = va_arg (p, tree);
2322 CONSTRUCTOR_APPEND_ELT (v, index, value);
2324 va_end (p);
2325 return build_constructor (type, v);
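/* Usage sketch (hypothetical caller; ARR, X and Y are placeholders): the
   initializer { [0] = X, [1] = Y } for an array type ARR could be built as

     tree ctor = build_constructor_va (ARR, 2,
                                       size_int (0), X,
                                       size_int (1), Y);

   with each index/value pair passed as two consecutive arguments.  */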
2328 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2330 tree
2331 build_clobber (tree type)
2333 tree clobber = build_constructor (type, NULL);
2334 TREE_THIS_VOLATILE (clobber) = true;
2335 return clobber;
2338 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2340 tree
2341 build_fixed (tree type, FIXED_VALUE_TYPE f)
2343 tree v;
2344 FIXED_VALUE_TYPE *fp;
2346 v = make_node (FIXED_CST);
2347 fp = ggc_alloc<fixed_value> ();
2348 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2350 TREE_TYPE (v) = type;
2351 TREE_FIXED_CST_PTR (v) = fp;
2352 return v;
2355 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2357 tree
2358 build_real (tree type, REAL_VALUE_TYPE d)
2360 tree v;
2361 REAL_VALUE_TYPE *dp;
2362 int overflow = 0;
2364 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2365 Consider doing it via real_convert now. */
2367 v = make_node (REAL_CST);
2368 dp = ggc_alloc<real_value> ();
2369 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2371 TREE_TYPE (v) = type;
2372 TREE_REAL_CST_PTR (v) = dp;
2373 TREE_OVERFLOW (v) = overflow;
2374 return v;
2377 /* Like build_real, but first truncate D to the type. */
2379 tree
2380 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2382 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2385 /* Return a new REAL_CST node whose type is TYPE
2386 and whose value is the integer value of the INTEGER_CST node I. */
2388 REAL_VALUE_TYPE
2389 real_value_from_int_cst (const_tree type, const_tree i)
2391 REAL_VALUE_TYPE d;
2393 /* Clear all bits of the real value type so that we can later do
2394 bitwise comparisons to see if two values are the same. */
2395 memset (&d, 0, sizeof d);
2397 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2398 TYPE_SIGN (TREE_TYPE (i)));
2399 return d;
2402 /* Given a tree representing an integer constant I, return a tree
2403 representing the same value as a floating-point constant of type TYPE. */
2405 tree
2406 build_real_from_int_cst (tree type, const_tree i)
2408 tree v;
2409 int overflow = TREE_OVERFLOW (i);
2411 v = build_real (type, real_value_from_int_cst (type, i));
2413 TREE_OVERFLOW (v) |= overflow;
2414 return v;
2417 /* Return a new REAL_CST node whose type is TYPE
2418 and whose value is the integer value I which has sign SGN. */
2420 tree
2421 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2423 REAL_VALUE_TYPE d;
2425 /* Clear all bits of the real value type so that we can later do
2426 bitwise comparisons to see if two values are the same. */
2427 memset (&d, 0, sizeof d);
2429 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2430 return build_real (type, d);
2433 /* Return a newly constructed STRING_CST node whose value is the LEN
2434 characters at STR when STR is nonnull, or all zeros otherwise.
2435 Note that for a C string literal, LEN should include the trailing NUL.
2436 The TREE_TYPE is not initialized. */
2438 tree
2439 build_string (unsigned len, const char *str /*= NULL */)
2441 /* Do not waste bytes provided by padding of struct tree_string. */
2442 unsigned size = len + offsetof (struct tree_string, str) + 1;
2444 record_node_allocation_statistics (STRING_CST, size);
2446 tree s = (tree) ggc_internal_alloc (size);
2448 memset (s, 0, sizeof (struct tree_typed));
2449 TREE_SET_CODE (s, STRING_CST);
2450 TREE_CONSTANT (s) = 1;
2451 TREE_STRING_LENGTH (s) = len;
2452 if (str)
2453 memcpy (s->string.str, str, len);
2454 else
2455 memset (s->string.str, 0, len);
2456 s->string.str[len] = '\0';
2458 return s;
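/* For example, the C literal "hi" (two characters plus the trailing NUL)
   would be built as

     tree s = build_string (3, "hi");

   after which the caller still has to supply TREE_TYPE (s), e.g. with an
   appropriate array type.  */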
2461 /* Return a newly constructed COMPLEX_CST node whose value is
2462 specified by the real and imaginary parts REAL and IMAG.
2463 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2464 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2466 tree
2467 build_complex (tree type, tree real, tree imag)
2469 gcc_assert (CONSTANT_CLASS_P (real));
2470 gcc_assert (CONSTANT_CLASS_P (imag));
2472 tree t = make_node (COMPLEX_CST);
2474 TREE_REALPART (t) = real;
2475 TREE_IMAGPART (t) = imag;
2476 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2477 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2478 return t;
2481 /* Build a complex (inf +- 0i), such as for the result of cproj.
2482 TYPE is the complex tree type of the result. If NEG is true, the
2483 imaginary zero is negative. */
2485 tree
2486 build_complex_inf (tree type, bool neg)
2488 REAL_VALUE_TYPE rinf, rzero = dconst0;
2490 real_inf (&rinf);
2491 rzero.sign = neg;
2492 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2493 build_real (TREE_TYPE (type), rzero));
2496 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2497 element is set to 1. In particular, this is 1 + i for complex types. */
2499 tree
2500 build_each_one_cst (tree type)
2502 if (TREE_CODE (type) == COMPLEX_TYPE)
2504 tree scalar = build_one_cst (TREE_TYPE (type));
2505 return build_complex (type, scalar, scalar);
2507 else
2508 return build_one_cst (type);
2511 /* Return a constant of arithmetic type TYPE which is the
2512 multiplicative identity of the set TYPE. */
2514 tree
2515 build_one_cst (tree type)
2517 switch (TREE_CODE (type))
2519 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2520 case POINTER_TYPE: case REFERENCE_TYPE:
2521 case OFFSET_TYPE:
2522 return build_int_cst (type, 1);
2524 case REAL_TYPE:
2525 return build_real (type, dconst1);
2527 case FIXED_POINT_TYPE:
2528 /* We can only generate 1 for accum types. */
2529 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2530 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2532 case VECTOR_TYPE:
2534 tree scalar = build_one_cst (TREE_TYPE (type));
2536 return build_vector_from_val (type, scalar);
2539 case COMPLEX_TYPE:
2540 return build_complex (type,
2541 build_one_cst (TREE_TYPE (type)),
2542 build_zero_cst (TREE_TYPE (type)));
2544 default:
2545 gcc_unreachable ();
2549 /* Return an integer of type TYPE containing all 1's in as much precision as
2550 it contains, or a complex or vector whose subparts are such integers. */
2552 tree
2553 build_all_ones_cst (tree type)
2555 if (TREE_CODE (type) == COMPLEX_TYPE)
2557 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2558 return build_complex (type, scalar, scalar);
2560 else
2561 return build_minus_one_cst (type);
2564 /* Return a constant of arithmetic type TYPE which is the
2565 opposite of the multiplicative identity of the set TYPE. */
2567 tree
2568 build_minus_one_cst (tree type)
2570 switch (TREE_CODE (type))
2572 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2573 case POINTER_TYPE: case REFERENCE_TYPE:
2574 case OFFSET_TYPE:
2575 return build_int_cst (type, -1);
2577 case REAL_TYPE:
2578 return build_real (type, dconstm1);
2580 case FIXED_POINT_TYPE:
2581 /* We can only generate 1 for accum types. */
2582 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2583 return build_fixed (type,
2584 fixed_from_double_int (double_int_minus_one,
2585 SCALAR_TYPE_MODE (type)));
2587 case VECTOR_TYPE:
2589 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2591 return build_vector_from_val (type, scalar);
2594 case COMPLEX_TYPE:
2595 return build_complex (type,
2596 build_minus_one_cst (TREE_TYPE (type)),
2597 build_zero_cst (TREE_TYPE (type)));
2599 default:
2600 gcc_unreachable ();
2604 /* Build 0 constant of type TYPE. This is used by constructor folding
2605 and thus the constant should be represented in memory by
2606 zero(es). */
2608 tree
2609 build_zero_cst (tree type)
2611 switch (TREE_CODE (type))
2613 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2614 case POINTER_TYPE: case REFERENCE_TYPE:
2615 case OFFSET_TYPE: case NULLPTR_TYPE:
2616 return build_int_cst (type, 0);
2618 case REAL_TYPE:
2619 return build_real (type, dconst0);
2621 case FIXED_POINT_TYPE:
2622 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2624 case VECTOR_TYPE:
2626 tree scalar = build_zero_cst (TREE_TYPE (type));
2628 return build_vector_from_val (type, scalar);
2631 case COMPLEX_TYPE:
2633 tree zero = build_zero_cst (TREE_TYPE (type));
2635 return build_complex (type, zero, zero);
2638 default:
2639 if (!AGGREGATE_TYPE_P (type))
2640 return fold_convert (type, integer_zero_node);
2641 return build_constructor (type, NULL);
2646 /* Build a BINFO with room for BASE_BINFOS base binfos. */
2648 tree
2649 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2651 tree t;
2652 size_t length = (offsetof (struct tree_binfo, base_binfos)
2653 + vec<tree, va_gc>::embedded_size (base_binfos));
2655 record_node_allocation_statistics (TREE_BINFO, length);
2657 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2659 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2661 TREE_SET_CODE (t, TREE_BINFO);
2663 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2665 return t;
2668 /* Create a CASE_LABEL_EXPR tree node and return it. */
2670 tree
2671 build_case_label (tree low_value, tree high_value, tree label_decl)
2673 tree t = make_node (CASE_LABEL_EXPR);
2675 TREE_TYPE (t) = void_type_node;
2676 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2678 CASE_LOW (t) = low_value;
2679 CASE_HIGH (t) = high_value;
2680 CASE_LABEL (t) = label_decl;
2681 CASE_CHAIN (t) = NULL_TREE;
2683 return t;
2686 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2687 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2688 The latter determines the length of the HOST_WIDE_INT vector. */
2690 tree
2691 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2693 tree t;
2694 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2695 + sizeof (struct tree_int_cst));
2697 gcc_assert (len);
2698 record_node_allocation_statistics (INTEGER_CST, length);
2700 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2702 TREE_SET_CODE (t, INTEGER_CST);
2703 TREE_INT_CST_NUNITS (t) = len;
2704 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2705 /* to_offset can only be applied to trees that are offset_int-sized
2706 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2707 must be exactly the precision of offset_int and so LEN is correct. */
2708 if (ext_len <= OFFSET_INT_ELTS)
2709 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2710 else
2711 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2713 TREE_CONSTANT (t) = 1;
2715 return t;
2718 /* Build a newly constructed TREE_VEC node of length LEN. */
2720 tree
2721 make_tree_vec (int len MEM_STAT_DECL)
2723 tree t;
2724 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2726 record_node_allocation_statistics (TREE_VEC, length);
2728 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2730 TREE_SET_CODE (t, TREE_VEC);
2731 TREE_VEC_LENGTH (t) = len;
2733 return t;
2736 /* Grow a TREE_VEC node to new length LEN. */
2738 tree
2739 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2741 gcc_assert (TREE_CODE (v) == TREE_VEC);
2743 int oldlen = TREE_VEC_LENGTH (v);
2744 gcc_assert (len > oldlen);
2746 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2747 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2749 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2751 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2753 TREE_VEC_LENGTH (v) = len;
2755 return v;
2758 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2759 fixed, and scalar, complex or vector. */
2761 bool
2762 zerop (const_tree expr)
2764 return (integer_zerop (expr)
2765 || real_zerop (expr)
2766 || fixed_zerop (expr));
2769 /* Return 1 if EXPR is the integer constant zero or a complex constant
2770 of zero, or a location wrapper for such a constant. */
2772 bool
2773 integer_zerop (const_tree expr)
2775 STRIP_ANY_LOCATION_WRAPPER (expr);
2777 switch (TREE_CODE (expr))
2779 case INTEGER_CST:
2780 return wi::to_wide (expr) == 0;
2781 case COMPLEX_CST:
2782 return (integer_zerop (TREE_REALPART (expr))
2783 && integer_zerop (TREE_IMAGPART (expr)));
2784 case VECTOR_CST:
2785 return (VECTOR_CST_NPATTERNS (expr) == 1
2786 && VECTOR_CST_DUPLICATE_P (expr)
2787 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2788 default:
2789 return false;
2793 /* Return 1 if EXPR is the integer constant one or the corresponding
2794 complex constant, or a location wrapper for such a constant. */
2796 bool
2797 integer_onep (const_tree expr)
2799 STRIP_ANY_LOCATION_WRAPPER (expr);
2801 switch (TREE_CODE (expr))
2803 case INTEGER_CST:
2804 return wi::eq_p (wi::to_widest (expr), 1);
2805 case COMPLEX_CST:
2806 return (integer_onep (TREE_REALPART (expr))
2807 && integer_zerop (TREE_IMAGPART (expr)));
2808 case VECTOR_CST:
2809 return (VECTOR_CST_NPATTERNS (expr) == 1
2810 && VECTOR_CST_DUPLICATE_P (expr)
2811 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2812 default:
2813 return false;
2817 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2818 return 1 if every piece is the integer constant one.
2819 Also return 1 for location wrappers for such a constant. */
2821 bool
2822 integer_each_onep (const_tree expr)
2824 STRIP_ANY_LOCATION_WRAPPER (expr);
2826 if (TREE_CODE (expr) == COMPLEX_CST)
2827 return (integer_onep (TREE_REALPART (expr))
2828 && integer_onep (TREE_IMAGPART (expr)));
2829 else
2830 return integer_onep (expr);
2833 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2834 it contains, or a complex or vector whose subparts are such integers,
2835 or a location wrapper for such a constant. */
2837 bool
2838 integer_all_onesp (const_tree expr)
2840 STRIP_ANY_LOCATION_WRAPPER (expr);
2842 if (TREE_CODE (expr) == COMPLEX_CST
2843 && integer_all_onesp (TREE_REALPART (expr))
2844 && integer_all_onesp (TREE_IMAGPART (expr)))
2845 return true;
2847 else if (TREE_CODE (expr) == VECTOR_CST)
2848 return (VECTOR_CST_NPATTERNS (expr) == 1
2849 && VECTOR_CST_DUPLICATE_P (expr)
2850 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2852 else if (TREE_CODE (expr) != INTEGER_CST)
2853 return false;
2855 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2856 == wi::to_wide (expr));
2859 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2860 for such a constant. */
2862 bool
2863 integer_minus_onep (const_tree expr)
2865 STRIP_ANY_LOCATION_WRAPPER (expr);
2867 if (TREE_CODE (expr) == COMPLEX_CST)
2868 return (integer_all_onesp (TREE_REALPART (expr))
2869 && integer_zerop (TREE_IMAGPART (expr)));
2870 else
2871 return integer_all_onesp (expr);
2874 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2875 one bit on), or a location wrapper for such a constant. */
2877 bool
2878 integer_pow2p (const_tree expr)
2880 STRIP_ANY_LOCATION_WRAPPER (expr);
2882 if (TREE_CODE (expr) == COMPLEX_CST
2883 && integer_pow2p (TREE_REALPART (expr))
2884 && integer_zerop (TREE_IMAGPART (expr)))
2885 return true;
2887 if (TREE_CODE (expr) != INTEGER_CST)
2888 return false;
2890 return wi::popcount (wi::to_wide (expr)) == 1;
2893 /* Return 1 if EXPR is an integer constant other than zero or a
2894 complex constant other than zero, or a location wrapper for such a
2895 constant. */
2897 bool
2898 integer_nonzerop (const_tree expr)
2900 STRIP_ANY_LOCATION_WRAPPER (expr);
2902 return ((TREE_CODE (expr) == INTEGER_CST
2903 && wi::to_wide (expr) != 0)
2904 || (TREE_CODE (expr) == COMPLEX_CST
2905 && (integer_nonzerop (TREE_REALPART (expr))
2906 || integer_nonzerop (TREE_IMAGPART (expr)))));
2909 /* Return 1 if EXPR is the integer constant one. For vector,
2910 return 1 if every piece is the integer constant minus one
2911 (representing the value TRUE).
2912 Also return 1 for location wrappers for such a constant. */
2914 bool
2915 integer_truep (const_tree expr)
2917 STRIP_ANY_LOCATION_WRAPPER (expr);
2919 if (TREE_CODE (expr) == VECTOR_CST)
2920 return integer_all_onesp (expr);
2921 return integer_onep (expr);
2924 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2925 for such a constant. */
2927 bool
2928 fixed_zerop (const_tree expr)
2930 STRIP_ANY_LOCATION_WRAPPER (expr);
2932 return (TREE_CODE (expr) == FIXED_CST
2933 && TREE_FIXED_CST (expr).data.is_zero ());
2936 /* Return the power of two represented by a tree node known to be a
2937 power of two. */
2939 int
2940 tree_log2 (const_tree expr)
2942 if (TREE_CODE (expr) == COMPLEX_CST)
2943 return tree_log2 (TREE_REALPART (expr));
2945 return wi::exact_log2 (wi::to_wide (expr));
2948 /* Similar, but return the largest integer Y such that 2 ** Y is less
2949 than or equal to EXPR. */
2951 int
2952 tree_floor_log2 (const_tree expr)
2954 if (TREE_CODE (expr) == COMPLEX_CST)
2955 return tree_log2 (TREE_REALPART (expr));
2957 return wi::floor_log2 (wi::to_wide (expr));
2960 /* Return number of known trailing zero bits in EXPR, or, if the value of
2961 EXPR is known to be zero, the precision of its type. */
2963 unsigned int
2964 tree_ctz (const_tree expr)
2966 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2967 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2968 return 0;
2970 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2971 switch (TREE_CODE (expr))
2973 case INTEGER_CST:
2974 ret1 = wi::ctz (wi::to_wide (expr));
2975 return MIN (ret1, prec);
2976 case SSA_NAME:
2977 ret1 = wi::ctz (get_nonzero_bits (expr));
2978 return MIN (ret1, prec);
2979 case PLUS_EXPR:
2980 case MINUS_EXPR:
2981 case BIT_IOR_EXPR:
2982 case BIT_XOR_EXPR:
2983 case MIN_EXPR:
2984 case MAX_EXPR:
2985 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2986 if (ret1 == 0)
2987 return ret1;
2988 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2989 return MIN (ret1, ret2);
2990 case POINTER_PLUS_EXPR:
2991 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2992 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2993 /* Second operand is sizetype, which could be in theory
2994 wider than pointer's precision. Make sure we never
2995 return more than prec. */
2996 ret2 = MIN (ret2, prec);
2997 return MIN (ret1, ret2);
2998 case BIT_AND_EXPR:
2999 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3000 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3001 return MAX (ret1, ret2);
3002 case MULT_EXPR:
3003 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3004 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3005 return MIN (ret1 + ret2, prec);
3006 case LSHIFT_EXPR:
3007 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3008 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3009 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3011 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3012 return MIN (ret1 + ret2, prec);
3014 return ret1;
3015 case RSHIFT_EXPR:
3016 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3017 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3019 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3020 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3021 if (ret1 > ret2)
3022 return ret1 - ret2;
3024 return 0;
3025 case TRUNC_DIV_EXPR:
3026 case CEIL_DIV_EXPR:
3027 case FLOOR_DIV_EXPR:
3028 case ROUND_DIV_EXPR:
3029 case EXACT_DIV_EXPR:
3030 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3031 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
3033 int l = tree_log2 (TREE_OPERAND (expr, 1));
3034 if (l >= 0)
3036 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3037 ret2 = l;
3038 if (ret1 > ret2)
3039 return ret1 - ret2;
3042 return 0;
3043 CASE_CONVERT:
3044 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3045 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
3046 ret1 = prec;
3047 return MIN (ret1, prec);
3048 case SAVE_EXPR:
3049 return tree_ctz (TREE_OPERAND (expr, 0));
3050 case COND_EXPR:
3051 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
3052 if (ret1 == 0)
3053 return 0;
3054 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
3055 return MIN (ret1, ret2);
3056 case COMPOUND_EXPR:
3057 return tree_ctz (TREE_OPERAND (expr, 1));
3058 case ADDR_EXPR:
3059 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
3060 if (ret1 > BITS_PER_UNIT)
3062 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
3063 return MIN (ret1, prec);
3065 return 0;
3066 default:
3067 return 0;
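/* Worked example (illustrative): for X * 8 where tree_ctz (X) is known to
   be 2, the MULT_EXPR case adds the operands' counts: ctz (8) == 3, so the
   product is known to have at least 2 + 3 == 5 trailing zero bits, capped
   at the precision of the type.  */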
3071 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3072 decimal float constants, so don't return 1 for them.
3073 Also return 1 for location wrappers around such a constant. */
3075 bool
3076 real_zerop (const_tree expr)
3078 STRIP_ANY_LOCATION_WRAPPER (expr);
3080 switch (TREE_CODE (expr))
3082 case REAL_CST:
3083 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3084 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3085 case COMPLEX_CST:
3086 return real_zerop (TREE_REALPART (expr))
3087 && real_zerop (TREE_IMAGPART (expr));
3088 case VECTOR_CST:
3090 /* Don't simply check for a duplicate because the predicate
3091 accepts both +0.0 and -0.0. */
3092 unsigned count = vector_cst_encoded_nelts (expr);
3093 for (unsigned int i = 0; i < count; ++i)
3094 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3095 return false;
3096 return true;
3098 default:
3099 return false;
3103 /* Return 1 if EXPR is the real constant one in real or complex form.
3104 Trailing zeroes matter for decimal float constants, so don't return
3105 1 for them.
3106 Also return 1 for location wrappers around such a constant. */
3108 bool
3109 real_onep (const_tree expr)
3111 STRIP_ANY_LOCATION_WRAPPER (expr);
3113 switch (TREE_CODE (expr))
3115 case REAL_CST:
3116 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3117 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3118 case COMPLEX_CST:
3119 return real_onep (TREE_REALPART (expr))
3120 && real_zerop (TREE_IMAGPART (expr));
3121 case VECTOR_CST:
3122 return (VECTOR_CST_NPATTERNS (expr) == 1
3123 && VECTOR_CST_DUPLICATE_P (expr)
3124 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3125 default:
3126 return false;
3130 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3131 matter for decimal float constants, so don't return 1 for them.
3132 Also return 1 for location wrappers around such a constant. */
3134 bool
3135 real_minus_onep (const_tree expr)
3137 STRIP_ANY_LOCATION_WRAPPER (expr);
3139 switch (TREE_CODE (expr))
3141 case REAL_CST:
3142 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3143 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3144 case COMPLEX_CST:
3145 return real_minus_onep (TREE_REALPART (expr))
3146 && real_zerop (TREE_IMAGPART (expr));
3147 case VECTOR_CST:
3148 return (VECTOR_CST_NPATTERNS (expr) == 1
3149 && VECTOR_CST_DUPLICATE_P (expr)
3150 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3151 default:
3152 return false;
3156 /* Nonzero if EXP is a constant or a cast of a constant. */
3158 bool
3159 really_constant_p (const_tree exp)
3161 /* This is not quite the same as STRIP_NOPS. It does more. */
3162 while (CONVERT_EXPR_P (exp)
3163 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3164 exp = TREE_OPERAND (exp, 0);
3165 return TREE_CONSTANT (exp);
3168 /* Return true if T holds a polynomial pointer difference, storing it in
3169 *VALUE if so. A true return means that T's precision is no greater
3170 than 64 bits, which is the largest address space we support, so *VALUE
3171 never loses precision. However, the signedness of the result does
3172 not necessarily match the signedness of T: sometimes an unsigned type
3173 like sizetype is used to encode a value that is actually negative. */
3175 bool
3176 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
3178 if (!t)
3179 return false;
3180 if (TREE_CODE (t) == INTEGER_CST)
3182 if (!cst_and_fits_in_hwi (t))
3183 return false;
3184 *value = int_cst_value (t);
3185 return true;
3187 if (POLY_INT_CST_P (t))
3189 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3190 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3191 return false;
3192 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3193 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3194 return true;
3196 return false;
3199 poly_int64
3200 tree_to_poly_int64 (const_tree t)
3202 gcc_assert (tree_fits_poly_int64_p (t));
3203 if (POLY_INT_CST_P (t))
3204 return poly_int_cst_value (t).force_shwi ();
3205 return TREE_INT_CST_LOW (t);
3208 poly_uint64
3209 tree_to_poly_uint64 (const_tree t)
3211 gcc_assert (tree_fits_poly_uint64_p (t));
3212 if (POLY_INT_CST_P (t))
3213 return poly_int_cst_value (t).force_uhwi ();
3214 return TREE_INT_CST_LOW (t);
3217 /* Return first list element whose TREE_VALUE is ELEM.
3218 Return 0 if ELEM is not in LIST. */
3220 tree
3221 value_member (tree elem, tree list)
3223 while (list)
3225 if (elem == TREE_VALUE (list))
3226 return list;
3227 list = TREE_CHAIN (list);
3229 return NULL_TREE;
3232 /* Return first list element whose TREE_PURPOSE is ELEM.
3233 Return 0 if ELEM is not in LIST. */
3235 tree
3236 purpose_member (const_tree elem, tree list)
3238 while (list)
3240 if (elem == TREE_PURPOSE (list))
3241 return list;
3242 list = TREE_CHAIN (list);
3244 return NULL_TREE;
3247 /* Return true if ELEM is in V. */
3249 bool
3250 vec_member (const_tree elem, vec<tree, va_gc> *v)
3252 unsigned ix;
3253 tree t;
3254 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3255 if (elem == t)
3256 return true;
3257 return false;
3260 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3261 NULL_TREE. */
3263 tree
3264 chain_index (int idx, tree chain)
3266 for (; chain && idx > 0; --idx)
3267 chain = TREE_CHAIN (chain);
3268 return chain;
3271 /* Return nonzero if ELEM is part of the chain CHAIN. */
3273 bool
3274 chain_member (const_tree elem, const_tree chain)
3276 while (chain)
3278 if (elem == chain)
3279 return true;
3280 chain = DECL_CHAIN (chain);
3283 return false;
3286 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3287 We expect a null pointer to mark the end of the chain.
3288 This is the Lisp primitive `length'. */
3290 int
3291 list_length (const_tree t)
3293 const_tree p = t;
3294 #ifdef ENABLE_TREE_CHECKING
3295 const_tree q = t;
3296 #endif
3297 int len = 0;
3299 while (p)
3301 p = TREE_CHAIN (p);
3302 #ifdef ENABLE_TREE_CHECKING
3303 if (len % 2)
3304 q = TREE_CHAIN (q);
3305 gcc_assert (p != q);
3306 #endif
3307 len++;
3310 return len;
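/* Note on the checking code above (illustrative): Q advances one link for
   every two advances of P, so if the chain were accidentally circular the
   faster pointer P would eventually catch up with Q and the assertion
   P != Q would fire instead of the loop spinning forever.  */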
3313 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3314 UNION_TYPE TYPE, or NULL_TREE if none. */
3316 tree
3317 first_field (const_tree type)
3319 tree t = TYPE_FIELDS (type);
3320 while (t && TREE_CODE (t) != FIELD_DECL)
3321 t = TREE_CHAIN (t);
3322 return t;
3325 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3326 UNION_TYPE TYPE, or NULL_TREE if none. */
3328 tree
3329 last_field (const_tree type)
3331 tree last = NULL_TREE;
3333 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3335 if (TREE_CODE (fld) != FIELD_DECL)
3336 continue;
3338 last = fld;
3341 return last;
3344 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3345 by modifying the last node in chain 1 to point to chain 2.
3346 This is the Lisp primitive `nconc'. */
3348 tree
3349 chainon (tree op1, tree op2)
3351 tree t1;
3353 if (!op1)
3354 return op2;
3355 if (!op2)
3356 return op1;
3358 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3359 continue;
3360 TREE_CHAIN (t1) = op2;
3362 #ifdef ENABLE_TREE_CHECKING
3364 tree t2;
3365 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3366 gcc_assert (t2 != t1);
3368 #endif
3370 return op1;
3373 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3375 tree
3376 tree_last (tree chain)
3378 tree next;
3379 if (chain)
3380 while ((next = TREE_CHAIN (chain)))
3381 chain = next;
3382 return chain;
3385 /* Reverse the order of elements in the chain T,
3386 and return the new head of the chain (old last element). */
3388 tree
3389 nreverse (tree t)
3391 tree prev = 0, decl, next;
3392 for (decl = t; decl; decl = next)
3394 /* We shouldn't be using this function to reverse BLOCK chains; we
3395 have blocks_nreverse for that. */
3396 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3397 next = TREE_CHAIN (decl);
3398 TREE_CHAIN (decl) = prev;
3399 prev = decl;
3401 return prev;
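/* For illustration: applied to the chain A -> B -> C, nreverse rewrites the
   TREE_CHAIN links in place and returns C, so the chain then reads
   C -> B -> A and the old head A becomes the tail.  */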
3404 /* Return a newly created TREE_LIST node whose
3405 purpose and value fields are PARM and VALUE. */
3407 tree
3408 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3410 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3411 TREE_PURPOSE (t) = parm;
3412 TREE_VALUE (t) = value;
3413 return t;
3416 /* Build a chain of TREE_LIST nodes from a vector. */
3418 tree
3419 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3421 tree ret = NULL_TREE;
3422 tree *pp = &ret;
3423 unsigned int i;
3424 tree t;
3425 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3427 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3428 pp = &TREE_CHAIN (*pp);
3430 return ret;
3433 /* Return a newly created TREE_LIST node whose
3434 purpose and value fields are PURPOSE and VALUE
3435 and whose TREE_CHAIN is CHAIN. */
3437 tree
3438 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3440 tree node;
3442 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3443 memset (node, 0, sizeof (struct tree_common));
3445 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3447 TREE_SET_CODE (node, TREE_LIST);
3448 TREE_CHAIN (node) = chain;
3449 TREE_PURPOSE (node) = purpose;
3450 TREE_VALUE (node) = value;
3451 return node;
3454 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3455 trees. */
3457 vec<tree, va_gc> *
3458 ctor_to_vec (tree ctor)
3460 vec<tree, va_gc> *vec;
3461 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3462 unsigned int ix;
3463 tree val;
3465 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3466 vec->quick_push (val);
3468 return vec;
3471 /* Return the size nominally occupied by an object of type TYPE
3472 when it resides in memory. The value is measured in units of bytes,
3473 and its data type is that normally used for type sizes
3474 (which is the first type created by make_signed_type or
3475 make_unsigned_type). */
3477 tree
3478 size_in_bytes_loc (location_t loc, const_tree type)
3480 tree t;
3482 if (type == error_mark_node)
3483 return integer_zero_node;
3485 type = TYPE_MAIN_VARIANT (type);
3486 t = TYPE_SIZE_UNIT (type);
3488 if (t == 0)
3490 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3491 return size_zero_node;
3494 return t;
3497 /* Return the size of TYPE (in bytes) as a wide integer
3498 or return -1 if the size can vary or is larger than an integer. */
3500 HOST_WIDE_INT
3501 int_size_in_bytes (const_tree type)
3503 tree t;
3505 if (type == error_mark_node)
3506 return 0;
3508 type = TYPE_MAIN_VARIANT (type);
3509 t = TYPE_SIZE_UNIT (type);
3511 if (t && tree_fits_uhwi_p (t))
3512 return TREE_INT_CST_LOW (t);
3513 else
3514 return -1;
3517 /* Return the maximum size of TYPE (in bytes) as a wide integer
3518 or return -1 if the size can vary or is larger than an integer. */
3520 HOST_WIDE_INT
3521 max_int_size_in_bytes (const_tree type)
3523 HOST_WIDE_INT size = -1;
3524 tree size_tree;
3526 /* If this is an array type, check for a possible MAX_SIZE attached. */
3528 if (TREE_CODE (type) == ARRAY_TYPE)
3530 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3532 if (size_tree && tree_fits_uhwi_p (size_tree))
3533 size = tree_to_uhwi (size_tree);
3536 /* If we still haven't been able to get a size, see if the language
3537 can compute a maximum size. */
3539 if (size == -1)
3541 size_tree = lang_hooks.types.max_size (type);
3543 if (size_tree && tree_fits_uhwi_p (size_tree))
3544 size = tree_to_uhwi (size_tree);
3547 return size;
3550 /* Return the bit position of FIELD, in bits from the start of the record.
3551 This is a tree of type bitsizetype. */
3553 tree
3554 bit_position (const_tree field)
3556 return bit_from_pos (DECL_FIELD_OFFSET (field),
3557 DECL_FIELD_BIT_OFFSET (field));
3560 /* Return the byte position of FIELD, in bytes from the start of the record.
3561 This is a tree of type sizetype. */
3563 tree
3564 byte_position (const_tree field)
3566 return byte_from_pos (DECL_FIELD_OFFSET (field),
3567 DECL_FIELD_BIT_OFFSET (field));
3570 /* Likewise, but return as an integer. It must be representable in
3571 that way (since it could be a signed value, we don't have the
3572 option of returning -1 like int_size_in_bytes can). */
3574 HOST_WIDE_INT
3575 int_byte_position (const_tree field)
3577 return tree_to_shwi (byte_position (field));
3580 /* Return, as a tree node, the number of elements for TYPE (which is an
3581 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3583 tree
3584 array_type_nelts (const_tree type)
3586 tree index_type, min, max;
3588 /* If they did it with unspecified bounds, then we should have already
3589 given an error about it before we got here. */
3590 if (! TYPE_DOMAIN (type))
3591 return error_mark_node;
3593 index_type = TYPE_DOMAIN (type);
3594 min = TYPE_MIN_VALUE (index_type);
3595 max = TYPE_MAX_VALUE (index_type);
3597 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3598 if (!max)
3600 /* Zero-sized arrays are represented by the C FE as complete types with
3601 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while the C++ FE represents
3602 them as min 0, max -1. */
3603 if (COMPLETE_TYPE_P (type)
3604 && integer_zerop (TYPE_SIZE (type))
3605 && integer_zerop (min))
3606 return build_int_cst (TREE_TYPE (min), -1);
3608 return error_mark_node;
3611 return (integer_zerop (min)
3612 ? max
3613 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
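/* For example, for the C type int[10] (domain 0 .. 9) this returns the
   INTEGER_CST 9, and for a C zero-length array (complete type, zero
   TYPE_SIZE, NULL TYPE_MAX_VALUE) it returns -1, matching the C++
   representation of an empty domain.  */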
3616 /* If arg is static -- a reference to an object in static storage -- then
3617 return the object. This is not the same as the C meaning of `static'.
3618 If arg isn't static, return NULL. */
3620 tree
3621 staticp (tree arg)
3623 switch (TREE_CODE (arg))
3625 case FUNCTION_DECL:
3626 /* Nested functions are static, even though taking their address will
3627 involve a trampoline as we unnest the nested function and create
3628 the trampoline on the tree level. */
3629 return arg;
3631 case VAR_DECL:
3632 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3633 && ! DECL_THREAD_LOCAL_P (arg)
3634 && ! DECL_DLLIMPORT_P (arg)
3635 ? arg : NULL);
3637 case CONST_DECL:
3638 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3639 ? arg : NULL);
3641 case CONSTRUCTOR:
3642 return TREE_STATIC (arg) ? arg : NULL;
3644 case LABEL_DECL:
3645 case STRING_CST:
3646 return arg;
3648 case COMPONENT_REF:
3649 /* If the thing being referenced is not a field, then it is
3650 something language specific. */
3651 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3653 /* If we are referencing a bitfield, we can't evaluate an
3654 ADDR_EXPR at compile time and so it isn't a constant. */
3655 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3656 return NULL;
3658 return staticp (TREE_OPERAND (arg, 0));
3660 case BIT_FIELD_REF:
3661 return NULL;
3663 case INDIRECT_REF:
3664 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3666 case ARRAY_REF:
3667 case ARRAY_RANGE_REF:
3668 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3669 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3670 return staticp (TREE_OPERAND (arg, 0));
3671 else
3672 return NULL;
3674 case COMPOUND_LITERAL_EXPR:
3675 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3677 default:
3678 return NULL;
3685 /* Return whether OP is a DECL whose address is function-invariant. */
3687 bool
3688 decl_address_invariant_p (const_tree op)
3690 /* The conditions below are slightly less strict than the one in
3691 staticp. */
3693 switch (TREE_CODE (op))
3695 case PARM_DECL:
3696 case RESULT_DECL:
3697 case LABEL_DECL:
3698 case FUNCTION_DECL:
3699 return true;
3701 case VAR_DECL:
3702 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3703 || DECL_THREAD_LOCAL_P (op)
3704 || DECL_CONTEXT (op) == current_function_decl
3705 || decl_function_context (op) == current_function_decl)
3706 return true;
3707 break;
3709 case CONST_DECL:
3710 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3711 || decl_function_context (op) == current_function_decl)
3712 return true;
3713 break;
3715 default:
3716 break;
3719 return false;
3722 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3724 bool
3725 decl_address_ip_invariant_p (const_tree op)
3727 /* The conditions below are slightly less strict than the one in
3728 staticp. */
3730 switch (TREE_CODE (op))
3732 case LABEL_DECL:
3733 case FUNCTION_DECL:
3734 case STRING_CST:
3735 return true;
3737 case VAR_DECL:
3738 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3739 && !DECL_DLLIMPORT_P (op))
3740 || DECL_THREAD_LOCAL_P (op))
3741 return true;
3742 break;
3744 case CONST_DECL:
3745 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3746 return true;
3747 break;
3749 default:
3750 break;
3753 return false;
3757 /* Return true if T is function-invariant (internal function, does
3758 not handle arithmetic; that's handled in skip_simple_arithmetic and
3759 tree_invariant_p). */
3761 static bool
3762 tree_invariant_p_1 (tree t)
3764 tree op;
3766 if (TREE_CONSTANT (t)
3767 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3768 return true;
3770 switch (TREE_CODE (t))
3772 case SAVE_EXPR:
3773 return true;
3775 case ADDR_EXPR:
3776 op = TREE_OPERAND (t, 0);
3777 while (handled_component_p (op))
3779 switch (TREE_CODE (op))
3781 case ARRAY_REF:
3782 case ARRAY_RANGE_REF:
3783 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3784 || TREE_OPERAND (op, 2) != NULL_TREE
3785 || TREE_OPERAND (op, 3) != NULL_TREE)
3786 return false;
3787 break;
3789 case COMPONENT_REF:
3790 if (TREE_OPERAND (op, 2) != NULL_TREE)
3791 return false;
3792 break;
3794 default:;
3796 op = TREE_OPERAND (op, 0);
3799 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3801 default:
3802 break;
3805 return false;
3808 /* Return true if T is function-invariant. */
3810 bool
3811 tree_invariant_p (tree t)
3813 tree inner = skip_simple_arithmetic (t);
3814 return tree_invariant_p_1 (inner);
3817 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3818 Do this to any expression which may be used in more than one place,
3819 but must be evaluated only once.
3821 Normally, expand_expr would reevaluate the expression each time.
3822 Calling save_expr produces something that is evaluated and recorded
3823 the first time expand_expr is called on it. Subsequent calls to
3824 expand_expr just reuse the recorded value.
3826 The call to expand_expr that generates code that actually computes
3827 the value is the first call *at compile time*. Subsequent calls
3828 *at compile time* generate code to use the saved value.
3829 This produces the correct result provided that *at run time* control
3830 always flows through the insns made by the first expand_expr
3831 before reaching the other places where the save_expr was evaluated.
3832 You, the caller of save_expr, must make sure this is so.
3834 Constants, and certain read-only nodes, are returned with no
3835 SAVE_EXPR because that is safe. Expressions containing placeholders
3836 are not touched; see tree.def for an explanation of what these
3837 are used for. */
3839 tree
3840 save_expr (tree expr)
3842 tree inner;
3844 /* If the tree evaluates to a constant, then we don't want to hide that
3845 fact (i.e. this allows further folding, and direct checks for constants).
3846 However, a read-only object that has side effects cannot be bypassed.
3847 Since it is no problem to reevaluate literals, we just return the
3848 literal node. */
3849 inner = skip_simple_arithmetic (expr);
3850 if (TREE_CODE (inner) == ERROR_MARK)
3851 return inner;
3853 if (tree_invariant_p_1 (inner))
3854 return expr;
3856 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3857 it means that the size or offset of some field of an object depends on
3858 the value within another field.
3860 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3861 and some variable since it would then need to be both evaluated once and
3862 evaluated more than once. Front-ends must assure this case cannot
3863 happen by surrounding any such subexpressions in their own SAVE_EXPR
3864 and forcing evaluation at the proper time. */
3865 if (contains_placeholder_p (inner))
3866 return expr;
3868 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3870 /* This expression might be placed ahead of a jump to ensure that the
3871 value was computed on both sides of the jump. So make sure it isn't
3872 eliminated as dead. */
3873 TREE_SIDE_EFFECTS (expr) = 1;
3874 return expr;
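/* Usage sketch (hypothetical): a front end lowering ABS (E) to
   E < 0 ? -E : E would first do

     tree saved = save_expr (E);

   and build the conditional from SAVED, so that a side-effecting E is
   evaluated only once even though it is referenced three times.  */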
3877 /* Look inside EXPR into any simple arithmetic operations. Return the
3878 outermost non-arithmetic or non-invariant node. */
3880 tree
3881 skip_simple_arithmetic (tree expr)
3883 /* We don't care about whether this can be used as an lvalue in this
3884 context. */
3885 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3886 expr = TREE_OPERAND (expr, 0);
3888 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3889 a constant, it will be more efficient to not make another SAVE_EXPR since
3890 it will allow better simplification and GCSE will be able to merge the
3891 computations if they actually occur. */
3892 while (true)
3894 if (UNARY_CLASS_P (expr))
3895 expr = TREE_OPERAND (expr, 0);
3896 else if (BINARY_CLASS_P (expr))
3898 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3899 expr = TREE_OPERAND (expr, 0);
3900 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3901 expr = TREE_OPERAND (expr, 1);
3902 else
3903 break;
3905 else
3906 break;
3909 return expr;
3912 /* Look inside EXPR into simple arithmetic operations involving constants.
3913 Return the outermost non-arithmetic or non-constant node. */
3915 tree
3916 skip_simple_constant_arithmetic (tree expr)
3918 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3919 expr = TREE_OPERAND (expr, 0);
3921 while (true)
3923 if (UNARY_CLASS_P (expr))
3924 expr = TREE_OPERAND (expr, 0);
3925 else if (BINARY_CLASS_P (expr))
3927 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3928 expr = TREE_OPERAND (expr, 0);
3929 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3930 expr = TREE_OPERAND (expr, 1);
3931 else
3932 break;
3934 else
3935 break;
3938 return expr;
3941 /* Return which tree structure is used by T. */
3943 enum tree_node_structure_enum
3944 tree_node_structure (const_tree t)
3946 const enum tree_code code = TREE_CODE (t);
3947 return tree_node_structure_for_code (code);
3950 /* Set various status flags when building a CALL_EXPR object T. */
3952 static void
3953 process_call_operands (tree t)
3955 bool side_effects = TREE_SIDE_EFFECTS (t);
3956 bool read_only = false;
3957 int i = call_expr_flags (t);
3959 /* Calls have side-effects, except those to const or pure functions. */
3960 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3961 side_effects = true;
3962 /* Propagate TREE_READONLY of arguments for const functions. */
3963 if (i & ECF_CONST)
3964 read_only = true;
3966 if (!side_effects || read_only)
3967 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3969 tree op = TREE_OPERAND (t, i);
3970 if (op && TREE_SIDE_EFFECTS (op))
3971 side_effects = true;
3972 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3973 read_only = false;
3976 TREE_SIDE_EFFECTS (t) = side_effects;
3977 TREE_READONLY (t) = read_only;
3980 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3981 size or offset that depends on a field within a record. */
3983 bool
3984 contains_placeholder_p (const_tree exp)
3986 enum tree_code code;
3988 if (!exp)
3989 return 0;
3991 code = TREE_CODE (exp);
3992 if (code == PLACEHOLDER_EXPR)
3993 return 1;
3995 switch (TREE_CODE_CLASS (code))
3997 case tcc_reference:
3998 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3999 position computations since they will be converted into a
4000 WITH_RECORD_EXPR involving the reference, which we assume
4001 here will be valid. */
4002 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4004 case tcc_exceptional:
4005 if (code == TREE_LIST)
4006 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
4007 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
4008 break;
4010 case tcc_unary:
4011 case tcc_binary:
4012 case tcc_comparison:
4013 case tcc_expression:
4014 switch (code)
4016 case COMPOUND_EXPR:
4017 /* Ignoring the first operand isn't quite right, but works best. */
4018 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
4020 case COND_EXPR:
4021 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4022 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
4023 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
4025 case SAVE_EXPR:
4026 /* The save_expr function never wraps anything containing
4027 a PLACEHOLDER_EXPR. */
4028 return 0;
4030 default:
4031 break;
4034 switch (TREE_CODE_LENGTH (code))
4036 case 1:
4037 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4038 case 2:
4039 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4040 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
4041 default:
4042 return 0;
4045 case tcc_vl_exp:
4046 switch (code)
4048 case CALL_EXPR:
4050 const_tree arg;
4051 const_call_expr_arg_iterator iter;
4052 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
4053 if (CONTAINS_PLACEHOLDER_P (arg))
4054 return 1;
4055 return 0;
4057 default:
4058 return 0;
4061 default:
4062 return 0;
4064 return 0;
4067 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4068 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4069 field positions. */
4071 static bool
4072 type_contains_placeholder_1 (const_tree type)
4074 /* If the size contains a placeholder or the parent type (component type in
4075 the case of arrays) type involves a placeholder, this type does. */
4076 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
4077 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
4078 || (!POINTER_TYPE_P (type)
4079 && TREE_TYPE (type)
4080 && type_contains_placeholder_p (TREE_TYPE (type))))
4081 return true;
4083 /* Now do type-specific checks. Note that the last part of the check above
4084 greatly limits what we have to do below. */
4085 switch (TREE_CODE (type))
4087 case VOID_TYPE:
4088 case OPAQUE_TYPE:
4089 case COMPLEX_TYPE:
4090 case ENUMERAL_TYPE:
4091 case BOOLEAN_TYPE:
4092 case POINTER_TYPE:
4093 case OFFSET_TYPE:
4094 case REFERENCE_TYPE:
4095 case METHOD_TYPE:
4096 case FUNCTION_TYPE:
4097 case VECTOR_TYPE:
4098 case NULLPTR_TYPE:
4099 return false;
4101 case INTEGER_TYPE:
4102 case REAL_TYPE:
4103 case FIXED_POINT_TYPE:
4104 /* Here we just check the bounds. */
4105 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
4106 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
4108 case ARRAY_TYPE:
4109 /* We have already checked the component type above, so just check
4110 the domain type. Flexible array members have a null domain. */
4111 return TYPE_DOMAIN (type) ?
4112 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4114 case RECORD_TYPE:
4115 case UNION_TYPE:
4116 case QUAL_UNION_TYPE:
4118 tree field;
4120 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4121 if (TREE_CODE (field) == FIELD_DECL
4122 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4123 || (TREE_CODE (type) == QUAL_UNION_TYPE
4124 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4125 || type_contains_placeholder_p (TREE_TYPE (field))))
4126 return true;
4128 return false;
4131 default:
4132 gcc_unreachable ();
4136 /* Wrapper around above function used to cache its result. */
4138 bool
4139 type_contains_placeholder_p (tree type)
4141 bool result;
4143 /* If the contains_placeholder_bits field has been initialized,
4144 then we know the answer. */
4145 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4146 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4148 /* Indicate that we've seen this type node, and the answer is false.
4149 This is what we want to return if we run into recursion via fields. */
4150 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4152 /* Compute the real value. */
4153 result = type_contains_placeholder_1 (type);
4155 /* Store the real value. */
4156 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4158 return result;
4161 /* Push tree EXP onto vector QUEUE if it is not already present. */
4163 static void
4164 push_without_duplicates (tree exp, vec<tree> *queue)
4166 unsigned int i;
4167 tree iter;
4169 FOR_EACH_VEC_ELT (*queue, i, iter)
4170 if (simple_cst_equal (iter, exp) == 1)
4171 break;
4173 if (!iter)
4174 queue->safe_push (exp);
4177 /* Given a tree EXP, find all occurrences of references to fields
4178 in a PLACEHOLDER_EXPR and place them in vector REFS without
4179 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4180 we assume here that EXP contains only arithmetic expressions
4181 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4182 argument list. */
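/* For illustration, assuming EXP is such a self-referential expression
   built by a front end, the distinct placeholder references it contains
   could be collected like so (a sketch only, not a requirement of the API):

     auto_vec<tree> refs;
     find_placeholder_in_expr (exp, &refs);

   After the call, REFS holds each such reference at most once.  */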
4184 void
4185 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4187 enum tree_code code = TREE_CODE (exp);
4188 tree inner;
4189 int i;
4191 /* We handle TREE_LIST and COMPONENT_REF separately. */
4192 if (code == TREE_LIST)
4194 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4195 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4197 else if (code == COMPONENT_REF)
4199 for (inner = TREE_OPERAND (exp, 0);
4200 REFERENCE_CLASS_P (inner);
4201 inner = TREE_OPERAND (inner, 0))
4204 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4205 push_without_duplicates (exp, refs);
4206 else
4207 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4209 else
4210 switch (TREE_CODE_CLASS (code))
4212 case tcc_constant:
4213 break;
4215 case tcc_declaration:
4216 /* Variables allocated to static storage can stay. */
4217 if (!TREE_STATIC (exp))
4218 push_without_duplicates (exp, refs);
4219 break;
4221 case tcc_expression:
4222 /* This is the pattern built in ada/make_aligning_type. */
4223 if (code == ADDR_EXPR
4224 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4226 push_without_duplicates (exp, refs);
4227 break;
4230 /* Fall through. */
4232 case tcc_exceptional:
4233 case tcc_unary:
4234 case tcc_binary:
4235 case tcc_comparison:
4236 case tcc_reference:
4237 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4238 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4239 break;
4241 case tcc_vl_exp:
4242 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4243 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4244 break;
4246 default:
4247 gcc_unreachable ();
4251 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4252 return a tree with all occurrences of references to F in a
4253 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4254 CONST_DECLs. Note that we assume here that EXP contains only
4255 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4256 occurring only in their argument list. */
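/* For illustration, assuming SIZE is such an expression that refers to
   FIELD through a PLACEHOLDER_EXPR and REPL is the value to use for it
   (all three being hypothetical caller-provided trees), the rewritten
   expression can be obtained through the wrapper macro from tree.h:

     tree concrete = SUBSTITUTE_IN_EXPR (size, field, repl);

   SIZE is returned unchanged when FIELD does not occur in it.  */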
4258 tree
4259 substitute_in_expr (tree exp, tree f, tree r)
4261 enum tree_code code = TREE_CODE (exp);
4262 tree op0, op1, op2, op3;
4263 tree new_tree;
4265 /* We handle TREE_LIST and COMPONENT_REF separately. */
4266 if (code == TREE_LIST)
4268 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4269 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4270 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4271 return exp;
4273 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4275 else if (code == COMPONENT_REF)
4277 tree inner;
4279 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4280 and it is the right field, replace it with R. */
4281 for (inner = TREE_OPERAND (exp, 0);
4282 REFERENCE_CLASS_P (inner);
4283 inner = TREE_OPERAND (inner, 0))
4286 /* The field. */
4287 op1 = TREE_OPERAND (exp, 1);
4289 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4290 return r;
4292 /* If this expression hasn't been completed yet, leave it alone. */
4293 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4294 return exp;
4296 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4297 if (op0 == TREE_OPERAND (exp, 0))
4298 return exp;
4300 new_tree
4301 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4303 else
4304 switch (TREE_CODE_CLASS (code))
4306 case tcc_constant:
4307 return exp;
4309 case tcc_declaration:
4310 if (exp == f)
4311 return r;
4312 else
4313 return exp;
4315 case tcc_expression:
4316 if (exp == f)
4317 return r;
4319 /* Fall through. */
4321 case tcc_exceptional:
4322 case tcc_unary:
4323 case tcc_binary:
4324 case tcc_comparison:
4325 case tcc_reference:
4326 switch (TREE_CODE_LENGTH (code))
4328 case 0:
4329 return exp;
4331 case 1:
4332 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4333 if (op0 == TREE_OPERAND (exp, 0))
4334 return exp;
4336 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4337 break;
4339 case 2:
4340 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4341 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4343 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4344 return exp;
4346 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4347 break;
4349 case 3:
4350 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4351 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4352 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4354 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4355 && op2 == TREE_OPERAND (exp, 2))
4356 return exp;
4358 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4359 break;
4361 case 4:
4362 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4363 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4364 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4365 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4367 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4368 && op2 == TREE_OPERAND (exp, 2)
4369 && op3 == TREE_OPERAND (exp, 3))
4370 return exp;
4372 new_tree
4373 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4374 break;
4376 default:
4377 gcc_unreachable ();
4379 break;
4381 case tcc_vl_exp:
4383 int i;
4385 new_tree = NULL_TREE;
4387 /* If we are trying to replace F with a constant or with another
4388 instance of one of the arguments of the call, inline functions
4389 that do nothing other than compute a value from the arguments
4390 they are passed. This makes it possible to fold the replacement
4391 expression partially or entirely. */
4392 if (code == CALL_EXPR)
4394 bool maybe_inline = false;
4395 if (CONSTANT_CLASS_P (r))
4396 maybe_inline = true;
4397 else
4398 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4399 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4401 maybe_inline = true;
4402 break;
4404 if (maybe_inline)
4406 tree t = maybe_inline_call_in_expr (exp);
4407 if (t)
4408 return SUBSTITUTE_IN_EXPR (t, f, r);
4412 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4414 tree op = TREE_OPERAND (exp, i);
4415 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4416 if (new_op != op)
4418 if (!new_tree)
4419 new_tree = copy_node (exp);
4420 TREE_OPERAND (new_tree, i) = new_op;
4424 if (new_tree)
4426 new_tree = fold (new_tree);
4427 if (TREE_CODE (new_tree) == CALL_EXPR)
4428 process_call_operands (new_tree);
4430 else
4431 return exp;
4433 break;
4435 default:
4436 gcc_unreachable ();
4439 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4441 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4442 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4444 return new_tree;
4447 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4448 for it within OBJ, a tree that is an object or a chain of references. */
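/* For illustration, assuming OBJ is a reference to an object whose type
   has a self-referential size, that size can be made concrete for this
   particular object through the wrapper macro from tree.h:

     tree size
       = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (TREE_TYPE (obj)), obj);

   Each PLACEHOLDER_EXPR of a matching type inside the expression is
   replaced by OBJ (or by an indirection through OBJ).  */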
4450 tree
4451 substitute_placeholder_in_expr (tree exp, tree obj)
4453 enum tree_code code = TREE_CODE (exp);
4454 tree op0, op1, op2, op3;
4455 tree new_tree;
4457 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4458 in the chain of OBJ. */
4459 if (code == PLACEHOLDER_EXPR)
4461 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4462 tree elt;
4464 for (elt = obj; elt != 0;
4465 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4466 || TREE_CODE (elt) == COND_EXPR)
4467 ? TREE_OPERAND (elt, 1)
4468 : (REFERENCE_CLASS_P (elt)
4469 || UNARY_CLASS_P (elt)
4470 || BINARY_CLASS_P (elt)
4471 || VL_EXP_CLASS_P (elt)
4472 || EXPRESSION_CLASS_P (elt))
4473 ? TREE_OPERAND (elt, 0) : 0))
4474 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4475 return elt;
4477 for (elt = obj; elt != 0;
4478 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4479 || TREE_CODE (elt) == COND_EXPR)
4480 ? TREE_OPERAND (elt, 1)
4481 : (REFERENCE_CLASS_P (elt)
4482 || UNARY_CLASS_P (elt)
4483 || BINARY_CLASS_P (elt)
4484 || VL_EXP_CLASS_P (elt)
4485 || EXPRESSION_CLASS_P (elt))
4486 ? TREE_OPERAND (elt, 0) : 0))
4487 if (POINTER_TYPE_P (TREE_TYPE (elt))
4488 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4489 == need_type))
4490 return fold_build1 (INDIRECT_REF, need_type, elt);
4492 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4493 survives until RTL generation, there will be an error. */
4494 return exp;
4497 /* TREE_LIST is special because we need to look at TREE_VALUE
4498 and TREE_CHAIN, not TREE_OPERANDS. */
4499 else if (code == TREE_LIST)
4501 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4502 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4503 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4504 return exp;
4506 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4508 else
4509 switch (TREE_CODE_CLASS (code))
4511 case tcc_constant:
4512 case tcc_declaration:
4513 return exp;
4515 case tcc_exceptional:
4516 case tcc_unary:
4517 case tcc_binary:
4518 case tcc_comparison:
4519 case tcc_expression:
4520 case tcc_reference:
4521 case tcc_statement:
4522 switch (TREE_CODE_LENGTH (code))
4524 case 0:
4525 return exp;
4527 case 1:
4528 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4529 if (op0 == TREE_OPERAND (exp, 0))
4530 return exp;
4532 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4533 break;
4535 case 2:
4536 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4537 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4539 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4540 return exp;
4542 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4543 break;
4545 case 3:
4546 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4547 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4548 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4550 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4551 && op2 == TREE_OPERAND (exp, 2))
4552 return exp;
4554 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4555 break;
4557 case 4:
4558 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4559 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4560 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4561 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4563 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4564 && op2 == TREE_OPERAND (exp, 2)
4565 && op3 == TREE_OPERAND (exp, 3))
4566 return exp;
4568 new_tree
4569 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4570 break;
4572 default:
4573 gcc_unreachable ();
4575 break;
4577 case tcc_vl_exp:
4579 int i;
4581 new_tree = NULL_TREE;
4583 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4585 tree op = TREE_OPERAND (exp, i);
4586 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4587 if (new_op != op)
4589 if (!new_tree)
4590 new_tree = copy_node (exp);
4591 TREE_OPERAND (new_tree, i) = new_op;
4595 if (new_tree)
4597 new_tree = fold (new_tree);
4598 if (TREE_CODE (new_tree) == CALL_EXPR)
4599 process_call_operands (new_tree);
4601 else
4602 return exp;
4604 break;
4606 default:
4607 gcc_unreachable ();
4610 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4612 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4613 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4615 return new_tree;
4619 /* Subroutine of stabilize_reference; this is called for subtrees of
4620 references. Any expression with side-effects must be put in a SAVE_EXPR
4621 to ensure that it is only evaluated once.
4623 We don't put SAVE_EXPR nodes around everything, because assigning very
4624 simple expressions to temporaries causes us to miss good opportunities
4625 for optimizations. Among other things, the opportunity to fold in the
4626 addition of a constant into an addressing mode often gets lost, e.g.
4627 "y[i+1] += x;". In general, we take the approach that we should not make
4628 an assignment unless we are forced into it - i.e., that any non-side effect
4629 operator should be allowed, and that cse should take care of coalescing
4630 multiple utterances of the same expression should that prove fruitful. */
4632 static tree
4633 stabilize_reference_1 (tree e)
4635 tree result;
4636 enum tree_code code = TREE_CODE (e);
4638 /* We cannot ignore const expressions because it might be a reference
4639 to a const array whose index contains side-effects. But we can
4640 ignore things that are actual constants or that have already been
4641 handled by this function. */
4643 if (tree_invariant_p (e))
4644 return e;
4646 switch (TREE_CODE_CLASS (code))
4648 case tcc_exceptional:
4649 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4650 have side-effects. */
4651 if (code == STATEMENT_LIST)
4652 return save_expr (e);
4653 /* FALLTHRU */
4654 case tcc_type:
4655 case tcc_declaration:
4656 case tcc_comparison:
4657 case tcc_statement:
4658 case tcc_expression:
4659 case tcc_reference:
4660 case tcc_vl_exp:
4661 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4662 so that it will only be evaluated once. */
4663 /* The reference (r) and comparison (<) classes could be handled as
4664 below, but it is generally faster to only evaluate them once. */
4665 if (TREE_SIDE_EFFECTS (e))
4666 return save_expr (e);
4667 return e;
4669 case tcc_constant:
4670 /* Constants need no processing. In fact, we should never reach
4671 here. */
4672 return e;
4674 case tcc_binary:
4675 /* Division is slow and tends to be compiled with jumps,
4676 especially the division by powers of 2 that is often
4677 found inside of an array reference. So do it just once. */
4678 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4679 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4680 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4681 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4682 return save_expr (e);
4683 /* Recursively stabilize each operand. */
4684 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4685 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4686 break;
4688 case tcc_unary:
4689 /* Recursively stabilize each operand. */
4690 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4691 break;
4693 default:
4694 gcc_unreachable ();
4697 TREE_TYPE (result) = TREE_TYPE (e);
4698 TREE_READONLY (result) = TREE_READONLY (e);
4699 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4700 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4702 return result;
4705 /* Stabilize a reference so that we can use it any number of times
4706 without causing its operands to be evaluated more than once.
4707 Returns the stabilized reference. This works by means of save_expr,
4708 so see the caveats in the comments about save_expr.
4710 Also allows conversion expressions whose operands are references.
4711 Any other kind of expression is returned unchanged. */
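/* A minimal sketch, assuming REF and RHS are caller-provided trees for
   something like "ref += rhs"; stabilizing REF once lets it be used for
   both the read and the write without re-evaluating its operands:

     tree lhs = stabilize_reference (ref);
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (lhs), lhs, rhs);
     tree asg = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, sum);  */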
4713 tree
4714 stabilize_reference (tree ref)
4716 tree result;
4717 enum tree_code code = TREE_CODE (ref);
4719 switch (code)
4721 case VAR_DECL:
4722 case PARM_DECL:
4723 case RESULT_DECL:
4724 /* No action is needed in this case. */
4725 return ref;
4727 CASE_CONVERT:
4728 case FLOAT_EXPR:
4729 case FIX_TRUNC_EXPR:
4730 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4731 break;
4733 case INDIRECT_REF:
4734 result = build_nt (INDIRECT_REF,
4735 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4736 break;
4738 case COMPONENT_REF:
4739 result = build_nt (COMPONENT_REF,
4740 stabilize_reference (TREE_OPERAND (ref, 0)),
4741 TREE_OPERAND (ref, 1), NULL_TREE);
4742 break;
4744 case BIT_FIELD_REF:
4745 result = build_nt (BIT_FIELD_REF,
4746 stabilize_reference (TREE_OPERAND (ref, 0)),
4747 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4748 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4749 break;
4751 case ARRAY_REF:
4752 result = build_nt (ARRAY_REF,
4753 stabilize_reference (TREE_OPERAND (ref, 0)),
4754 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4755 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4756 break;
4758 case ARRAY_RANGE_REF:
4759 result = build_nt (ARRAY_RANGE_REF,
4760 stabilize_reference (TREE_OPERAND (ref, 0)),
4761 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4762 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4763 break;
4765 case COMPOUND_EXPR:
4766 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4767 it wouldn't be ignored. This matters when dealing with
4768 volatiles. */
4769 return stabilize_reference_1 (ref);
4771 /* If arg isn't a kind of lvalue we recognize, make no change.
4772 Caller should recognize the error for an invalid lvalue. */
4773 default:
4774 return ref;
4776 case ERROR_MARK:
4777 return error_mark_node;
4780 TREE_TYPE (result) = TREE_TYPE (ref);
4781 TREE_READONLY (result) = TREE_READONLY (ref);
4782 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4783 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4785 return result;
4788 /* Low-level constructors for expressions. */
4790 /* A helper function for build1 and constant folders. Set TREE_CONSTANT
4791 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4793 void
4794 recompute_tree_invariant_for_addr_expr (tree t)
4796 tree node;
4797 bool tc = true, se = false;
4799 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4801 /* We started out assuming this address is both invariant and constant
4802 and has no side effects. Now go down any handled components and see if
4803 any of them involve offsets that are either non-constant or non-invariant.
4804 Also check for side-effects.
4806 ??? Note that this code makes no attempt to deal with the case where
4807 taking the address of something causes a copy due to misalignment. */
4809 #define UPDATE_FLAGS(NODE) \
4810 do { tree _node = (NODE); \
4811 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4812 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4814 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4815 node = TREE_OPERAND (node, 0))
4817 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4818 array reference (probably made temporarily by the G++ front end),
4819 so ignore all the operands. */
4820 if ((TREE_CODE (node) == ARRAY_REF
4821 || TREE_CODE (node) == ARRAY_RANGE_REF)
4822 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4824 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4825 if (TREE_OPERAND (node, 2))
4826 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4827 if (TREE_OPERAND (node, 3))
4828 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4830 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4831 FIELD_DECL, apparently. The G++ front end can put something else
4832 there, at least temporarily. */
4833 else if (TREE_CODE (node) == COMPONENT_REF
4834 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4836 if (TREE_OPERAND (node, 2))
4837 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4841 node = lang_hooks.expr_to_decl (node, &tc, &se);
4843 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4844 the address, since &(*a)->b is a form of addition. If it's a constant, the
4845 address is constant too. If it's a decl, its address is constant if the
4846 decl is static. Everything else is not constant and, furthermore,
4847 taking the address of a volatile variable is not volatile. */
4848 if (TREE_CODE (node) == INDIRECT_REF
4849 || TREE_CODE (node) == MEM_REF)
4850 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4851 else if (CONSTANT_CLASS_P (node))
4853 else if (DECL_P (node))
4854 tc &= (staticp (node) != NULL_TREE);
4855 else
4857 tc = false;
4858 se |= TREE_SIDE_EFFECTS (node);
4862 TREE_CONSTANT (t) = tc;
4863 TREE_SIDE_EFFECTS (t) = se;
4864 #undef UPDATE_FLAGS
4867 /* Build an expression of code CODE, data type TYPE, and operands as
4868 specified. Expressions and reference nodes can be created this way.
4869 Constants, decls, types and misc nodes cannot be.
4871 We define six non-variadic functions, build0 through build5, taking
4872 from 0 to 5 arguments. This is enough for all extant tree codes. */
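/* For example, assuming A and B are integer-typed trees and F is the
   FIELD_DECL of a field of record object R (all hypothetical
   caller-provided trees):

     tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);
     tree fld = build3 (COMPONENT_REF, TREE_TYPE (f), r, f, NULL_TREE);  */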
4874 tree
4875 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4877 tree t;
4879 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4881 t = make_node (code PASS_MEM_STAT);
4882 TREE_TYPE (t) = tt;
4884 return t;
4887 tree
4888 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4890 int length = sizeof (struct tree_exp);
4891 tree t;
4893 record_node_allocation_statistics (code, length);
4895 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4897 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4899 memset (t, 0, sizeof (struct tree_common));
4901 TREE_SET_CODE (t, code);
4903 TREE_TYPE (t) = type;
4904 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4905 TREE_OPERAND (t, 0) = node;
4906 if (node && !TYPE_P (node))
4908 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4909 TREE_READONLY (t) = TREE_READONLY (node);
4912 if (TREE_CODE_CLASS (code) == tcc_statement)
4914 if (code != DEBUG_BEGIN_STMT)
4915 TREE_SIDE_EFFECTS (t) = 1;
4917 else switch (code)
4919 case VA_ARG_EXPR:
4920 /* All of these have side-effects, no matter what their
4921 operands are. */
4922 TREE_SIDE_EFFECTS (t) = 1;
4923 TREE_READONLY (t) = 0;
4924 break;
4926 case INDIRECT_REF:
4927 /* Whether a dereference is readonly has nothing to do with whether
4928 its operand is readonly. */
4929 TREE_READONLY (t) = 0;
4930 break;
4932 case ADDR_EXPR:
4933 if (node)
4934 recompute_tree_invariant_for_addr_expr (t);
4935 break;
4937 default:
4938 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4939 && node && !TYPE_P (node)
4940 && TREE_CONSTANT (node))
4941 TREE_CONSTANT (t) = 1;
4942 if (TREE_CODE_CLASS (code) == tcc_reference
4943 && node && TREE_THIS_VOLATILE (node))
4944 TREE_THIS_VOLATILE (t) = 1;
4945 break;
4948 return t;
4951 #define PROCESS_ARG(N) \
4952 do { \
4953 TREE_OPERAND (t, N) = arg##N; \
4954 if (arg##N &&!TYPE_P (arg##N)) \
4956 if (TREE_SIDE_EFFECTS (arg##N)) \
4957 side_effects = 1; \
4958 if (!TREE_READONLY (arg##N) \
4959 && !CONSTANT_CLASS_P (arg##N)) \
4960 (void) (read_only = 0); \
4961 if (!TREE_CONSTANT (arg##N)) \
4962 (void) (constant = 0); \
4964 } while (0)
4966 tree
4967 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4969 bool constant, read_only, side_effects, div_by_zero;
4970 tree t;
4972 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4974 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4975 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4976 /* When sizetype precision doesn't match that of pointers
4977 we need to be able to build explicit extensions or truncations
4978 of the offset argument. */
4979 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4980 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4981 && TREE_CODE (arg1) == INTEGER_CST);
4983 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4984 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4985 && ptrofftype_p (TREE_TYPE (arg1)));
4987 t = make_node (code PASS_MEM_STAT);
4988 TREE_TYPE (t) = tt;
4990 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4991 result based on those same flags for the arguments. But if the
4992 arguments aren't really even `tree' expressions, we shouldn't be trying
4993 to do this. */
4995 /* Expressions without side effects may be constant if their
4996 arguments are as well. */
4997 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4998 || TREE_CODE_CLASS (code) == tcc_binary);
4999 read_only = 1;
5000 side_effects = TREE_SIDE_EFFECTS (t);
5002 switch (code)
5004 case TRUNC_DIV_EXPR:
5005 case CEIL_DIV_EXPR:
5006 case FLOOR_DIV_EXPR:
5007 case ROUND_DIV_EXPR:
5008 case EXACT_DIV_EXPR:
5009 case CEIL_MOD_EXPR:
5010 case FLOOR_MOD_EXPR:
5011 case ROUND_MOD_EXPR:
5012 case TRUNC_MOD_EXPR:
5013 div_by_zero = integer_zerop (arg1);
5014 break;
5015 default:
5016 div_by_zero = false;
5019 PROCESS_ARG (0);
5020 PROCESS_ARG (1);
5022 TREE_SIDE_EFFECTS (t) = side_effects;
5023 if (code == MEM_REF)
5025 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5027 tree o = TREE_OPERAND (arg0, 0);
5028 TREE_READONLY (t) = TREE_READONLY (o);
5029 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5032 else
5034 TREE_READONLY (t) = read_only;
5035 /* Don't mark X / 0 as constant. */
5036 TREE_CONSTANT (t) = constant && !div_by_zero;
5037 TREE_THIS_VOLATILE (t)
5038 = (TREE_CODE_CLASS (code) == tcc_reference
5039 && arg0 && TREE_THIS_VOLATILE (arg0));
5042 return t;
5046 tree
5047 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
5048 tree arg2 MEM_STAT_DECL)
5050 bool constant, read_only, side_effects;
5051 tree t;
5053 gcc_assert (TREE_CODE_LENGTH (code) == 3);
5054 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5056 t = make_node (code PASS_MEM_STAT);
5057 TREE_TYPE (t) = tt;
5059 read_only = 1;
5061 /* As a special exception, if COND_EXPR has NULL branches, we
5062 assume that it is a gimple statement and always consider
5063 it to have side effects. */
5064 if (code == COND_EXPR
5065 && tt == void_type_node
5066 && arg1 == NULL_TREE
5067 && arg2 == NULL_TREE)
5068 side_effects = true;
5069 else
5070 side_effects = TREE_SIDE_EFFECTS (t);
5072 PROCESS_ARG (0);
5073 PROCESS_ARG (1);
5074 PROCESS_ARG (2);
5076 if (code == COND_EXPR)
5077 TREE_READONLY (t) = read_only;
5079 TREE_SIDE_EFFECTS (t) = side_effects;
5080 TREE_THIS_VOLATILE (t)
5081 = (TREE_CODE_CLASS (code) == tcc_reference
5082 && arg0 && TREE_THIS_VOLATILE (arg0));
5084 return t;
5087 tree
5088 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
5089 tree arg2, tree arg3 MEM_STAT_DECL)
5091 bool constant, read_only, side_effects;
5092 tree t;
5094 gcc_assert (TREE_CODE_LENGTH (code) == 4);
5096 t = make_node (code PASS_MEM_STAT);
5097 TREE_TYPE (t) = tt;
5099 side_effects = TREE_SIDE_EFFECTS (t);
5101 PROCESS_ARG (0);
5102 PROCESS_ARG (1);
5103 PROCESS_ARG (2);
5104 PROCESS_ARG (3);
5106 TREE_SIDE_EFFECTS (t) = side_effects;
5107 TREE_THIS_VOLATILE (t)
5108 = (TREE_CODE_CLASS (code) == tcc_reference
5109 && arg0 && TREE_THIS_VOLATILE (arg0));
5111 return t;
5114 tree
5115 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5116 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5118 bool constant, read_only, side_effects;
5119 tree t;
5121 gcc_assert (TREE_CODE_LENGTH (code) == 5);
5123 t = make_node (code PASS_MEM_STAT);
5124 TREE_TYPE (t) = tt;
5126 side_effects = TREE_SIDE_EFFECTS (t);
5128 PROCESS_ARG (0);
5129 PROCESS_ARG (1);
5130 PROCESS_ARG (2);
5131 PROCESS_ARG (3);
5132 PROCESS_ARG (4);
5134 TREE_SIDE_EFFECTS (t) = side_effects;
5135 if (code == TARGET_MEM_REF)
5137 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5139 tree o = TREE_OPERAND (arg0, 0);
5140 TREE_READONLY (t) = TREE_READONLY (o);
5141 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5144 else
5145 TREE_THIS_VOLATILE (t)
5146 = (TREE_CODE_CLASS (code) == tcc_reference
5147 && arg0 && TREE_THIS_VOLATILE (arg0));
5149 return t;
5152 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
5153 on the pointer PTR. */
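/* For illustration, assuming PTR is a pointer-valued tree already in
   hand, the convenience macro from tree.h builds the dereference at
   UNKNOWN_LOCATION:

     tree deref = build_simple_mem_ref (ptr);

   which expands to build_simple_mem_ref_loc (UNKNOWN_LOCATION, ptr).  */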
5155 tree
5156 build_simple_mem_ref_loc (location_t loc, tree ptr)
5158 poly_int64 offset = 0;
5159 tree ptype = TREE_TYPE (ptr);
5160 tree tem;
5161 /* For convenience allow addresses that collapse to a simple base
5162 and offset. */
5163 if (TREE_CODE (ptr) == ADDR_EXPR
5164 && (handled_component_p (TREE_OPERAND (ptr, 0))
5165 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5167 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5168 gcc_assert (ptr);
5169 if (TREE_CODE (ptr) == MEM_REF)
5171 offset += mem_ref_offset (ptr).force_shwi ();
5172 ptr = TREE_OPERAND (ptr, 0);
5174 else
5175 ptr = build_fold_addr_expr (ptr);
5176 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5178 tem = build2 (MEM_REF, TREE_TYPE (ptype),
5179 ptr, build_int_cst (ptype, offset));
5180 SET_EXPR_LOCATION (tem, loc);
5181 return tem;
5184 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5186 poly_offset_int
5187 mem_ref_offset (const_tree t)
5189 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5190 SIGNED);
5193 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5194 offsetted by OFFSET units. */
5196 tree
5197 build_invariant_address (tree type, tree base, poly_int64 offset)
5199 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5200 build_fold_addr_expr (base),
5201 build_int_cst (ptr_type_node, offset));
5202 tree addr = build1 (ADDR_EXPR, type, ref);
5203 recompute_tree_invariant_for_addr_expr (addr);
5204 return addr;
5207 /* Similar except don't specify the TREE_TYPE
5208 and leave the TREE_SIDE_EFFECTS as 0.
5209 It is permissible for arguments to be null,
5210 or even garbage if their values do not matter. */
5212 tree
5213 build_nt (enum tree_code code, ...)
5215 tree t;
5216 int length;
5217 int i;
5218 va_list p;
5220 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5222 va_start (p, code);
5224 t = make_node (code);
5225 length = TREE_CODE_LENGTH (code);
5227 for (i = 0; i < length; i++)
5228 TREE_OPERAND (t, i) = va_arg (p, tree);
5230 va_end (p);
5231 return t;
5234 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5235 tree vec. */
5237 tree
5238 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5240 tree ret, t;
5241 unsigned int ix;
5243 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5244 CALL_EXPR_FN (ret) = fn;
5245 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5246 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5247 CALL_EXPR_ARG (ret, ix) = t;
5248 return ret;
5251 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5252 and data type TYPE.
5253 We do NOT enter this node in any sort of symbol table.
5255 LOC is the location of the decl.
5257 layout_decl is used to set up the decl's storage layout.
5258 Other slots are initialized to 0 or null pointers. */
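/* For example, a temporary integer variable could be created with

     tree var = build_decl (input_location, VAR_DECL,
                            get_identifier ("tmp"), integer_type_node);

   (the name "tmp" is purely illustrative; NULL_TREE is also a valid
   name for an anonymous decl).  */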
5260 tree
5261 build_decl (location_t loc, enum tree_code code, tree name,
5262 tree type MEM_STAT_DECL)
5264 tree t;
5266 t = make_node (code PASS_MEM_STAT);
5267 DECL_SOURCE_LOCATION (t) = loc;
5269 /* if (type == error_mark_node)
5270 type = integer_type_node; */
5271 /* That is not done, deliberately, so that having error_mark_node
5272 as the type can suppress useless errors in the use of this variable. */
5274 DECL_NAME (t) = name;
5275 TREE_TYPE (t) = type;
5277 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5278 layout_decl (t, 0);
5280 return t;
5283 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5285 tree
5286 build_debug_expr_decl (tree type)
5288 tree vexpr = make_node (DEBUG_EXPR_DECL);
5289 DECL_ARTIFICIAL (vexpr) = 1;
5290 TREE_TYPE (vexpr) = type;
5291 SET_DECL_MODE (vexpr, TYPE_MODE (type));
5292 return vexpr;
5295 /* Builds and returns a function declaration with NAME and TYPE. */
5297 tree
5298 build_fn_decl (const char *name, tree type)
5300 tree id = get_identifier (name);
5301 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5303 DECL_EXTERNAL (decl) = 1;
5304 TREE_PUBLIC (decl) = 1;
5305 DECL_ARTIFICIAL (decl) = 1;
5306 TREE_NOTHROW (decl) = 1;
5308 return decl;
5311 vec<tree, va_gc> *all_translation_units;
5313 /* Builds a new translation-unit decl with name NAME, queues it in the
5314 global list of translation-unit decls and returns it. */
5316 tree
5317 build_translation_unit_decl (tree name)
5319 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5320 name, NULL_TREE);
5321 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5322 vec_safe_push (all_translation_units, tu);
5323 return tu;
5327 /* BLOCK nodes are used to represent the structure of binding contours
5328 and declarations, once those contours have been exited and their contents
5329 compiled. This information is used for outputting debugging info. */
5331 tree
5332 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5334 tree block = make_node (BLOCK);
5336 BLOCK_VARS (block) = vars;
5337 BLOCK_SUBBLOCKS (block) = subblocks;
5338 BLOCK_SUPERCONTEXT (block) = supercontext;
5339 BLOCK_CHAIN (block) = chain;
5340 return block;
5344 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5346 LOC is the location to use in tree T. */
5348 void
5349 protected_set_expr_location (tree t, location_t loc)
5351 if (CAN_HAVE_LOCATION_P (t))
5352 SET_EXPR_LOCATION (t, loc);
5353 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5355 t = expr_single (t);
5356 if (t && CAN_HAVE_LOCATION_P (t))
5357 SET_EXPR_LOCATION (t, loc);
5361 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5362 UNKNOWN_LOCATION. */
5364 void
5365 protected_set_expr_location_if_unset (tree t, location_t loc)
5367 t = expr_single (t);
5368 if (t && !EXPR_HAS_LOCATION (t))
5369 protected_set_expr_location (t, loc);
5372 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5373 of the various TYPE_QUAL values. */
5375 static void
5376 set_type_quals (tree type, int type_quals)
5378 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5379 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5380 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5381 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5382 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5385 /* Returns true iff CAND and BASE have equivalent language-specific
5386 qualifiers. */
5388 bool
5389 check_lang_type (const_tree cand, const_tree base)
5391 if (lang_hooks.types.type_hash_eq == NULL)
5392 return true;
5393 /* type_hash_eq currently only applies to these types. */
5394 if (TREE_CODE (cand) != FUNCTION_TYPE
5395 && TREE_CODE (cand) != METHOD_TYPE)
5396 return true;
5397 return lang_hooks.types.type_hash_eq (cand, base);
5400 /* This function checks to see if TYPE matches the size of one of the
5401 built-in atomic types, and returns that core atomic type. */
5403 static tree
5404 find_atomic_core_type (const_tree type)
5406 tree base_atomic_type;
5408 /* Only handle complete types. */
5409 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5410 return NULL_TREE;
5412 switch (tree_to_uhwi (TYPE_SIZE (type)))
5414 case 8:
5415 base_atomic_type = atomicQI_type_node;
5416 break;
5418 case 16:
5419 base_atomic_type = atomicHI_type_node;
5420 break;
5422 case 32:
5423 base_atomic_type = atomicSI_type_node;
5424 break;
5426 case 64:
5427 base_atomic_type = atomicDI_type_node;
5428 break;
5430 case 128:
5431 base_atomic_type = atomicTI_type_node;
5432 break;
5434 default:
5435 base_atomic_type = NULL_TREE;
5438 return base_atomic_type;
5441 /* Returns true iff unqualified CAND and BASE are equivalent. */
5443 bool
5444 check_base_type (const_tree cand, const_tree base)
5446 if (TYPE_NAME (cand) != TYPE_NAME (base)
5447 /* Apparently this is needed for Objective-C. */
5448 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5449 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5450 TYPE_ATTRIBUTES (base)))
5451 return false;
5452 /* Check alignment. */
5453 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5454 && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5455 return true;
5456 /* Atomic types increase the minimum alignment. We must do so as well
5457 or we get duplicated canonical types. See PR88686. */
5458 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5460 /* See if this object can map to a basic atomic type. */
5461 tree atomic_type = find_atomic_core_type (cand);
5462 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5463 return true;
5465 return false;
5468 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5470 bool
5471 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5473 return (TYPE_QUALS (cand) == type_quals
5474 && check_base_type (cand, base)
5475 && check_lang_type (cand, base));
5478 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5480 static bool
5481 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5483 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5484 && TYPE_NAME (cand) == TYPE_NAME (base)
5485 /* Apparently this is needed for Objective-C. */
5486 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5487 /* Check alignment. */
5488 && TYPE_ALIGN (cand) == align
5489 /* Check this is a user-aligned type as build_aligned_type
5490 would create. */
5491 && TYPE_USER_ALIGN (cand)
5492 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5493 TYPE_ATTRIBUTES (base))
5494 && check_lang_type (cand, base));
5497 /* Return a version of the TYPE, qualified as indicated by the
5498 TYPE_QUALS, if one exists. If no qualified version exists yet,
5499 return NULL_TREE. */
5501 tree
5502 get_qualified_type (tree type, int type_quals)
5504 if (TYPE_QUALS (type) == type_quals)
5505 return type;
5507 tree mv = TYPE_MAIN_VARIANT (type);
5508 if (check_qualified_type (mv, type, type_quals))
5509 return mv;
5511 /* Search the chain of variants to see if there is already one there just
5512 like the one we need to have. If so, use that existing one. We must
5513 preserve the TYPE_NAME, since there is code that depends on this. */
5514 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
5515 if (check_qualified_type (*tp, type, type_quals))
5517 /* Put the found variant at the head of the variant list so
5518 frequently searched variants get found faster. The C++ FE
5519 benefits greatly from this. */
5520 tree t = *tp;
5521 *tp = TYPE_NEXT_VARIANT (t);
5522 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
5523 TYPE_NEXT_VARIANT (mv) = t;
5524 return t;
5527 return NULL_TREE;
5530 /* Like get_qualified_type, but creates the type if it does not
5531 exist. This function never returns NULL_TREE. */
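/* For example, the const-qualified variant of an existing type T is
   obtained with

     tree ct = build_qualified_type (t, TYPE_QUALS (t) | TYPE_QUAL_CONST);

   which reuses a variant already on T's variant chain when
   get_qualified_type finds one.  */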
5533 tree
5534 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5536 tree t;
5538 /* See if we already have the appropriate qualified variant. */
5539 t = get_qualified_type (type, type_quals);
5541 /* If not, build it. */
5542 if (!t)
5544 t = build_variant_type_copy (type PASS_MEM_STAT);
5545 set_type_quals (t, type_quals);
5547 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5549 /* See if this object can map to a basic atomic type. */
5550 tree atomic_type = find_atomic_core_type (type);
5551 if (atomic_type)
5553 /* Ensure the alignment of this type is compatible with
5554 the required alignment of the atomic type. */
5555 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5556 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5560 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5561 /* Propagate structural equality. */
5562 SET_TYPE_STRUCTURAL_EQUALITY (t);
5563 else if (TYPE_CANONICAL (type) != type)
5564 /* Build the underlying canonical type, since it is different
5565 from TYPE. */
5567 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5568 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5570 else
5571 /* T is its own canonical type. */
5572 TYPE_CANONICAL (t) = t;
5576 return t;
5579 /* Create a variant of type T with alignment ALIGN. */
5581 tree
5582 build_aligned_type (tree type, unsigned int align)
5584 tree t;
5586 if (TYPE_PACKED (type)
5587 || TYPE_ALIGN (type) == align)
5588 return type;
5590 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5591 if (check_aligned_type (t, type, align))
5592 return t;
5594 t = build_variant_type_copy (type);
5595 SET_TYPE_ALIGN (t, align);
5596 TYPE_USER_ALIGN (t) = 1;
5598 return t;
5601 /* Create a new distinct copy of TYPE. The new type is made its own
5602 MAIN_VARIANT. If TYPE requires structural equality checks, the
5603 resulting type requires structural equality checks; otherwise, its
5604 TYPE_CANONICAL points to itself. */
5606 tree
5607 build_distinct_type_copy (tree type MEM_STAT_DECL)
5609 tree t = copy_node (type PASS_MEM_STAT);
5611 TYPE_POINTER_TO (t) = 0;
5612 TYPE_REFERENCE_TO (t) = 0;
5614 /* Set the canonical type either to a new equivalence class, or
5615 propagate the need for structural equality checks. */
5616 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5617 SET_TYPE_STRUCTURAL_EQUALITY (t);
5618 else
5619 TYPE_CANONICAL (t) = t;
5621 /* Make it its own variant. */
5622 TYPE_MAIN_VARIANT (t) = t;
5623 TYPE_NEXT_VARIANT (t) = 0;
5625 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5626 whose TREE_TYPE is not t. This can also happen in the Ada
5627 frontend when using subtypes. */
5629 return t;
5632 /* Create a new variant of TYPE, equivalent but distinct. This is so
5633 the caller can modify it. TYPE_CANONICAL for the return type will
5634 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5635 are considered equal by the language itself (or that both types
5636 require structural equality checks). */
5638 tree
5639 build_variant_type_copy (tree type MEM_STAT_DECL)
5641 tree t, m = TYPE_MAIN_VARIANT (type);
5643 t = build_distinct_type_copy (type PASS_MEM_STAT);
5645 /* Since we're building a variant, assume that it is a non-semantic
5646 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5647 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5648 /* Type variants have no alias set defined. */
5649 TYPE_ALIAS_SET (t) = -1;
5651 /* Add the new type to the chain of variants of TYPE. */
5652 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5653 TYPE_NEXT_VARIANT (m) = t;
5654 TYPE_MAIN_VARIANT (t) = m;
5656 return t;
5659 /* Return true if the 'from' trees in both tree maps are equal. */
5662 tree_map_base_eq (const void *va, const void *vb)
5664 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5665 *const b = (const struct tree_map_base *) vb;
5666 return (a->from == b->from);
5669 /* Hash the 'from' tree in a tree_map_base. */
5671 unsigned int
5672 tree_map_base_hash (const void *item)
5674 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5677 /* Return true if this tree map structure is marked for garbage collection
5678 purposes. We simply return true if the from tree is marked, so that this
5679 structure goes away when the from tree goes away. */
5682 tree_map_base_marked_p (const void *p)
5684 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5687 /* Hash a from tree in a tree_map. */
5689 unsigned int
5690 tree_map_hash (const void *item)
5692 return (((const struct tree_map *) item)->hash);
5695 /* Hash a from tree in a tree_decl_map. */
5697 unsigned int
5698 tree_decl_map_hash (const void *item)
5700 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5703 /* Return the initialization priority for DECL. */
5705 priority_type
5706 decl_init_priority_lookup (tree decl)
5708 symtab_node *snode = symtab_node::get (decl);
5710 if (!snode)
5711 return DEFAULT_INIT_PRIORITY;
5712 return
5713 snode->get_init_priority ();
5716 /* Return the finalization priority for DECL. */
5718 priority_type
5719 decl_fini_priority_lookup (tree decl)
5721 cgraph_node *node = cgraph_node::get (decl);
5723 if (!node)
5724 return DEFAULT_INIT_PRIORITY;
5725 return
5726 node->get_fini_priority ();
5729 /* Set the initialization priority for DECL to PRIORITY. */
5731 void
5732 decl_init_priority_insert (tree decl, priority_type priority)
5734 struct symtab_node *snode;
5736 if (priority == DEFAULT_INIT_PRIORITY)
5738 snode = symtab_node::get (decl);
5739 if (!snode)
5740 return;
5742 else if (VAR_P (decl))
5743 snode = varpool_node::get_create (decl);
5744 else
5745 snode = cgraph_node::get_create (decl);
5746 snode->set_init_priority (priority);
5749 /* Set the finalization priority for DECL to PRIORITY. */
5751 void
5752 decl_fini_priority_insert (tree decl, priority_type priority)
5754 struct cgraph_node *node;
5756 if (priority == DEFAULT_INIT_PRIORITY)
5758 node = cgraph_node::get (decl);
5759 if (!node)
5760 return;
5762 else
5763 node = cgraph_node::get_create (decl);
5764 node->set_fini_priority (priority);
5767 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5769 static void
5770 print_debug_expr_statistics (void)
5772 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5773 (long) debug_expr_for_decl->size (),
5774 (long) debug_expr_for_decl->elements (),
5775 debug_expr_for_decl->collisions ());
5778 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5780 static void
5781 print_value_expr_statistics (void)
5783 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5784 (long) value_expr_for_decl->size (),
5785 (long) value_expr_for_decl->elements (),
5786 value_expr_for_decl->collisions ());
5789 /* Lookup a debug expression for FROM, and return it if we find one. */
5791 tree
5792 decl_debug_expr_lookup (tree from)
5794 struct tree_decl_map *h, in;
5795 in.base.from = from;
5797 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5798 if (h)
5799 return h->to;
5800 return NULL_TREE;
5803 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5805 void
5806 decl_debug_expr_insert (tree from, tree to)
5808 struct tree_decl_map *h;
5810 h = ggc_alloc<tree_decl_map> ();
5811 h->base.from = from;
5812 h->to = to;
5813 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5816 /* Lookup a value expression for FROM, and return it if we find one. */
5818 tree
5819 decl_value_expr_lookup (tree from)
5821 struct tree_decl_map *h, in;
5822 in.base.from = from;
5824 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5825 if (h)
5826 return h->to;
5827 return NULL_TREE;
5830 /* Insert a mapping FROM->TO in the value expression hashtable. */
5832 void
5833 decl_value_expr_insert (tree from, tree to)
5835 struct tree_decl_map *h;
5837 h = ggc_alloc<tree_decl_map> ();
5838 h->base.from = from;
5839 h->to = to;
5840 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5843 /* Lookup a vector of debug arguments for FROM, and return it if we
5844 find one. */
5846 vec<tree, va_gc> **
5847 decl_debug_args_lookup (tree from)
5849 struct tree_vec_map *h, in;
5851 if (!DECL_HAS_DEBUG_ARGS_P (from))
5852 return NULL;
5853 gcc_checking_assert (debug_args_for_decl != NULL);
5854 in.base.from = from;
5855 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5856 if (h)
5857 return &h->to;
5858 return NULL;
5861 /* Insert a mapping FROM->empty vector of debug arguments in the debug
5862 arguments hashtable. */
5864 vec<tree, va_gc> **
5865 decl_debug_args_insert (tree from)
5867 struct tree_vec_map *h;
5868 tree_vec_map **loc;
5870 if (DECL_HAS_DEBUG_ARGS_P (from))
5871 return decl_debug_args_lookup (from);
5872 if (debug_args_for_decl == NULL)
5873 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5874 h = ggc_alloc<tree_vec_map> ();
5875 h->base.from = from;
5876 h->to = NULL;
5877 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5878 *loc = h;
5879 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5880 return &h->to;
5883 /* Hashing of types so that we don't make duplicates.
5884 The entry point is `type_hash_canon'. */
5886 /* Generate the default hash code for TYPE. This is designed for
5887 speed, rather than maximum entropy. */
5889 hashval_t
5890 type_hash_canon_hash (tree type)
5892 inchash::hash hstate;
5894 hstate.add_int (TREE_CODE (type));
5896 if (TREE_TYPE (type))
5897 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
5899 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
5900 /* Just the identifier is adequate to distinguish. */
5901 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
5903 switch (TREE_CODE (type))
5905 case METHOD_TYPE:
5906 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
5907 /* FALLTHROUGH. */
5908 case FUNCTION_TYPE:
5909 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
5910 if (TREE_VALUE (t) != error_mark_node)
5911 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
5912 break;
5914 case OFFSET_TYPE:
5915 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
5916 break;
5918 case ARRAY_TYPE:
5920 if (TYPE_DOMAIN (type))
5921 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
5922 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
5924 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
5925 hstate.add_object (typeless);
5928 break;
5930 case INTEGER_TYPE:
5932 tree t = TYPE_MAX_VALUE (type);
5933 if (!t)
5934 t = TYPE_MIN_VALUE (type);
5935 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
5936 hstate.add_object (TREE_INT_CST_ELT (t, i));
5937 break;
5940 case REAL_TYPE:
5941 case FIXED_POINT_TYPE:
5943 unsigned prec = TYPE_PRECISION (type);
5944 hstate.add_object (prec);
5945 break;
5948 case VECTOR_TYPE:
5949 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
5950 break;
5952 default:
5953 break;
5956 return hstate.end ();
5959 /* These are the Hashtable callback functions. */
5961 /* Returns true iff the types are equivalent. */
5963 bool
5964 type_cache_hasher::equal (type_hash *a, type_hash *b)
5966 /* First test the things that are the same for all types. */
5967 if (a->hash != b->hash
5968 || TREE_CODE (a->type) != TREE_CODE (b->type)
5969 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
5970 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
5971 TYPE_ATTRIBUTES (b->type))
5972 || (TREE_CODE (a->type) != COMPLEX_TYPE
5973 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
5974 return 0;
5976 /* Be careful about comparing arrays before and after the element type
5977 has been completed; don't compare TYPE_ALIGN unless both types are
5978 complete. */
5979 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
5980 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
5981 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
5982 return 0;
5984 switch (TREE_CODE (a->type))
5986 case VOID_TYPE:
5987 case OPAQUE_TYPE:
5988 case COMPLEX_TYPE:
5989 case POINTER_TYPE:
5990 case REFERENCE_TYPE:
5991 case NULLPTR_TYPE:
5992 return 1;
5994 case VECTOR_TYPE:
5995 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
5996 TYPE_VECTOR_SUBPARTS (b->type));
5998 case ENUMERAL_TYPE:
5999 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6000 && !(TYPE_VALUES (a->type)
6001 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6002 && TYPE_VALUES (b->type)
6003 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6004 && type_list_equal (TYPE_VALUES (a->type),
6005 TYPE_VALUES (b->type))))
6006 return 0;
6008 /* fall through */
6010 case INTEGER_TYPE:
6011 case REAL_TYPE:
6012 case BOOLEAN_TYPE:
6013 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6014 return false;
6015 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6016 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6017 TYPE_MAX_VALUE (b->type)))
6018 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6019 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6020 TYPE_MIN_VALUE (b->type))));
6022 case FIXED_POINT_TYPE:
6023 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6025 case OFFSET_TYPE:
6026 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6028 case METHOD_TYPE:
6029 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6030 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6031 || (TYPE_ARG_TYPES (a->type)
6032 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6033 && TYPE_ARG_TYPES (b->type)
6034 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6035 && type_list_equal (TYPE_ARG_TYPES (a->type),
6036 TYPE_ARG_TYPES (b->type)))))
6037 break;
6038 return 0;
6039 case ARRAY_TYPE:
6040 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6041 where the flag should be inherited from the element type
6042 and can change after ARRAY_TYPEs are created; on non-aggregates
6043 compare it and hash it, scalars will never have that flag set
6044 and we need to differentiate between arrays created by different
6045 front-ends or middle-end created arrays. */
6046 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6047 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6048 || (TYPE_TYPELESS_STORAGE (a->type)
6049 == TYPE_TYPELESS_STORAGE (b->type))));
6051 case RECORD_TYPE:
6052 case UNION_TYPE:
6053 case QUAL_UNION_TYPE:
6054 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6055 || (TYPE_FIELDS (a->type)
6056 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6057 && TYPE_FIELDS (b->type)
6058 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6059 && type_list_equal (TYPE_FIELDS (a->type),
6060 TYPE_FIELDS (b->type))));
6062 case FUNCTION_TYPE:
6063 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6064 || (TYPE_ARG_TYPES (a->type)
6065 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6066 && TYPE_ARG_TYPES (b->type)
6067 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6068 && type_list_equal (TYPE_ARG_TYPES (a->type),
6069 TYPE_ARG_TYPES (b->type))))
6070 break;
6071 return 0;
6073 default:
6074 return 0;
6077 if (lang_hooks.types.type_hash_eq != NULL)
6078 return lang_hooks.types.type_hash_eq (a->type, b->type);
6080 return 1;
6083 /* Given TYPE, and HASHCODE its hash code, return the canonical
6084 object for an identical type if one already exists.
6085 Otherwise, return TYPE, and record it as the canonical object.
6087 To use this function, first create a type of the sort you want.
6088 Then compute its hash code from the fields of the type that
6089 make it different from other similar types.
6090 Then call this function and use the value. */
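/* A minimal sketch of that protocol, as used by the type builders in
   this file, where T is a freshly constructed main-variant type:

     hashval_t hash = type_hash_canon_hash (t);
     t = type_hash_canon (hash, t);

   If an equivalent type was already interned, the new node is freed and
   the existing one is returned.  */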
6092 tree
6093 type_hash_canon (unsigned int hashcode, tree type)
6095 type_hash in;
6096 type_hash **loc;
6098 /* The hash table only contains main variants, so ensure that's what we're
6099 being passed. */
6100 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6102 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6103 must call that routine before comparing TYPE_ALIGNs. */
6104 layout_type (type);
6106 in.hash = hashcode;
6107 in.type = type;
6109 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6110 if (*loc)
6112 tree t1 = ((type_hash *) *loc)->type;
6113 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6114 && t1 != type);
6115 if (TYPE_UID (type) + 1 == next_type_uid)
6116 --next_type_uid;
6117 /* Also free the min/max values and the cache for integer
6118 types. This can't be done in free_node, as LTO frees
6119 those on its own. */
6120 if (TREE_CODE (type) == INTEGER_TYPE)
6122 if (TYPE_MIN_VALUE (type)
6123 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6125 /* Zero is always in TYPE_CACHED_VALUES. */
6126 if (! TYPE_UNSIGNED (type))
6127 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6128 ggc_free (TYPE_MIN_VALUE (type));
6130 if (TYPE_MAX_VALUE (type)
6131 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6133 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6134 ggc_free (TYPE_MAX_VALUE (type));
6136 if (TYPE_CACHED_VALUES_P (type))
6137 ggc_free (TYPE_CACHED_VALUES (type));
6139 free_node (type);
6140 return t1;
6142 else
6144 struct type_hash *h;
6146 h = ggc_alloc<type_hash> ();
6147 h->hash = hashcode;
6148 h->type = type;
6149 *loc = h;
6151 return type;
6155 static void
6156 print_type_hash_statistics (void)
6158 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6159 (long) type_hash_table->size (),
6160 (long) type_hash_table->elements (),
6161 type_hash_table->collisions ());
6164 /* Given two lists of types
6165 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6166 return 1 if the lists contain the same types in the same order.
6167 Also, the TREE_PURPOSEs must match. */
6169 bool
6170 type_list_equal (const_tree l1, const_tree l2)
6172 const_tree t1, t2;
6174 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6175 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6176 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6177 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6178 && (TREE_TYPE (TREE_PURPOSE (t1))
6179 == TREE_TYPE (TREE_PURPOSE (t2))))))
6180 return false;
6182 return t1 == t2;
6185 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6186 given by FNTYPE. If the argument list accepts variable arguments,
6187 then this function counts only the ordinary arguments. */
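/* For example (illustrative only): for a FUNCTION_TYPE representing the C
   prototype "int printf (const char *, ...)" this returns 1, because the
   variadic tail is not represented by a terminating void_type_node and so
   only the single named argument is counted.  */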
6190 type_num_arguments (const_tree fntype)
6192 int i = 0;
6194 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6195 /* If the function does not take a variable number of arguments,
6196 the last element in the list will have type `void'. */
6197 if (VOID_TYPE_P (TREE_VALUE (t)))
6198 break;
6199 else
6200 ++i;
6202 return i;
6205 /* Return the type of the function type FNTYPE's argument ARGNO if known.
6206 For vararg functions, if ARGNO refers to one of the variadic
6207 arguments, return null. Otherwise, return void_type_node for
6208 out-of-bounds ARGNO. */
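/* Illustrative example (an assumption, not code from this file): for the
   FUNCTION_TYPE of "int f (int, double)", type_argument_type (fntype, 1)
   yields integer_type_node, ARGNO 2 yields double_type_node, and ARGNO 0
   or 3 yields void_type_node; for "int g (int, ...)", ARGNO 2 yields
   NULL_TREE because it names a variadic argument.  */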
6210 tree
6211 type_argument_type (const_tree fntype, unsigned argno)
6213 /* Treat zero the same as an out-of-bounds argument number. */
6214 if (!argno)
6215 return void_type_node;
6217 function_args_iterator iter;
6219 tree argtype;
6220 unsigned i = 1;
6221 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6223 /* A vararg function's argument list ends in a null. Otherwise,
6224 an ordinary function's argument list ends with void. Return
6225 null if ARGNO refers to a vararg argument, void_type_node if
6226 it's out of bounds, and the formal argument type otherwise. */
6227 if (!argtype)
6228 break;
6230 if (i == argno || VOID_TYPE_P (argtype))
6231 return argtype;
6233 ++i;
6236 return NULL_TREE;
6239 /* Nonzero if integer constants T1 and T2
6240 represent the same constant value. */
6243 tree_int_cst_equal (const_tree t1, const_tree t2)
6245 if (t1 == t2)
6246 return 1;
6248 if (t1 == 0 || t2 == 0)
6249 return 0;
6251 STRIP_ANY_LOCATION_WRAPPER (t1);
6252 STRIP_ANY_LOCATION_WRAPPER (t2);
6254 if (TREE_CODE (t1) == INTEGER_CST
6255 && TREE_CODE (t2) == INTEGER_CST
6256 && wi::to_widest (t1) == wi::to_widest (t2))
6257 return 1;
6259 return 0;
6262 /* Return true if T is an INTEGER_CST whose numerical value (extended
6263 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6265 bool
6266 tree_fits_shwi_p (const_tree t)
6268 return (t != NULL_TREE
6269 && TREE_CODE (t) == INTEGER_CST
6270 && wi::fits_shwi_p (wi::to_widest (t)));
6273 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6274 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6276 bool
6277 tree_fits_poly_int64_p (const_tree t)
6279 if (t == NULL_TREE)
6280 return false;
6281 if (POLY_INT_CST_P (t))
6283 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6284 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6285 return false;
6286 return true;
6288 return (TREE_CODE (t) == INTEGER_CST
6289 && wi::fits_shwi_p (wi::to_widest (t)));
6292 /* Return true if T is an INTEGER_CST whose numerical value (extended
6293 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6295 bool
6296 tree_fits_uhwi_p (const_tree t)
6298 return (t != NULL_TREE
6299 && TREE_CODE (t) == INTEGER_CST
6300 && wi::fits_uhwi_p (wi::to_widest (t)));
6303 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6304 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6306 bool
6307 tree_fits_poly_uint64_p (const_tree t)
6309 if (t == NULL_TREE)
6310 return false;
6311 if (POLY_INT_CST_P (t))
6313 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6314 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6315 return false;
6316 return true;
6318 return (TREE_CODE (t) == INTEGER_CST
6319 && wi::fits_uhwi_p (wi::to_widest (t)));
6322 /* T is an INTEGER_CST whose numerical value (extended according to
6323 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6324 HOST_WIDE_INT. */
6326 HOST_WIDE_INT
6327 tree_to_shwi (const_tree t)
6329 gcc_assert (tree_fits_shwi_p (t));
6330 return TREE_INT_CST_LOW (t);
6333 /* T is an INTEGER_CST whose numerical value (extended according to
6334 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6335 HOST_WIDE_INT. */
6337 unsigned HOST_WIDE_INT
6338 tree_to_uhwi (const_tree t)
6340 gcc_assert (tree_fits_uhwi_p (t));
6341 return TREE_INT_CST_LOW (t);
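/* A minimal usage sketch (illustrative; LEN is just an example name):
   callers are expected to pair the predicates above with these accessors,

	if (tree_fits_uhwi_p (len))
	  {
	    unsigned HOST_WIDE_INT n = tree_to_uhwi (len);
	    ...
	  }

   because tree_to_shwi and tree_to_uhwi assert that the value fits.  */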
6344 /* Return the most significant (sign) bit of T. */
6347 tree_int_cst_sign_bit (const_tree t)
6349 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6351 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6354 /* Return an indication of the sign of the integer constant T.
6355 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6356 Note that -1 will never be returned if T's type is unsigned. */
6359 tree_int_cst_sgn (const_tree t)
6361 if (wi::to_wide (t) == 0)
6362 return 0;
6363 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6364 return 1;
6365 else if (wi::neg_p (wi::to_wide (t)))
6366 return -1;
6367 else
6368 return 1;
6371 /* Return the minimum number of bits needed to represent VALUE in a
6372 signed or unsigned type; SGN says which. */
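/* Worked example (illustrative): for VALUE == 5 the result is
   tree_floor_log2 (5) + 1 = 3 bits when SGN is UNSIGNED and 4 bits when
   SGN is SIGNED; for VALUE == -3 the value is first bit-wise negated to 2,
   giving 1 + 1 + 1 = 3 bits, which matches -3 being 101 in 3-bit two's
   complement.  */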
6374 unsigned int
6375 tree_int_cst_min_precision (tree value, signop sgn)
6377 /* If the value is negative, compute its negative minus 1. The latter
6378 adjustment is because the absolute value of the largest negative value
6379 is one larger than the largest positive value. This is equivalent to
6380 a bit-wise negation, so use that operation instead. */
6382 if (tree_int_cst_sgn (value) < 0)
6383 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6385 /* Return the number of bits needed, taking into account the fact
6386 that we need one more bit for a signed than unsigned type.
6387 If VALUE is 0 or -1, the minimum precision is 1 no matter
6388 whether SGN is SIGNED or UNSIGNED. */
6390 if (integer_zerop (value))
6391 return 1;
6392 else
6393 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
6396 /* Return truthvalue of whether T1 is the same tree structure as T2.
6397 Return 1 if they are the same.
6398 Return 0 if they are understandably different.
6399 Return -1 if either contains tree structure not understood by
6400 this function. */
6403 simple_cst_equal (const_tree t1, const_tree t2)
6405 enum tree_code code1, code2;
6406 int cmp;
6407 int i;
6409 if (t1 == t2)
6410 return 1;
6411 if (t1 == 0 || t2 == 0)
6412 return 0;
6414 /* For location wrappers to be the same, they must be at the same
6415 source location (and wrap the same thing). */
6416 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6418 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6419 return 0;
6420 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6423 code1 = TREE_CODE (t1);
6424 code2 = TREE_CODE (t2);
6426 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6428 if (CONVERT_EXPR_CODE_P (code2)
6429 || code2 == NON_LVALUE_EXPR)
6430 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6431 else
6432 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6435 else if (CONVERT_EXPR_CODE_P (code2)
6436 || code2 == NON_LVALUE_EXPR)
6437 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6439 if (code1 != code2)
6440 return 0;
6442 switch (code1)
6444 case INTEGER_CST:
6445 return wi::to_widest (t1) == wi::to_widest (t2);
6447 case REAL_CST:
6448 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6450 case FIXED_CST:
6451 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6453 case STRING_CST:
6454 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6455 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6456 TREE_STRING_LENGTH (t1)));
6458 case CONSTRUCTOR:
6460 unsigned HOST_WIDE_INT idx;
6461 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6462 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6464 if (vec_safe_length (v1) != vec_safe_length (v2))
6465 return false;
6467 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6468 /* ??? Should we handle also fields here? */
6469 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6470 return false;
6471 return true;
6474 case SAVE_EXPR:
6475 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6477 case CALL_EXPR:
6478 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6479 if (cmp <= 0)
6480 return cmp;
6481 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6482 return 0;
6484 const_tree arg1, arg2;
6485 const_call_expr_arg_iterator iter1, iter2;
6486 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6487 arg2 = first_const_call_expr_arg (t2, &iter2);
6488 arg1 && arg2;
6489 arg1 = next_const_call_expr_arg (&iter1),
6490 arg2 = next_const_call_expr_arg (&iter2))
6492 cmp = simple_cst_equal (arg1, arg2);
6493 if (cmp <= 0)
6494 return cmp;
6496 return arg1 == arg2;
6499 case TARGET_EXPR:
6500 /* Special case: if either target is an unallocated VAR_DECL,
6501 it means that it's going to be unified with whatever the
6502 TARGET_EXPR is really supposed to initialize, so treat it
6503 as being equivalent to anything. */
6504 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6505 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6506 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6507 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6508 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6509 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6510 cmp = 1;
6511 else
6512 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6514 if (cmp <= 0)
6515 return cmp;
6517 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6519 case WITH_CLEANUP_EXPR:
6520 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6521 if (cmp <= 0)
6522 return cmp;
6524 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6526 case COMPONENT_REF:
6527 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6528 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6530 return 0;
6532 case VAR_DECL:
6533 case PARM_DECL:
6534 case CONST_DECL:
6535 case FUNCTION_DECL:
6536 return 0;
6538 default:
6539 if (POLY_INT_CST_P (t1))
6540 /* A false return means maybe_ne rather than known_ne. */
6541 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6542 TYPE_SIGN (TREE_TYPE (t1))),
6543 poly_widest_int::from (poly_int_cst_value (t2),
6544 TYPE_SIGN (TREE_TYPE (t2))));
6545 break;
6548 /* This general rule works for most tree codes. All exceptions should be
6549 handled above. If this is a language-specific tree code, we can't
6550 trust what might be in the operand, so say we don't know
6551 the situation. */
6552 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6553 return -1;
6555 switch (TREE_CODE_CLASS (code1))
6557 case tcc_unary:
6558 case tcc_binary:
6559 case tcc_comparison:
6560 case tcc_expression:
6561 case tcc_reference:
6562 case tcc_statement:
6563 cmp = 1;
6564 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6566 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6567 if (cmp <= 0)
6568 return cmp;
6571 return cmp;
6573 default:
6574 return -1;
6578 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6579 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6580 than U, respectively. */
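/* Illustrative use (an assumption, not code from this file): for a type
   whose size is a compile-time constant,
   compare_tree_int (TYPE_SIZE_UNIT (type), 8) tells whether the type is
   smaller than, exactly, or larger than 8 bytes without first converting
   the tree constant to a host integer.  */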
6583 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6585 if (tree_int_cst_sgn (t) < 0)
6586 return -1;
6587 else if (!tree_fits_uhwi_p (t))
6588 return 1;
6589 else if (TREE_INT_CST_LOW (t) == u)
6590 return 0;
6591 else if (TREE_INT_CST_LOW (t) < u)
6592 return -1;
6593 else
6594 return 1;
6597 /* Return true if SIZE represents a constant size that is in bounds of
6598 what the middle-end and the backend accept (covering not more than
6599 half of the address-space).
6600 When PERR is non-null, set *PERR on failure to the description of
6601 why SIZE is not valid. */
6603 bool
6604 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6606 if (POLY_INT_CST_P (size))
6608 if (TREE_OVERFLOW (size))
6609 return false;
6610 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6611 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6612 return false;
6613 return true;
6616 cst_size_error error;
6617 if (!perr)
6618 perr = &error;
6620 if (TREE_CODE (size) != INTEGER_CST)
6622 *perr = cst_size_not_constant;
6623 return false;
6626 if (TREE_OVERFLOW_P (size))
6628 *perr = cst_size_overflow;
6629 return false;
6632 if (tree_int_cst_sgn (size) < 0)
6634 *perr = cst_size_negative;
6635 return false;
6637 if (!tree_fits_uhwi_p (size)
6638 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6639 < wi::to_widest (size) * 2))
6641 *perr = cst_size_too_big;
6642 return false;
6645 return true;
6648 /* Return the precision of the type, or for a complex or vector type the
6649 precision of the type of its elements. */
6651 unsigned int
6652 element_precision (const_tree type)
6654 if (!TYPE_P (type))
6655 type = TREE_TYPE (type);
6656 enum tree_code code = TREE_CODE (type);
6657 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6658 type = TREE_TYPE (type);
6660 return TYPE_PRECISION (type);
6663 /* Return true if CODE represents an associative tree code. Otherwise
6664 return false. */
6665 bool
6666 associative_tree_code (enum tree_code code)
6668 switch (code)
6670 case BIT_IOR_EXPR:
6671 case BIT_AND_EXPR:
6672 case BIT_XOR_EXPR:
6673 case PLUS_EXPR:
6674 case MULT_EXPR:
6675 case MIN_EXPR:
6676 case MAX_EXPR:
6677 return true;
6679 default:
6680 break;
6682 return false;
6685 /* Return true if CODE represents a commutative tree code. Otherwise
6686 return false. */
6687 bool
6688 commutative_tree_code (enum tree_code code)
6690 switch (code)
6692 case PLUS_EXPR:
6693 case MULT_EXPR:
6694 case MULT_HIGHPART_EXPR:
6695 case MIN_EXPR:
6696 case MAX_EXPR:
6697 case BIT_IOR_EXPR:
6698 case BIT_XOR_EXPR:
6699 case BIT_AND_EXPR:
6700 case NE_EXPR:
6701 case EQ_EXPR:
6702 case UNORDERED_EXPR:
6703 case ORDERED_EXPR:
6704 case UNEQ_EXPR:
6705 case LTGT_EXPR:
6706 case TRUTH_AND_EXPR:
6707 case TRUTH_XOR_EXPR:
6708 case TRUTH_OR_EXPR:
6709 case WIDEN_MULT_EXPR:
6710 case VEC_WIDEN_MULT_HI_EXPR:
6711 case VEC_WIDEN_MULT_LO_EXPR:
6712 case VEC_WIDEN_MULT_EVEN_EXPR:
6713 case VEC_WIDEN_MULT_ODD_EXPR:
6714 return true;
6716 default:
6717 break;
6719 return false;
6722 /* Return true if CODE represents a ternary tree code for which the
6723 first two operands are commutative. Otherwise return false. */
6724 bool
6725 commutative_ternary_tree_code (enum tree_code code)
6727 switch (code)
6729 case WIDEN_MULT_PLUS_EXPR:
6730 case WIDEN_MULT_MINUS_EXPR:
6731 case DOT_PROD_EXPR:
6732 return true;
6734 default:
6735 break;
6737 return false;
6740 /* Returns true if CODE can overflow. */
6742 bool
6743 operation_can_overflow (enum tree_code code)
6745 switch (code)
6747 case PLUS_EXPR:
6748 case MINUS_EXPR:
6749 case MULT_EXPR:
6750 case LSHIFT_EXPR:
6751 /* Can overflow in various ways. */
6752 return true;
6753 case TRUNC_DIV_EXPR:
6754 case EXACT_DIV_EXPR:
6755 case FLOOR_DIV_EXPR:
6756 case CEIL_DIV_EXPR:
6757 /* For INT_MIN / -1. */
6758 return true;
6759 case NEGATE_EXPR:
6760 case ABS_EXPR:
6761 /* For -INT_MIN. */
6762 return true;
6763 default:
6764 /* These operators cannot overflow. */
6765 return false;
6769 /* Returns true if CODE operating on operands of type TYPE cannot overflow,
6770 or if -ftrapv does not generate trapping insns for CODE. */
6772 bool
6773 operation_no_trapping_overflow (tree type, enum tree_code code)
6775 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6777 /* We don't generate instructions that trap on overflow for complex or vector
6778 types. */
6779 if (!INTEGRAL_TYPE_P (type))
6780 return true;
6782 if (!TYPE_OVERFLOW_TRAPS (type))
6783 return true;
6785 switch (code)
6787 case PLUS_EXPR:
6788 case MINUS_EXPR:
6789 case MULT_EXPR:
6790 case NEGATE_EXPR:
6791 case ABS_EXPR:
6792 /* These operators can overflow, and -ftrapv generates trapping code for
6793 these. */
6794 return false;
6795 case TRUNC_DIV_EXPR:
6796 case EXACT_DIV_EXPR:
6797 case FLOOR_DIV_EXPR:
6798 case CEIL_DIV_EXPR:
6799 case LSHIFT_EXPR:
6800 /* These operators can overflow, but -ftrapv does not generate trapping
6801 code for these. */
6802 return true;
6803 default:
6804 /* These operators cannot overflow. */
6805 return true;
6809 /* Constructors for pointer, array and function types.
6810 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6811 constructed by language-dependent code, not here.) */
6813 /* Construct, lay out and return the type of pointers to TO_TYPE with
6814 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6815 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6816 indicate this type can reference all of memory. If such a type has
6817 already been constructed, reuse it. */
6819 tree
6820 build_pointer_type_for_mode (tree to_type, machine_mode mode,
6821 bool can_alias_all)
6823 tree t;
6824 bool could_alias = can_alias_all;
6826 if (to_type == error_mark_node)
6827 return error_mark_node;
6829 if (mode == VOIDmode)
6831 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6832 mode = targetm.addr_space.pointer_mode (as);
6835 /* If the pointed-to type has the may_alias attribute set, force
6836 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6837 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6838 can_alias_all = true;
6840 /* In some cases, languages will have things that aren't a POINTER_TYPE
6841 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6842 In that case, return that type without regard to the rest of our
6843 operands.
6845 ??? This is a kludge, but consistent with the way this function has
6846 always operated and there doesn't seem to be a good way to avoid this
6847 at the moment. */
6848 if (TYPE_POINTER_TO (to_type) != 0
6849 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
6850 return TYPE_POINTER_TO (to_type);
6852 /* First, if we already have a type for pointers to TO_TYPE and it's
6853 the proper mode, use it. */
6854 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
6855 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6856 return t;
6858 t = make_node (POINTER_TYPE);
6860 TREE_TYPE (t) = to_type;
6861 SET_TYPE_MODE (t, mode);
6862 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6863 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
6864 TYPE_POINTER_TO (to_type) = t;
6866 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6867 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6868 SET_TYPE_STRUCTURAL_EQUALITY (t);
6869 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6870 TYPE_CANONICAL (t)
6871 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
6872 mode, false);
6874 /* Lay out the type. This function has many callers that are concerned
6875 with expression-construction, and this simplifies them all. */
6876 layout_type (t);
6878 return t;
6881 /* By default build pointers in ptr_mode. */
6883 tree
6884 build_pointer_type (tree to_type)
6886 return build_pointer_type_for_mode (to_type, VOIDmode, false);
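/* Illustrative example (not part of this file): build_pointer_type
   (char_type_node) returns the node for "char *"; a second call returns
   the very same node, because the loop over TYPE_NEXT_PTR_TO above reuses
   an existing pointer type with matching mode and
   TYPE_REF_CAN_ALIAS_ALL.  */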
6889 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
6891 tree
6892 build_reference_type_for_mode (tree to_type, machine_mode mode,
6893 bool can_alias_all)
6895 tree t;
6896 bool could_alias = can_alias_all;
6898 if (to_type == error_mark_node)
6899 return error_mark_node;
6901 if (mode == VOIDmode)
6903 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6904 mode = targetm.addr_space.pointer_mode (as);
6907 /* If the pointed-to type has the may_alias attribute set, force
6908 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6909 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6910 can_alias_all = true;
6912 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
6913 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
6914 In that case, return that type without regard to the rest of our
6915 operands.
6917 ??? This is a kludge, but consistent with the way this function has
6918 always operated and there doesn't seem to be a good way to avoid this
6919 at the moment. */
6920 if (TYPE_REFERENCE_TO (to_type) != 0
6921 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
6922 return TYPE_REFERENCE_TO (to_type);
6924 /* First, if we already have a type for references to TO_TYPE and it's
6925 the proper mode, use it. */
6926 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
6927 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6928 return t;
6930 t = make_node (REFERENCE_TYPE);
6932 TREE_TYPE (t) = to_type;
6933 SET_TYPE_MODE (t, mode);
6934 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6935 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
6936 TYPE_REFERENCE_TO (to_type) = t;
6938 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6939 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6940 SET_TYPE_STRUCTURAL_EQUALITY (t);
6941 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6942 TYPE_CANONICAL (t)
6943 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
6944 mode, false);
6946 layout_type (t);
6948 return t;
6952 /* Build the node for the type of references-to-TO_TYPE by default
6953 in ptr_mode. */
6955 tree
6956 build_reference_type (tree to_type)
6958 return build_reference_type_for_mode (to_type, VOIDmode, false);
6961 #define MAX_INT_CACHED_PREC \
6962 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6963 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
6965 /* Builds a signed or unsigned integer type of precision PRECISION.
6966 Used for C bitfields whose precision does not match that of
6967 built-in target types. */
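/* For example (illustrative only), a front end handling the C bit-field
   "unsigned int x : 24;" would typically call
   build_nonstandard_integer_type (24, 1) to obtain a 24-bit unsigned
   INTEGER_TYPE that no built-in target type provides.  */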
6968 tree
6969 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
6970 int unsignedp)
6972 tree itype, ret;
6974 if (unsignedp)
6975 unsignedp = MAX_INT_CACHED_PREC + 1;
6977 if (precision <= MAX_INT_CACHED_PREC)
6979 itype = nonstandard_integer_type_cache[precision + unsignedp];
6980 if (itype)
6981 return itype;
6984 itype = make_node (INTEGER_TYPE);
6985 TYPE_PRECISION (itype) = precision;
6987 if (unsignedp)
6988 fixup_unsigned_type (itype);
6989 else
6990 fixup_signed_type (itype);
6992 inchash::hash hstate;
6993 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
6994 ret = type_hash_canon (hstate.end (), itype);
6995 if (precision <= MAX_INT_CACHED_PREC)
6996 nonstandard_integer_type_cache[precision + unsignedp] = ret;
6998 return ret;
7001 #define MAX_BOOL_CACHED_PREC \
7002 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7003 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7005 /* Builds a boolean type of precision PRECISION.
7006 Used for boolean vectors to choose proper vector element size. */
7007 tree
7008 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7010 tree type;
7012 if (precision <= MAX_BOOL_CACHED_PREC)
7014 type = nonstandard_boolean_type_cache[precision];
7015 if (type)
7016 return type;
7019 type = make_node (BOOLEAN_TYPE);
7020 TYPE_PRECISION (type) = precision;
7021 fixup_signed_type (type);
7023 if (precision <= MAX_BOOL_CACHED_PREC)
7024 nonstandard_boolean_type_cache[precision] = type;
7026 return type;
7029 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7030 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7031 is true, reuse such a type that has already been constructed. */
7033 static tree
7034 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7036 tree itype = make_node (INTEGER_TYPE);
7038 TREE_TYPE (itype) = type;
7040 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7041 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7043 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7044 SET_TYPE_MODE (itype, TYPE_MODE (type));
7045 TYPE_SIZE (itype) = TYPE_SIZE (type);
7046 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7047 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7048 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7049 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7051 if (!shared)
7052 return itype;
7054 if ((TYPE_MIN_VALUE (itype)
7055 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7056 || (TYPE_MAX_VALUE (itype)
7057 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7059 /* Since we cannot reliably merge this type, we need to compare it using
7060 structural equality checks. */
7061 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7062 return itype;
7065 hashval_t hash = type_hash_canon_hash (itype);
7066 itype = type_hash_canon (hash, itype);
7068 return itype;
7071 /* Wrapper around build_range_type_1 with SHARED set to true. */
7073 tree
7074 build_range_type (tree type, tree lowval, tree highval)
7076 return build_range_type_1 (type, lowval, highval, true);
7079 /* Wrapper around build_range_type_1 with SHARED set to false. */
7081 tree
7082 build_nonshared_range_type (tree type, tree lowval, tree highval)
7084 return build_range_type_1 (type, lowval, highval, false);
7087 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7088 MAXVAL should be the maximum value in the domain
7089 (one less than the length of the array).
7091 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7092 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7093 The limit exists because the result is a signed type and we don't handle
7094 sizes that use more than one HOST_WIDE_INT. */
7096 tree
7097 build_index_type (tree maxval)
7099 return build_range_type (sizetype, size_zero_node, maxval);
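/* Illustrative sketch (not code from this file): the C array type
   "int[10]" can be built as

	build_array_type (integer_type_node,
			  build_index_type (size_int (9)));

   i.e. MAXVAL is the highest valid index, one less than the number of
   elements.  */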
7102 /* Return true if the debug information for TYPE, a subtype, should be emitted
7103 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7104 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7105 debug info and doesn't reflect the source code. */
7107 bool
7108 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7110 tree base_type = TREE_TYPE (type), low, high;
7112 /* Subrange types have a base type which is an integral type. */
7113 if (!INTEGRAL_TYPE_P (base_type))
7114 return false;
7116 /* Get the real bounds of the subtype. */
7117 if (lang_hooks.types.get_subrange_bounds)
7118 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7119 else
7121 low = TYPE_MIN_VALUE (type);
7122 high = TYPE_MAX_VALUE (type);
7125 /* If the type and its base type have the same representation and the same
7126 name, then the type is not a subrange but a copy of the base type. */
7127 if ((TREE_CODE (base_type) == INTEGER_TYPE
7128 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7129 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7130 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7131 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7132 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7133 return false;
7135 if (lowval)
7136 *lowval = low;
7137 if (highval)
7138 *highval = high;
7139 return true;
7142 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7143 and number of elements specified by the range of values of INDEX_TYPE.
7144 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7145 If SHARED is true, reuse such a type that has already been constructed.
7146 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7148 tree
7149 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7150 bool shared, bool set_canonical)
7152 tree t;
7154 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7156 error ("arrays of functions are not meaningful");
7157 elt_type = integer_type_node;
7160 t = make_node (ARRAY_TYPE);
7161 TREE_TYPE (t) = elt_type;
7162 TYPE_DOMAIN (t) = index_type;
7163 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7164 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7165 layout_type (t);
7167 if (shared)
7169 hashval_t hash = type_hash_canon_hash (t);
7170 t = type_hash_canon (hash, t);
7173 if (TYPE_CANONICAL (t) == t && set_canonical)
7175 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7176 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7177 || in_lto_p)
7178 SET_TYPE_STRUCTURAL_EQUALITY (t);
7179 else if (TYPE_CANONICAL (elt_type) != elt_type
7180 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7181 TYPE_CANONICAL (t)
7182 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7183 index_type
7184 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7185 typeless_storage, shared, set_canonical);
7188 return t;
7191 /* Wrapper around build_array_type_1 with SHARED set to true. */
7193 tree
7194 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7196 return
7197 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7200 /* Wrapper around build_array_type_1 with SHARED set to false. */
7202 tree
7203 build_nonshared_array_type (tree elt_type, tree index_type)
7205 return build_array_type_1 (elt_type, index_type, false, false, true);
7208 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7209 sizetype. */
7211 tree
7212 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7214 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7217 /* Recursively examines the array elements of TYPE, until a non-array
7218 element type is found. */
7220 tree
7221 strip_array_types (tree type)
7223 while (TREE_CODE (type) == ARRAY_TYPE)
7224 type = TREE_TYPE (type);
7226 return type;
7229 /* Computes the canonical argument types from the argument type list
7230 ARGTYPES.
7232 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7233 on entry to this function, or if any of the ARGTYPES are
7234 structural.
7236 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7237 true on entry to this function, or if any of the ARGTYPES are
7238 non-canonical.
7240 Returns a canonical argument list, which may be ARGTYPES when the
7241 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7242 true) or would not differ from ARGTYPES. */
7244 static tree
7245 maybe_canonicalize_argtypes (tree argtypes,
7246 bool *any_structural_p,
7247 bool *any_noncanonical_p)
7249 tree arg;
7250 bool any_noncanonical_argtypes_p = false;
7252 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7254 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7255 /* Fail gracefully by stating that the type is structural. */
7256 *any_structural_p = true;
7257 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7258 *any_structural_p = true;
7259 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7260 || TREE_PURPOSE (arg))
7261 /* If the argument has a default argument, we consider it
7262 non-canonical even though the type itself is canonical.
7263 That way, different variants of function and method types
7264 with default arguments will all point to the variant with
7265 no defaults as their canonical type. */
7266 any_noncanonical_argtypes_p = true;
7269 if (*any_structural_p)
7270 return argtypes;
7272 if (any_noncanonical_argtypes_p)
7274 /* Build the canonical list of argument types. */
7275 tree canon_argtypes = NULL_TREE;
7276 bool is_void = false;
7278 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7280 if (arg == void_list_node)
7281 is_void = true;
7282 else
7283 canon_argtypes = tree_cons (NULL_TREE,
7284 TYPE_CANONICAL (TREE_VALUE (arg)),
7285 canon_argtypes);
7288 canon_argtypes = nreverse (canon_argtypes);
7289 if (is_void)
7290 canon_argtypes = chainon (canon_argtypes, void_list_node);
7292 /* There is a non-canonical type. */
7293 *any_noncanonical_p = true;
7294 return canon_argtypes;
7297 /* The canonical argument types are the same as ARGTYPES. */
7298 return argtypes;
7301 /* Construct, lay out and return
7302 the type of functions returning type VALUE_TYPE
7303 given arguments of types ARG_TYPES.
7304 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7305 are data type nodes for the arguments of the function.
7306 If such a type has already been constructed, reuse it. */
7308 tree
7309 build_function_type (tree value_type, tree arg_types)
7311 tree t;
7312 inchash::hash hstate;
7313 bool any_structural_p, any_noncanonical_p;
7314 tree canon_argtypes;
7316 gcc_assert (arg_types != error_mark_node);
7318 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7320 error ("function return type cannot be function");
7321 value_type = integer_type_node;
7324 /* Make a node of the sort we want. */
7325 t = make_node (FUNCTION_TYPE);
7326 TREE_TYPE (t) = value_type;
7327 TYPE_ARG_TYPES (t) = arg_types;
7329 /* If we already have such a type, use the old one. */
7330 hashval_t hash = type_hash_canon_hash (t);
7331 t = type_hash_canon (hash, t);
7333 /* Set up the canonical type. */
7334 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7335 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7336 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7337 &any_structural_p,
7338 &any_noncanonical_p);
7339 if (any_structural_p)
7340 SET_TYPE_STRUCTURAL_EQUALITY (t);
7341 else if (any_noncanonical_p)
7342 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7343 canon_argtypes);
7345 if (!COMPLETE_TYPE_P (t))
7346 layout_type (t);
7347 return t;
7350 /* Build a function type. The RETURN_TYPE is the type returned by the
7351 function. If VAARGS is set, no void_type_node is appended to the
7352 list. ARGP must always be terminated by a NULL_TREE. */
7354 static tree
7355 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7357 tree t, args, last;
7359 t = va_arg (argp, tree);
7360 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7361 args = tree_cons (NULL_TREE, t, args);
7363 if (vaargs)
7365 last = args;
7366 if (args != NULL_TREE)
7367 args = nreverse (args);
7368 gcc_assert (last != void_list_node);
7370 else if (args == NULL_TREE)
7371 args = void_list_node;
7372 else
7374 last = args;
7375 args = nreverse (args);
7376 TREE_CHAIN (last) = void_list_node;
7378 args = build_function_type (return_type, args);
7380 return args;
7383 /* Build a function type. The RETURN_TYPE is the type returned by the
7384 function. If additional arguments are provided, they are
7385 additional argument types. The list of argument types must always
7386 be terminated by NULL_TREE. */
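/* Illustrative example (the globals are real, the call itself is just a
   sketch): the type of "double hypot (double, double)" can be built as

	build_function_type_list (double_type_node, double_type_node,
				  double_type_node, NULL_TREE);

   The trailing NULL_TREE only terminates the argument list; it does not
   become an argument type.  */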
7388 tree
7389 build_function_type_list (tree return_type, ...)
7391 tree args;
7392 va_list p;
7394 va_start (p, return_type);
7395 args = build_function_type_list_1 (false, return_type, p);
7396 va_end (p);
7397 return args;
7400 /* Build a variable argument function type. The RETURN_TYPE is the
7401 type returned by the function. If additional arguments are provided,
7402 they are additional argument types. The list of argument types must
7403 always be terminated by NULL_TREE. */
7405 tree
7406 build_varargs_function_type_list (tree return_type, ...)
7408 tree args;
7409 va_list p;
7411 va_start (p, return_type);
7412 args = build_function_type_list_1 (true, return_type, p);
7413 va_end (p);
7415 return args;
7418 /* Build a function type. RETURN_TYPE is the type returned by the
7419 function; VAARGS indicates whether the function takes varargs. The
7420 function takes N named arguments, the types of which are provided in
7421 ARG_TYPES. */
7423 static tree
7424 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7425 tree *arg_types)
7427 int i;
7428 tree t = vaargs ? NULL_TREE : void_list_node;
7430 for (i = n - 1; i >= 0; i--)
7431 t = tree_cons (NULL_TREE, arg_types[i], t);
7433 return build_function_type (return_type, t);
7436 /* Build a function type. RETURN_TYPE is the type returned by the
7437 function. The function takes N named arguments, the types of which
7438 are provided in ARG_TYPES. */
7440 tree
7441 build_function_type_array (tree return_type, int n, tree *arg_types)
7443 return build_function_type_array_1 (false, return_type, n, arg_types);
7446 /* Build a variable argument function type. RETURN_TYPE is the type
7447 returned by the function. The function takes N named arguments, the
7448 types of which are provided in ARG_TYPES. */
7450 tree
7451 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7453 return build_function_type_array_1 (true, return_type, n, arg_types);
7456 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7457 and ARGTYPES (a TREE_LIST) are the return type and argument types
7458 for the method. An implicit additional parameter (of type
7459 pointer-to-BASETYPE) is added to the ARGTYPES. */
7461 tree
7462 build_method_type_directly (tree basetype,
7463 tree rettype,
7464 tree argtypes)
7466 tree t;
7467 tree ptype;
7468 bool any_structural_p, any_noncanonical_p;
7469 tree canon_argtypes;
7471 /* Make a node of the sort we want. */
7472 t = make_node (METHOD_TYPE);
7474 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7475 TREE_TYPE (t) = rettype;
7476 ptype = build_pointer_type (basetype);
7478 /* The actual arglist for this function includes a "hidden" argument
7479 which is "this". Put it into the list of argument types. */
7480 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7481 TYPE_ARG_TYPES (t) = argtypes;
7483 /* If we already have such a type, use the old one. */
7484 hashval_t hash = type_hash_canon_hash (t);
7485 t = type_hash_canon (hash, t);
7487 /* Set up the canonical type. */
7488 any_structural_p
7489 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7490 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7491 any_noncanonical_p
7492 = (TYPE_CANONICAL (basetype) != basetype
7493 || TYPE_CANONICAL (rettype) != rettype);
7494 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7495 &any_structural_p,
7496 &any_noncanonical_p);
7497 if (any_structural_p)
7498 SET_TYPE_STRUCTURAL_EQUALITY (t);
7499 else if (any_noncanonical_p)
7500 TYPE_CANONICAL (t)
7501 = build_method_type_directly (TYPE_CANONICAL (basetype),
7502 TYPE_CANONICAL (rettype),
7503 canon_argtypes);
7504 if (!COMPLETE_TYPE_P (t))
7505 layout_type (t);
7507 return t;
7510 /* Construct, lay out and return the type of methods belonging to class
7511 BASETYPE and whose arguments and values are described by TYPE.
7512 If that type exists already, reuse it.
7513 TYPE must be a FUNCTION_TYPE node. */
7515 tree
7516 build_method_type (tree basetype, tree type)
7518 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7520 return build_method_type_directly (basetype,
7521 TREE_TYPE (type),
7522 TYPE_ARG_TYPES (type));
7525 /* Construct, lay out and return the type of offsets to a value
7526 of type TYPE, within an object of type BASETYPE.
7527 If a suitable offset type exists already, reuse it. */
7529 tree
7530 build_offset_type (tree basetype, tree type)
7532 tree t;
7534 /* Make a node of the sort we want. */
7535 t = make_node (OFFSET_TYPE);
7537 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7538 TREE_TYPE (t) = type;
7540 /* If we already have such a type, use the old one. */
7541 hashval_t hash = type_hash_canon_hash (t);
7542 t = type_hash_canon (hash, t);
7544 if (!COMPLETE_TYPE_P (t))
7545 layout_type (t);
7547 if (TYPE_CANONICAL (t) == t)
7549 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7550 || TYPE_STRUCTURAL_EQUALITY_P (type))
7551 SET_TYPE_STRUCTURAL_EQUALITY (t);
7552 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
7553 || TYPE_CANONICAL (type) != type)
7554 TYPE_CANONICAL (t)
7555 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
7556 TYPE_CANONICAL (type));
7559 return t;
7562 /* Create a complex type whose components are COMPONENT_TYPE.
7564 If NAMED is true, the type is given a TYPE_NAME. We do not always
7565 do so because this creates a DECL node and thus makes the DECL_UIDs
7566 dependent on the type canonicalization hashtable, which is GC-ed,
7567 so the DECL_UIDs would not be stable wrt garbage collection. */
7569 tree
7570 build_complex_type (tree component_type, bool named)
7572 gcc_assert (INTEGRAL_TYPE_P (component_type)
7573 || SCALAR_FLOAT_TYPE_P (component_type)
7574 || FIXED_POINT_TYPE_P (component_type));
7576 /* Make a node of the sort we want. */
7577 tree probe = make_node (COMPLEX_TYPE);
7579 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
7581 /* If we already have such a type, use the old one. */
7582 hashval_t hash = type_hash_canon_hash (probe);
7583 tree t = type_hash_canon (hash, probe);
7585 if (t == probe)
7587 /* We created a new type. The hash insertion will have laid
7588 out the type. We need to check the canonicalization and
7589 maybe set the name. */
7590 gcc_checking_assert (COMPLETE_TYPE_P (t)
7591 && !TYPE_NAME (t)
7592 && TYPE_CANONICAL (t) == t);
7594 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
7595 SET_TYPE_STRUCTURAL_EQUALITY (t);
7596 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
7597 TYPE_CANONICAL (t)
7598 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
7600 /* We need to create a name, since complex is a fundamental type. */
7601 if (named)
7603 const char *name = NULL;
7605 if (TREE_TYPE (t) == char_type_node)
7606 name = "complex char";
7607 else if (TREE_TYPE (t) == signed_char_type_node)
7608 name = "complex signed char";
7609 else if (TREE_TYPE (t) == unsigned_char_type_node)
7610 name = "complex unsigned char";
7611 else if (TREE_TYPE (t) == short_integer_type_node)
7612 name = "complex short int";
7613 else if (TREE_TYPE (t) == short_unsigned_type_node)
7614 name = "complex short unsigned int";
7615 else if (TREE_TYPE (t) == integer_type_node)
7616 name = "complex int";
7617 else if (TREE_TYPE (t) == unsigned_type_node)
7618 name = "complex unsigned int";
7619 else if (TREE_TYPE (t) == long_integer_type_node)
7620 name = "complex long int";
7621 else if (TREE_TYPE (t) == long_unsigned_type_node)
7622 name = "complex long unsigned int";
7623 else if (TREE_TYPE (t) == long_long_integer_type_node)
7624 name = "complex long long int";
7625 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
7626 name = "complex long long unsigned int";
7628 if (name != NULL)
7629 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
7630 get_identifier (name), t);
7634 return build_qualified_type (t, TYPE_QUALS (component_type));
7637 /* If TYPE is a real or complex floating-point type and the target
7638 does not directly support arithmetic on TYPE then return the wider
7639 type to be used for arithmetic on TYPE. Otherwise, return
7640 NULL_TREE. */
7642 tree
7643 excess_precision_type (tree type)
7645 /* The target can give two different responses to the question of
7646 which excess precision mode it would like depending on whether we
7647 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7649 enum excess_precision_type requested_type
7650 = (flag_excess_precision == EXCESS_PRECISION_FAST
7651 ? EXCESS_PRECISION_TYPE_FAST
7652 : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
7653 ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));
7655 enum flt_eval_method target_flt_eval_method
7656 = targetm.c.excess_precision (requested_type);
7658 /* The target should not ask for unpredictable float evaluation (though
7659 it might advertise that implicitly the evaluation is unpredictable,
7660 but we don't care about that here, it will have been reported
7661 elsewhere). If it does ask for unpredictable evaluation, we have
7662 nothing to do here. */
7663 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7665 /* Nothing to do. The target has asked for all types we know about
7666 to be computed with their native precision and range. */
7667 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7668 return NULL_TREE;
7670 /* The target will promote this type in a target-dependent way, so excess
7671 precision ought to leave it alone. */
7672 if (targetm.promoted_type (type) != NULL_TREE)
7673 return NULL_TREE;
7675 machine_mode float16_type_mode = (float16_type_node
7676 ? TYPE_MODE (float16_type_node)
7677 : VOIDmode);
7678 machine_mode float_type_mode = TYPE_MODE (float_type_node);
7679 machine_mode double_type_mode = TYPE_MODE (double_type_node);
7681 switch (TREE_CODE (type))
7683 case REAL_TYPE:
7685 machine_mode type_mode = TYPE_MODE (type);
7686 switch (target_flt_eval_method)
7688 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7689 if (type_mode == float16_type_mode)
7690 return float_type_node;
7691 break;
7692 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7693 if (type_mode == float16_type_mode
7694 || type_mode == float_type_mode)
7695 return double_type_node;
7696 break;
7697 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7698 if (type_mode == float16_type_mode
7699 || type_mode == float_type_mode
7700 || type_mode == double_type_mode)
7701 return long_double_type_node;
7702 break;
7703 default:
7704 gcc_unreachable ();
7706 break;
7708 case COMPLEX_TYPE:
7710 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7711 return NULL_TREE;
7712 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7713 switch (target_flt_eval_method)
7715 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7716 if (type_mode == float16_type_mode)
7717 return complex_float_type_node;
7718 break;
7719 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7720 if (type_mode == float16_type_mode
7721 || type_mode == float_type_mode)
7722 return complex_double_type_node;
7723 break;
7724 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7725 if (type_mode == float16_type_mode
7726 || type_mode == float_type_mode
7727 || type_mode == double_type_mode)
7728 return complex_long_double_type_node;
7729 break;
7730 default:
7731 gcc_unreachable ();
7733 break;
7735 default:
7736 break;
7739 return NULL_TREE;
7742 /* Return OP, stripped of any conversions to wider types as much as is safe.
7743 Converting the value back to OP's type makes a value equivalent to OP.
7745 If FOR_TYPE is nonzero, we return a value which, if converted to
7746 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7748 OP must have integer, real or enumeral type. Pointers are not allowed!
7750 There are some cases where the obvious value we could return
7751 would regenerate to OP if converted to OP's type,
7752 but would not extend like OP to wider types.
7753 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7754 For example, if OP is (unsigned short)(signed char)-1,
7755 we avoid returning (signed char)-1 if FOR_TYPE is int,
7756 even though extending that to an unsigned short would regenerate OP,
7757 since the result of extending (signed char)-1 to (int)
7758 is different from (int) OP. */
7760 tree
7761 get_unwidened (tree op, tree for_type)
7763 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7764 tree type = TREE_TYPE (op);
7765 unsigned final_prec
7766 = TYPE_PRECISION (for_type != 0 ? for_type : type);
7767 int uns
7768 = (for_type != 0 && for_type != type
7769 && final_prec > TYPE_PRECISION (type)
7770 && TYPE_UNSIGNED (type));
7771 tree win = op;
7773 while (CONVERT_EXPR_P (op))
7775 int bitschange;
7777 /* TYPE_PRECISION on vector types has different meaning
7778 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7779 so avoid them here. */
7780 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
7781 break;
7783 bitschange = TYPE_PRECISION (TREE_TYPE (op))
7784 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
7786 /* Truncations are many-one so cannot be removed.
7787 Unless we are later going to truncate down even farther. */
7788 if (bitschange < 0
7789 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
7790 break;
7792 /* See what's inside this conversion. If we decide to strip it,
7793 we will set WIN. */
7794 op = TREE_OPERAND (op, 0);
7796 /* If we have not stripped any zero-extensions (uns is 0),
7797 we can strip any kind of extension.
7798 If we have previously stripped a zero-extension,
7799 only zero-extensions can safely be stripped.
7800 Any extension can be stripped if the bits it would produce
7801 are all going to be discarded later by truncating to FOR_TYPE. */
7803 if (bitschange > 0)
7805 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
7806 win = op;
7807 /* TYPE_UNSIGNED says whether this is a zero-extension.
7808 Let's avoid computing it if it does not affect WIN
7809 and if UNS will not be needed again. */
7810 if ((uns
7811 || CONVERT_EXPR_P (op))
7812 && TYPE_UNSIGNED (TREE_TYPE (op)))
7814 uns = 1;
7815 win = op;
7820 /* If we finally reach a constant, see if it fits in something smaller and
7821 in that case convert it. */
7822 if (TREE_CODE (win) == INTEGER_CST)
7824 tree wtype = TREE_TYPE (win);
7825 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
7826 if (for_type)
7827 prec = MAX (prec, final_prec);
7828 if (prec < TYPE_PRECISION (wtype))
7830 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
7831 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
7832 win = fold_convert (t, win);
7836 return win;
7839 /* Return OP or a simpler expression for a narrower value
7840 which can be sign-extended or zero-extended to give back OP.
7841 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
7842 or 0 if the value should be sign-extended. */
7844 tree
7845 get_narrower (tree op, int *unsignedp_ptr)
7847 int uns = 0;
7848 int first = 1;
7849 tree win = op;
7850 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
7852 if (TREE_CODE (op) == COMPOUND_EXPR)
7855 op = TREE_OPERAND (op, 1);
7856 while (TREE_CODE (op) == COMPOUND_EXPR);
7857 tree ret = get_narrower (op, unsignedp_ptr);
7858 if (ret == op)
7859 return win;
7860 auto_vec <tree, 16> v;
7861 unsigned int i;
7862 for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
7863 op = TREE_OPERAND (op, 1))
7864 v.safe_push (op);
7865 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
7866 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
7867 TREE_TYPE (ret), TREE_OPERAND (op, 0),
7868 ret);
7869 return ret;
7871 while (TREE_CODE (op) == NOP_EXPR)
7873 int bitschange
7874 = (TYPE_PRECISION (TREE_TYPE (op))
7875 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
7877 /* Truncations are many-one so cannot be removed. */
7878 if (bitschange < 0)
7879 break;
7881 /* See what's inside this conversion. If we decide to strip it,
7882 we will set WIN. */
7884 if (bitschange > 0)
7886 op = TREE_OPERAND (op, 0);
7887 /* An extension: the outermost one can be stripped,
7888 but remember whether it is zero or sign extension. */
7889 if (first)
7890 uns = TYPE_UNSIGNED (TREE_TYPE (op));
7891 /* Otherwise, if a sign extension has been stripped,
7892 only sign extensions can now be stripped;
7893 if a zero extension has been stripped, only zero-extensions. */
7894 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
7895 break;
7896 first = 0;
7898 else /* bitschange == 0 */
7900 /* A change in nominal type can always be stripped, but we must
7901 preserve the unsignedness. */
7902 if (first)
7903 uns = TYPE_UNSIGNED (TREE_TYPE (op));
7904 first = 0;
7905 op = TREE_OPERAND (op, 0);
7906 /* Keep trying to narrow, but don't assign op to win if it
7907 would turn an integral type into something else. */
7908 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
7909 continue;
7912 win = op;
7915 if (TREE_CODE (op) == COMPONENT_REF
7916 /* Since type_for_size always gives an integer type. */
7917 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
7918 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
7919 /* Ensure field is laid out already. */
7920 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
7921 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
7923 unsigned HOST_WIDE_INT innerprec
7924 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
7925 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
7926 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
7927 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
7929 /* We can get this structure field in a narrower type that fits it,
7930 but the resulting extension to its nominal type (a fullword type)
7931 must satisfy the same conditions as for other extensions.
7933 Do this only for fields that are aligned (not bit-fields),
7934 because when bit-field insns will be used there is no
7935 advantage in doing this. */
7937 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
7938 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
7939 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
7940 && type != 0)
7942 if (first)
7943 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
7944 win = fold_convert (type, op);
7948 *unsignedp_ptr = uns;
7949 return win;
7952 /* Return true if integer constant C has a value that is permissible
7953 for TYPE, an integral type. */
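/* Illustrative example (an assumption, not code from this file):
   int_fits_type_p (build_int_cst (integer_type_node, 255),
   unsigned_char_type_node) is true, while the same call with 256 or -1 is
   false, since unsigned char only holds 0..255.  */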
7955 bool
7956 int_fits_type_p (const_tree c, const_tree type)
7958 tree type_low_bound, type_high_bound;
7959 bool ok_for_low_bound, ok_for_high_bound;
7960 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
7962 /* Non-standard boolean types can have arbitrary precision but various
7963 transformations assume that they can only take values 0 and +/-1. */
7964 if (TREE_CODE (type) == BOOLEAN_TYPE)
7965 return wi::fits_to_boolean_p (wi::to_wide (c), type);
7967 retry:
7968 type_low_bound = TYPE_MIN_VALUE (type);
7969 type_high_bound = TYPE_MAX_VALUE (type);
7971 /* If at least one bound of the type is a constant integer, we can check
7972 ourselves and maybe make a decision. If no such decision is possible, but
7973 this type is a subtype, try checking against that. Otherwise, use
7974 fits_to_tree_p, which checks against the precision.
7976 Compute the status for each possibly constant bound, and return if we see
7977 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
7978 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
7979 for "constant known to fit". */
7981 /* Check if c >= type_low_bound. */
7982 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
7984 if (tree_int_cst_lt (c, type_low_bound))
7985 return false;
7986 ok_for_low_bound = true;
7988 else
7989 ok_for_low_bound = false;
7991 /* Check if c <= type_high_bound. */
7992 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
7994 if (tree_int_cst_lt (type_high_bound, c))
7995 return false;
7996 ok_for_high_bound = true;
7998 else
7999 ok_for_high_bound = false;
8001 /* If the constant fits both bounds, the result is known. */
8002 if (ok_for_low_bound && ok_for_high_bound)
8003 return true;
8005 /* Perform some generic filtering which may allow making a decision
8006 even if the bounds are not constant. First, negative integers
8007 never fit in unsigned types, */
8008 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8009 return false;
8011 /* Second, narrower types always fit in wider ones. */
8012 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8013 return true;
8015 /* Third, unsigned integers with top bit set never fit signed types. */
8016 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8018 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8019 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8021 /* When a tree_cst is converted to a wide-int, the precision
8022 is taken from the type. However, if the precision of the
8023 mode underneath the type is smaller than that, it is
8024 possible that the value will not fit. The test below
8025 fails if any bit is set between the sign bit of the
8026 underlying mode and the top bit of the type. */
8027 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8028 return false;
8030 else if (wi::neg_p (wi::to_wide (c)))
8031 return false;
8034 /* If we haven't been able to decide at this point, there is nothing more we
8035 can check ourselves here. Look at the base type if we have one and it
8036 has the same precision. */
8037 if (TREE_CODE (type) == INTEGER_TYPE
8038 && TREE_TYPE (type) != 0
8039 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8041 type = TREE_TYPE (type);
8042 goto retry;
8045 /* Or to fits_to_tree_p, if nothing else. */
8046 return wi::fits_to_tree_p (wi::to_wide (c), type);
8049 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8050 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8051 represented (assuming two's-complement arithmetic) within the bit
8052 precision of the type are returned instead. */
8054 void
8055 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8057 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8058 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8059 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8060 else
8062 if (TYPE_UNSIGNED (type))
8063 mpz_set_ui (min, 0);
8064 else
8066 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8067 wi::to_mpz (mn, min, SIGNED);
8071 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8072 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8073 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8074 else
8076 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8077 wi::to_mpz (mn, max, TYPE_SIGN (type));
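/* Illustrative usage sketch (not part of this file); the concrete values
   assume a 32-bit `int'.  */
#if 0
  mpz_t lo, hi;
  mpz_init (lo);
  mpz_init (hi);
  get_type_static_bounds (integer_type_node, lo, hi);
  /* lo is now -2147483648 and hi is 2147483647.  */
  mpz_clear (lo);
  mpz_clear (hi);
#endif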
8081 /* Return true if VAR is an automatic variable. */
8083 bool
8084 auto_var_p (const_tree var)
8086 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8087 || TREE_CODE (var) == PARM_DECL)
8088 && ! TREE_STATIC (var))
8089 || TREE_CODE (var) == RESULT_DECL);
8092 /* Return true if VAR is an automatic variable defined in function FN. */
8094 bool
8095 auto_var_in_fn_p (const_tree var, const_tree fn)
8097 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8098 && (auto_var_p (var)
8099 || TREE_CODE (var) == LABEL_DECL));
8102 /* Subprogram of following function. Called by walk_tree.
8104 Return *TP if it is an automatic variable or parameter of the
8105 function passed in as DATA. */
8107 static tree
8108 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8110 tree fn = (tree) data;
8112 if (TYPE_P (*tp))
8113 *walk_subtrees = 0;
8115 else if (DECL_P (*tp)
8116 && auto_var_in_fn_p (*tp, fn))
8117 return *tp;
8119 return NULL_TREE;
8122 /* Returns true if TYPE is, contains, or refers to a type with variable
8123 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8124 arguments, but not the return type. If FN is nonzero, only return
8125 true if a modifier of the type or position of FN is a variable or
8126 parameter inside FN.
8128 This concept is more general than that of C99 'variably modified types':
8129 in C99, a struct type is never variably modified because a VLA may not
8130 appear as a structure member. However, in GNU C, code like:
8132 struct S { int i[f()]; };
8134 is valid, and other languages may define similar constructs. */
8136 bool
8137 variably_modified_type_p (tree type, tree fn)
8139 tree t;
8141 /* Test if T is either variable (if FN is zero) or an expression containing
8142 a variable in FN. If TYPE isn't gimplified, return true also if
8143 gimplify_one_sizepos would gimplify the expression into a local
8144 variable. */
8145 #define RETURN_TRUE_IF_VAR(T) \
8146 do { tree _t = (T); \
8147 if (_t != NULL_TREE \
8148 && _t != error_mark_node \
8149 && !CONSTANT_CLASS_P (_t) \
8150 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8151 && (!fn \
8152 || (!TYPE_SIZES_GIMPLIFIED (type) \
8153 && (TREE_CODE (_t) != VAR_DECL \
8154 && !CONTAINS_PLACEHOLDER_P (_t))) \
8155 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8156 return true; } while (0)
8158 if (type == error_mark_node)
8159 return false;
8161 /* If TYPE itself has variable size, it is variably modified. */
8162 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8163 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8165 switch (TREE_CODE (type))
8167 case POINTER_TYPE:
8168 case REFERENCE_TYPE:
8169 case VECTOR_TYPE:
8170 /* Ada can have pointer types referring to themselves indirectly. */
8171 if (TREE_VISITED (type))
8172 return false;
8173 TREE_VISITED (type) = true;
8174 if (variably_modified_type_p (TREE_TYPE (type), fn))
8176 TREE_VISITED (type) = false;
8177 return true;
8179 TREE_VISITED (type) = false;
8180 break;
8182 case FUNCTION_TYPE:
8183 case METHOD_TYPE:
8184 /* If TYPE is a function type, it is variably modified if the
8185 return type is variably modified. */
8186 if (variably_modified_type_p (TREE_TYPE (type), fn))
8187 return true;
8188 break;
8190 case INTEGER_TYPE:
8191 case REAL_TYPE:
8192 case FIXED_POINT_TYPE:
8193 case ENUMERAL_TYPE:
8194 case BOOLEAN_TYPE:
8195 /* Scalar types are variably modified if their end points
8196 aren't constant. */
8197 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8198 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8199 break;
8201 case RECORD_TYPE:
8202 case UNION_TYPE:
8203 case QUAL_UNION_TYPE:
8204 /* We can't see if any of the fields are variably-modified by the
8205 definition we normally use, since that would produce infinite
8206 recursion via pointers. */
8207 /* This is variably modified if some field's type is. */
8208 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8209 if (TREE_CODE (t) == FIELD_DECL)
8211 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8212 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8213 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8215 /* If the type is a qualified union, then the DECL_QUALIFIER
8216 of fields can also be an expression containing a variable. */
8217 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8218 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8220 /* If the field is a qualified union, then it's only a container
8221 for what's inside so we look into it. That's necessary in LTO
8222 mode because the sizes of the field tested above have been set
8223 to PLACEHOLDER_EXPRs by free_lang_data. */
8224 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
8225 && variably_modified_type_p (TREE_TYPE (t), fn))
8226 return true;
8228 break;
8230 case ARRAY_TYPE:
8231 /* Do not call ourselves to avoid infinite recursion. This is
8232 variably modified if the element type is. */
8233 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8234 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8235 break;
8237 default:
8238 break;
8241 /* The current language may have other cases to check, but in general,
8242 all other types are not variably modified. */
8243 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8245 #undef RETURN_TRUE_IF_VAR
8248 /* Given a DECL or TYPE, return the scope in which it was declared, or
8249 NULL_TREE if there is no containing scope. */
8251 tree
8252 get_containing_scope (const_tree t)
8254 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8257 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8259 const_tree
8260 get_ultimate_context (const_tree decl)
8262 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8264 if (TREE_CODE (decl) == BLOCK)
8265 decl = BLOCK_SUPERCONTEXT (decl);
8266 else
8267 decl = get_containing_scope (decl);
8269 return decl;
8272 /* Return the innermost context enclosing DECL that is
8273 a FUNCTION_DECL, or zero if none. */
8275 tree
8276 decl_function_context (const_tree decl)
8278 tree context;
8280 if (TREE_CODE (decl) == ERROR_MARK)
8281 return 0;
8283 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8284 where we look up the function at runtime. Such functions always take
8285 a first argument of type 'pointer to real context'.
8287 C++ should really be fixed to use DECL_CONTEXT for the real context,
8288 and use something else for the "virtual context". */
8289 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8290 context
8291 = TYPE_MAIN_VARIANT
8292 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8293 else
8294 context = DECL_CONTEXT (decl);
8296 while (context && TREE_CODE (context) != FUNCTION_DECL)
8298 if (TREE_CODE (context) == BLOCK)
8299 context = BLOCK_SUPERCONTEXT (context);
8300 else
8301 context = get_containing_scope (context);
8304 return context;
8307 /* Return the innermost context enclosing DECL that is
8308 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8309 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8311 tree
8312 decl_type_context (const_tree decl)
8314 tree context = DECL_CONTEXT (decl);
8316 while (context)
8317 switch (TREE_CODE (context))
8319 case NAMESPACE_DECL:
8320 case TRANSLATION_UNIT_DECL:
8321 return NULL_TREE;
8323 case RECORD_TYPE:
8324 case UNION_TYPE:
8325 case QUAL_UNION_TYPE:
8326 return context;
8328 case TYPE_DECL:
8329 case FUNCTION_DECL:
8330 context = DECL_CONTEXT (context);
8331 break;
8333 case BLOCK:
8334 context = BLOCK_SUPERCONTEXT (context);
8335 break;
8337 default:
8338 gcc_unreachable ();
8341 return NULL_TREE;
8344 /* CALL is a CALL_EXPR. Return the declaration for the function
8345 called, or NULL_TREE if the called function cannot be
8346 determined. */
8348 tree
8349 get_callee_fndecl (const_tree call)
8351 tree addr;
8353 if (call == error_mark_node)
8354 return error_mark_node;
8356 /* It's invalid to call this function with anything but a
8357 CALL_EXPR. */
8358 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8360 /* The first operand to the CALL is the address of the function
8361 called. */
8362 addr = CALL_EXPR_FN (call);
8364 /* If there is no function, return early. */
8365 if (addr == NULL_TREE)
8366 return NULL_TREE;
8368 STRIP_NOPS (addr);
8370 /* If this is a readonly function pointer, extract its initial value. */
8371 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8372 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8373 && DECL_INITIAL (addr))
8374 addr = DECL_INITIAL (addr);
8376 /* If the address is just `&f' for some function `f', then we know
8377 that `f' is being called. */
8378 if (TREE_CODE (addr) == ADDR_EXPR
8379 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8380 return TREE_OPERAND (addr, 0);
8382 /* We couldn't figure out what was being called. */
8383 return NULL_TREE;
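/* For illustration, the two shapes recognized above: in a direct call
   `foo (x)' the callee operand is the ADDR_EXPR `&foo', so `foo' is
   returned; for a call through a read-only, initialized pointer such as

     int (*const fp) (int) = foo;
     ... fp (x) ...

   DECL_INITIAL of `fp' is consulted and again exposes `&foo'.  A call
   through an ordinary mutable function pointer yields NULL_TREE here.  */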
8386 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8387 return the associated function code, otherwise return CFN_LAST. */
8389 combined_fn
8390 get_call_combined_fn (const_tree call)
8392 /* It's invalid to call this function with anything but a CALL_EXPR. */
8393 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8395 if (!CALL_EXPR_FN (call))
8396 return as_combined_fn (CALL_EXPR_IFN (call));
8398 tree fndecl = get_callee_fndecl (call);
8399 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8400 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8402 return CFN_LAST;
8405 /* Comparator of indices based on tree_node_counts. */
8407 static int
8408 tree_nodes_cmp (const void *p1, const void *p2)
8410 const unsigned *n1 = (const unsigned *)p1;
8411 const unsigned *n2 = (const unsigned *)p2;
8413 return tree_node_counts[*n1] - tree_node_counts[*n2];
8416 /* Comparator of indices based on tree_code_counts. */
8418 static int
8419 tree_codes_cmp (const void *p1, const void *p2)
8421 const unsigned *n1 = (const unsigned *)p1;
8422 const unsigned *n2 = (const unsigned *)p2;
8424 return tree_code_counts[*n1] - tree_code_counts[*n2];
8427 #define TREE_MEM_USAGE_SPACES 40
8429 /* Print debugging information about tree nodes generated during the compile,
8430 and any language-specific information. */
8432 void
8433 dump_tree_statistics (void)
8435 if (GATHER_STATISTICS)
8437 uint64_t total_nodes, total_bytes;
8438 fprintf (stderr, "\nKind Nodes Bytes\n");
8439 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8440 total_nodes = total_bytes = 0;
8443 auto_vec<unsigned> indices (all_kinds);
8444 for (unsigned i = 0; i < all_kinds; i++)
8445 indices.quick_push (i);
8446 indices.qsort (tree_nodes_cmp);
8448 for (unsigned i = 0; i < (int) all_kinds; i++)
8450 unsigned j = indices[i];
8451 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8452 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8453 SIZE_AMOUNT (tree_node_sizes[j]));
8454 total_nodes += tree_node_counts[j];
8455 total_bytes += tree_node_sizes[j];
8457 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8458 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8459 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8460 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8464 fprintf (stderr, "Code Nodes\n");
8465 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8467 auto_vec<unsigned> indices (MAX_TREE_CODES);
8468 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8469 indices.quick_push (i);
8470 indices.qsort (tree_codes_cmp);
8472 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8474 unsigned j = indices[i];
8475 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8476 get_tree_code_name ((enum tree_code) j),
8477 SIZE_AMOUNT (tree_code_counts[j]));
8479 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8480 fprintf (stderr, "\n");
8481 ssanames_print_statistics ();
8482 fprintf (stderr, "\n");
8483 phinodes_print_statistics ();
8484 fprintf (stderr, "\n");
8487 else
8488 fprintf (stderr, "(No per-node statistics)\n");
8490 print_type_hash_statistics ();
8491 print_debug_expr_statistics ();
8492 print_value_expr_statistics ();
8493 lang_hooks.print_statistics ();
8496 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8498 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8500 unsigned
8501 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8503 /* This relies on the raw feedback's top 4 bits being zero. */
8504 #define FEEDBACK(X) ((X) * 0x04c11db7)
8505 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8506 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8507 static const unsigned syndromes[16] =
8509 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8510 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8511 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8512 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8514 #undef FEEDBACK
8515 #undef SYNDROME
8517 value <<= (32 - bytes * 8);
8518 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8520 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8522 chksum = (chksum << 4) ^ feedback;
8525 return chksum;
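/* As a reading aid only: the loop above is a table-driven, four-bits-at-a-time
   form of the classic unreflected CRC-32 (polynomial 0x04c11db7, high-order
   bit first, no final xor).  A byte-at-a-time bitwise sketch of what is
   assumed to be the same computation, not used by GCC:  */
#if 0
static unsigned
crc32_byte_sketch (unsigned chksum, unsigned char byte)
{
  chksum ^= (unsigned) byte << 24;
  for (int bit = 0; bit < 8; bit++)
    chksum = ((chksum & 0x80000000u)
	      ? (chksum << 1) ^ 0x04c11db7u
	      : (chksum << 1));
  return chksum;
}
#endif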
8528 /* Generate a crc32 of a string. */
8530 unsigned
8531 crc32_string (unsigned chksum, const char *string)
8533 do
8534 chksum = crc32_byte (chksum, *string);
8535 while (*string++);
8536 return chksum;
8539 /* P is a string that will be used in a symbol. Mask out any characters
8540 that are not valid in that context. */
8542 void
8543 clean_symbol_name (char *p)
8545 for (; *p; p++)
8546 if (! (ISALNUM (*p)
8547 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8548 || *p == '$'
8549 #endif
8550 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8551 || *p == '.'
8552 #endif
8554 *p = '_';
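/* For illustration: on a target where neither `$' nor `.' may appear in
   labels, this rewrites "crtstuff.c" in place to "crtstuff_c" and "foo$bar"
   to "foo_bar"; targets that accept those characters keep them unchanged.  */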
8557 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8559 /* Create a unique anonymous identifier. The identifier is still a
8560 valid assembly label. */
8562 tree
8563 make_anon_name ()
8565 const char *fmt =
8566 #if !defined (NO_DOT_IN_LABEL)
8567 "."
8568 #elif !defined (NO_DOLLAR_IN_LABEL)
8569 "$"
8570 #else
8571 "_"
8572 #endif
8573 "_anon_%d";
8575 char buf[24];
8576 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8577 gcc_checking_assert (len < int (sizeof (buf)));
8579 tree id = get_identifier_with_length (buf, len);
8580 IDENTIFIER_ANON_P (id) = true;
8582 return id;
8585 /* Generate a name for a special-purpose function.
8586 The generated name may need to be unique across the whole link.
8587 Changes to this function may also require corresponding changes to
8588 xstrdup_mask_random.
8589 TYPE is some string to identify the purpose of this function to the
8590 linker or collect2; it must start with an uppercase letter,
8591 one of:
8592 I - for constructors
8593 D - for destructors
8594 N - for C++ anonymous namespaces
8595 F - for DWARF unwind frame information. */
8597 tree
8598 get_file_function_name (const char *type)
8600 char *buf;
8601 const char *p;
8602 char *q;
8604 /* If we already have a name we know to be unique, just use that. */
8605 if (first_global_object_name)
8606 p = q = ASTRDUP (first_global_object_name);
8607 /* If the target is handling the constructors/destructors, they
8608 will be local to this file and the name is only necessary for
8609 debugging purposes.
8610 We also assign sub_I and sub_D suffixes to constructors called from
8611 the global static constructors. These are always local. */
8612 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8613 || (startswith (type, "sub_")
8614 && (type[4] == 'I' || type[4] == 'D')))
8616 const char *file = main_input_filename;
8617 if (! file)
8618 file = LOCATION_FILE (input_location);
8619 /* Just use the file's basename, because the full pathname
8620 might be quite long. */
8621 p = q = ASTRDUP (lbasename (file));
8623 else
8625 /* Otherwise, the name must be unique across the entire link.
8626 We don't have anything that we know to be unique to this translation
8627 unit, so use what we do have and throw in some randomness. */
8628 unsigned len;
8629 const char *name = weak_global_object_name;
8630 const char *file = main_input_filename;
8632 if (! name)
8633 name = "";
8634 if (! file)
8635 file = LOCATION_FILE (input_location);
8637 len = strlen (file);
8638 q = (char *) alloca (9 + 19 + len + 1);
8639 memcpy (q, file, len + 1);
8641 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8642 crc32_string (0, name), get_random_seed (false));
8644 p = q;
8647 clean_symbol_name (q);
8648 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8649 + strlen (type));
8651 /* Set up the name of the file-level functions we may need.
8652 Use a global object (which is already required to be unique over
8653 the program) rather than the file name (which imposes extra
8654 constraints). */
8655 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8657 return get_identifier (buf);
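/* For illustration, assuming first_global_object_name is unset: on a target
   with constructor/destructor support and main input file "widget.cc", TYPE
   "I" yields "_GLOBAL__I_widget_cc" after clean_symbol_name, and the
   constructors invoked from the file-level constructor use the familiar
   "_GLOBAL__sub_I_widget_cc" spelling.  */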
8660 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8662 /* Complain that the tree code of NODE does not match the expected 0
8663 terminated list of trailing codes. The trailing code list can be
8664 empty, for a more vague error message. FILE, LINE, and FUNCTION
8665 are of the caller. */
8667 void
8668 tree_check_failed (const_tree node, const char *file,
8669 int line, const char *function, ...)
8671 va_list args;
8672 const char *buffer;
8673 unsigned length = 0;
8674 enum tree_code code;
8676 va_start (args, function);
8677 while ((code = (enum tree_code) va_arg (args, int)))
8678 length += 4 + strlen (get_tree_code_name (code));
8679 va_end (args);
8680 if (length)
8682 char *tmp;
8683 va_start (args, function);
8684 length += strlen ("expected ");
8685 buffer = tmp = (char *) alloca (length);
8686 length = 0;
8687 while ((code = (enum tree_code) va_arg (args, int)))
8689 const char *prefix = length ? " or " : "expected ";
8691 strcpy (tmp + length, prefix);
8692 length += strlen (prefix);
8693 strcpy (tmp + length, get_tree_code_name (code));
8694 length += strlen (get_tree_code_name (code));
8696 va_end (args);
8698 else
8699 buffer = "unexpected node";
8701 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8702 buffer, get_tree_code_name (TREE_CODE (node)),
8703 function, trim_filename (file), line);
8706 /* Complain that the tree code of NODE does match the expected 0
8707 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8708 the caller. */
8710 void
8711 tree_not_check_failed (const_tree node, const char *file,
8712 int line, const char *function, ...)
8714 va_list args;
8715 char *buffer;
8716 unsigned length = 0;
8717 enum tree_code code;
8719 va_start (args, function);
8720 while ((code = (enum tree_code) va_arg (args, int)))
8721 length += 4 + strlen (get_tree_code_name (code));
8722 va_end (args);
8723 va_start (args, function);
8724 buffer = (char *) alloca (length);
8725 length = 0;
8726 while ((code = (enum tree_code) va_arg (args, int)))
8728 if (length)
8730 strcpy (buffer + length, " or ");
8731 length += 4;
8733 strcpy (buffer + length, get_tree_code_name (code));
8734 length += strlen (get_tree_code_name (code));
8736 va_end (args);
8738 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8739 buffer, get_tree_code_name (TREE_CODE (node)),
8740 function, trim_filename (file), line);
8743 /* Similar to tree_check_failed, except that we check for a class of tree
8744 code, given in CL. */
8746 void
8747 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8748 const char *file, int line, const char *function)
8750 internal_error
8751 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8752 TREE_CODE_CLASS_STRING (cl),
8753 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8754 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8757 /* Similar to tree_check_failed, except that instead of specifying a
8758 dozen codes, use the knowledge that they're all sequential. */
8760 void
8761 tree_range_check_failed (const_tree node, const char *file, int line,
8762 const char *function, enum tree_code c1,
8763 enum tree_code c2)
8765 char *buffer;
8766 unsigned length = 0;
8767 unsigned int c;
8769 for (c = c1; c <= c2; ++c)
8770 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
8772 length += strlen ("expected ");
8773 buffer = (char *) alloca (length);
8774 length = 0;
8776 for (c = c1; c <= c2; ++c)
8778 const char *prefix = length ? " or " : "expected ";
8780 strcpy (buffer + length, prefix);
8781 length += strlen (prefix);
8782 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
8783 length += strlen (get_tree_code_name ((enum tree_code) c));
8786 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8787 buffer, get_tree_code_name (TREE_CODE (node)),
8788 function, trim_filename (file), line);
8792 /* Similar to tree_check_failed, except that we check that a tree does
8793 not have the specified class, given in CL. */
8795 void
8796 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
8797 const char *file, int line, const char *function)
8799 internal_error
8800 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8801 TREE_CODE_CLASS_STRING (cl),
8802 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8803 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8807 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
8809 void
8810 omp_clause_check_failed (const_tree node, const char *file, int line,
8811 const char *function, enum omp_clause_code code)
8813 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
8814 "in %s, at %s:%d",
8815 omp_clause_code_name[code],
8816 get_tree_code_name (TREE_CODE (node)),
8817 function, trim_filename (file), line);
8821 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
8823 void
8824 omp_clause_range_check_failed (const_tree node, const char *file, int line,
8825 const char *function, enum omp_clause_code c1,
8826 enum omp_clause_code c2)
8828 char *buffer;
8829 unsigned length = 0;
8830 unsigned int c;
8832 for (c = c1; c <= c2; ++c)
8833 length += 4 + strlen (omp_clause_code_name[c]);
8835 length += strlen ("expected ");
8836 buffer = (char *) alloca (length);
8837 length = 0;
8839 for (c = c1; c <= c2; ++c)
8841 const char *prefix = length ? " or " : "expected ";
8843 strcpy (buffer + length, prefix);
8844 length += strlen (prefix);
8845 strcpy (buffer + length, omp_clause_code_name[c]);
8846 length += strlen (omp_clause_code_name[c]);
8849 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8850 buffer, omp_clause_code_name[TREE_CODE (node)],
8851 function, trim_filename (file), line);
8855 #undef DEFTREESTRUCT
8856 #define DEFTREESTRUCT(VAL, NAME) NAME,
8858 static const char *ts_enum_names[] = {
8859 #include "treestruct.def"
8861 #undef DEFTREESTRUCT
8863 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
8865 /* Similar to tree_class_check_failed, except that we check for
8866 whether CODE contains the tree structure identified by EN. */
8868 void
8869 tree_contains_struct_check_failed (const_tree node,
8870 const enum tree_node_structure_enum en,
8871 const char *file, int line,
8872 const char *function)
8874 internal_error
8875 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
8876 TS_ENUM_NAME (en),
8877 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8881 /* Similar to above, except that the check is for the bounds of a TREE_INT_CST's
8882 (dynamically sized) vector of elements. */
8884 void
8885 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
8886 const char *function)
8888 internal_error
8889 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
8890 "at %s:%d",
8891 idx + 1, len, function, trim_filename (file), line);
8894 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
8895 (dynamically sized) vector. */
8897 void
8898 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
8899 const char *function)
8901 internal_error
8902 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
8903 idx + 1, len, function, trim_filename (file), line);
8906 /* Similar to above, except that the check is for the bounds of the operand
8907 vector of an expression node EXP. */
8909 void
8910 tree_operand_check_failed (int idx, const_tree exp, const char *file,
8911 int line, const char *function)
8913 enum tree_code code = TREE_CODE (exp);
8914 internal_error
8915 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
8916 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
8917 function, trim_filename (file), line);
8920 /* Similar to above, except that the check is for the number of
8921 operands of an OMP_CLAUSE node. */
8923 void
8924 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
8925 int line, const char *function)
8927 internal_error
8928 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
8929 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
8930 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
8931 trim_filename (file), line);
8933 #endif /* ENABLE_TREE_CHECKING */
8935 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
8936 and mapped to the machine mode MODE. Initialize its fields and build
8937 the information necessary for debugging output. */
8939 static tree
8940 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
8942 tree t;
8943 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
8945 t = make_node (VECTOR_TYPE);
8946 TREE_TYPE (t) = mv_innertype;
8947 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
8948 SET_TYPE_MODE (t, mode);
8950 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
8951 SET_TYPE_STRUCTURAL_EQUALITY (t);
8952 else if ((TYPE_CANONICAL (mv_innertype) != innertype
8953 || mode != VOIDmode)
8954 && !VECTOR_BOOLEAN_TYPE_P (t))
8955 TYPE_CANONICAL (t)
8956 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
8958 layout_type (t);
8960 hashval_t hash = type_hash_canon_hash (t);
8961 t = type_hash_canon (hash, t);
8963 /* We have built a main variant, based on the main variant of the
8964 inner type. Use it to build the variant we return. */
8965 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
8966 && TREE_TYPE (t) != innertype)
8967 return build_type_attribute_qual_variant (t,
8968 TYPE_ATTRIBUTES (innertype),
8969 TYPE_QUALS (innertype));
8971 return t;
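/* Illustrative usage sketch, assuming a target that provides V4SFmode;
   not part of this file.  */
#if 0
  tree v4sf = make_vector_type (float_type_node, 4, V4SFmode);
  /* TYPE_VECTOR_SUBPARTS (v4sf) is 4, TYPE_MODE (v4sf) is V4SFmode, and
     type_hash_canon lets repeated requests share one main variant.  */
#endif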
8974 static tree
8975 make_or_reuse_type (unsigned size, int unsignedp)
8977 int i;
8979 if (size == INT_TYPE_SIZE)
8980 return unsignedp ? unsigned_type_node : integer_type_node;
8981 if (size == CHAR_TYPE_SIZE)
8982 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
8983 if (size == SHORT_TYPE_SIZE)
8984 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
8985 if (size == LONG_TYPE_SIZE)
8986 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
8987 if (size == LONG_LONG_TYPE_SIZE)
8988 return (unsignedp ? long_long_unsigned_type_node
8989 : long_long_integer_type_node);
8991 for (i = 0; i < NUM_INT_N_ENTS; i ++)
8992 if (size == int_n_data[i].bitsize
8993 && int_n_enabled_p[i])
8994 return (unsignedp ? int_n_trees[i].unsigned_type
8995 : int_n_trees[i].signed_type);
8997 if (unsignedp)
8998 return make_unsigned_type (size);
8999 else
9000 return make_signed_type (size);
9003 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9005 static tree
9006 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9008 if (satp)
9010 if (size == SHORT_FRACT_TYPE_SIZE)
9011 return unsignedp ? sat_unsigned_short_fract_type_node
9012 : sat_short_fract_type_node;
9013 if (size == FRACT_TYPE_SIZE)
9014 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9015 if (size == LONG_FRACT_TYPE_SIZE)
9016 return unsignedp ? sat_unsigned_long_fract_type_node
9017 : sat_long_fract_type_node;
9018 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9019 return unsignedp ? sat_unsigned_long_long_fract_type_node
9020 : sat_long_long_fract_type_node;
9022 else
9024 if (size == SHORT_FRACT_TYPE_SIZE)
9025 return unsignedp ? unsigned_short_fract_type_node
9026 : short_fract_type_node;
9027 if (size == FRACT_TYPE_SIZE)
9028 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9029 if (size == LONG_FRACT_TYPE_SIZE)
9030 return unsignedp ? unsigned_long_fract_type_node
9031 : long_fract_type_node;
9032 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9033 return unsignedp ? unsigned_long_long_fract_type_node
9034 : long_long_fract_type_node;
9037 return make_fract_type (size, unsignedp, satp);
9040 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9042 static tree
9043 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9045 if (satp)
9047 if (size == SHORT_ACCUM_TYPE_SIZE)
9048 return unsignedp ? sat_unsigned_short_accum_type_node
9049 : sat_short_accum_type_node;
9050 if (size == ACCUM_TYPE_SIZE)
9051 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9052 if (size == LONG_ACCUM_TYPE_SIZE)
9053 return unsignedp ? sat_unsigned_long_accum_type_node
9054 : sat_long_accum_type_node;
9055 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9056 return unsignedp ? sat_unsigned_long_long_accum_type_node
9057 : sat_long_long_accum_type_node;
9059 else
9061 if (size == SHORT_ACCUM_TYPE_SIZE)
9062 return unsignedp ? unsigned_short_accum_type_node
9063 : short_accum_type_node;
9064 if (size == ACCUM_TYPE_SIZE)
9065 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9066 if (size == LONG_ACCUM_TYPE_SIZE)
9067 return unsignedp ? unsigned_long_accum_type_node
9068 : long_accum_type_node;
9069 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9070 return unsignedp ? unsigned_long_long_accum_type_node
9071 : long_long_accum_type_node;
9074 return make_accum_type (size, unsignedp, satp);
9078 /* Create an atomic variant node for TYPE. This routine is called
9079 during initialization of data types to create the 5 basic atomic
9080 types. The generic build_variant_type function requires these to
9081 already be set up in order to function properly, so cannot be
9082 called from there. If ALIGN is non-zero, then ensure alignment is
9083 overridden to this value. */
9085 static tree
9086 build_atomic_base (tree type, unsigned int align)
9088 tree t;
9090 /* Make sure it's not already registered. */
9091 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9092 return t;
9094 t = build_variant_type_copy (type);
9095 set_type_quals (t, TYPE_QUAL_ATOMIC);
9097 if (align)
9098 SET_TYPE_ALIGN (t, align);
9100 return t;
9103 /* Information about the _FloatN and _FloatNx types. This must be in
9104 the same order as the corresponding TI_* enum values. */
9105 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9107 { 16, false },
9108 { 32, false },
9109 { 64, false },
9110 { 128, false },
9111 { 32, true },
9112 { 64, true },
9113 { 128, true },
9117 /* Create nodes for all integer types (and error_mark_node) using the sizes
9118 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9120 void
9121 build_common_tree_nodes (bool signed_char)
9123 int i;
9125 error_mark_node = make_node (ERROR_MARK);
9126 TREE_TYPE (error_mark_node) = error_mark_node;
9128 initialize_sizetypes ();
9130 /* Define both `signed char' and `unsigned char'. */
9131 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9132 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9133 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9134 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9136 /* Define `char', which is like either `signed char' or `unsigned char'
9137 but not the same as either. */
9138 char_type_node
9139 = (signed_char
9140 ? make_signed_type (CHAR_TYPE_SIZE)
9141 : make_unsigned_type (CHAR_TYPE_SIZE));
9142 TYPE_STRING_FLAG (char_type_node) = 1;
9144 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9145 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9146 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9147 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9148 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9149 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9150 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9151 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9153 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9155 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9156 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9158 if (int_n_enabled_p[i])
9160 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9161 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9165 /* Define a boolean type. This type only represents boolean values but
9166 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9167 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9168 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9169 TYPE_PRECISION (boolean_type_node) = 1;
9170 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9172 /* Define what type to use for size_t. */
9173 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9174 size_type_node = unsigned_type_node;
9175 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9176 size_type_node = long_unsigned_type_node;
9177 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9178 size_type_node = long_long_unsigned_type_node;
9179 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9180 size_type_node = short_unsigned_type_node;
9181 else
9183 int i;
9185 size_type_node = NULL_TREE;
9186 for (i = 0; i < NUM_INT_N_ENTS; i++)
9187 if (int_n_enabled_p[i])
9189 char name[50], altname[50];
9190 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9191 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9193 if (strcmp (name, SIZE_TYPE) == 0
9194 || strcmp (altname, SIZE_TYPE) == 0)
9196 size_type_node = int_n_trees[i].unsigned_type;
9199 if (size_type_node == NULL_TREE)
9200 gcc_unreachable ();
9203 /* Define what type to use for ptrdiff_t. */
9204 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9205 ptrdiff_type_node = integer_type_node;
9206 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9207 ptrdiff_type_node = long_integer_type_node;
9208 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9209 ptrdiff_type_node = long_long_integer_type_node;
9210 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9211 ptrdiff_type_node = short_integer_type_node;
9212 else
9214 ptrdiff_type_node = NULL_TREE;
9215 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9216 if (int_n_enabled_p[i])
9218 char name[50], altname[50];
9219 sprintf (name, "__int%d", int_n_data[i].bitsize);
9220 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9222 if (strcmp (name, PTRDIFF_TYPE) == 0
9223 || strcmp (altname, PTRDIFF_TYPE) == 0)
9224 ptrdiff_type_node = int_n_trees[i].signed_type;
9226 if (ptrdiff_type_node == NULL_TREE)
9227 gcc_unreachable ();
9230 /* Fill in the rest of the sized types. Reuse existing type nodes
9231 when possible. */
9232 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9233 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9234 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9235 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9236 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9238 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9239 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9240 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9241 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9242 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9244 /* Don't call build_qualified_type for atomics. That routine does
9245 special processing for atomics, and until they are initialized
9246 it's better not to make that call.
9248 Check to see if there is a target override for atomic types. */
9250 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9251 targetm.atomic_align_for_mode (QImode));
9252 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9253 targetm.atomic_align_for_mode (HImode));
9254 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9255 targetm.atomic_align_for_mode (SImode));
9256 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9257 targetm.atomic_align_for_mode (DImode));
9258 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9259 targetm.atomic_align_for_mode (TImode));
9261 access_public_node = get_identifier ("public");
9262 access_protected_node = get_identifier ("protected");
9263 access_private_node = get_identifier ("private");
9265 /* Define these next since types below may use them. */
9266 integer_zero_node = build_int_cst (integer_type_node, 0);
9267 integer_one_node = build_int_cst (integer_type_node, 1);
9268 integer_three_node = build_int_cst (integer_type_node, 3);
9269 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9271 size_zero_node = size_int (0);
9272 size_one_node = size_int (1);
9273 bitsize_zero_node = bitsize_int (0);
9274 bitsize_one_node = bitsize_int (1);
9275 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9277 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9278 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9280 void_type_node = make_node (VOID_TYPE);
9281 layout_type (void_type_node);
9283 /* We are not going to have real types in C with less than byte alignment,
9284 so we might as well not have any types that claim to have it. */
9285 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9286 TYPE_USER_ALIGN (void_type_node) = 0;
9288 void_node = make_node (VOID_CST);
9289 TREE_TYPE (void_node) = void_type_node;
9291 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9292 layout_type (TREE_TYPE (null_pointer_node));
9294 ptr_type_node = build_pointer_type (void_type_node);
9295 const_ptr_type_node
9296 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9297 for (unsigned i = 0;
9298 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
9299 ++i)
9300 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9302 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9304 float_type_node = make_node (REAL_TYPE);
9305 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9306 layout_type (float_type_node);
9308 double_type_node = make_node (REAL_TYPE);
9309 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9310 layout_type (double_type_node);
9312 long_double_type_node = make_node (REAL_TYPE);
9313 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9314 layout_type (long_double_type_node);
9316 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9318 int n = floatn_nx_types[i].n;
9319 bool extended = floatn_nx_types[i].extended;
9320 scalar_float_mode mode;
9321 if (!targetm.floatn_mode (n, extended).exists (&mode))
9322 continue;
9323 int precision = GET_MODE_PRECISION (mode);
9324 /* Work around the rs6000 KFmode having precision 113 not
9325 128. */
9326 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9327 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9328 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9329 if (!extended)
9330 gcc_assert (min_precision == n);
9331 if (precision < min_precision)
9332 precision = min_precision;
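/* A worked example of the formula above: for the IEEE single format as GCC
   describes it, fmt->p is 24, fmt->emin is -125 and fmt->emax is 128, so
   min_precision is 24 + ceil_log2 (253) = 24 + 8 = 32, matching n for
   _Float32 as the assertion expects.  The rs6000 KFmode case is the one
   where the mode's own precision (113) is below the 128 computed here and
   therefore gets bumped.  */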
9333 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9334 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9335 layout_type (FLOATN_NX_TYPE_NODE (i));
9336 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9339 float_ptr_type_node = build_pointer_type (float_type_node);
9340 double_ptr_type_node = build_pointer_type (double_type_node);
9341 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9342 integer_ptr_type_node = build_pointer_type (integer_type_node);
9344 /* Fixed size integer types. */
9345 uint16_type_node = make_or_reuse_type (16, 1);
9346 uint32_type_node = make_or_reuse_type (32, 1);
9347 uint64_type_node = make_or_reuse_type (64, 1);
9348 if (targetm.scalar_mode_supported_p (TImode))
9349 uint128_type_node = make_or_reuse_type (128, 1);
9351 /* Decimal float types. */
9352 if (targetm.decimal_float_supported_p ())
9354 dfloat32_type_node = make_node (REAL_TYPE);
9355 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9356 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9357 layout_type (dfloat32_type_node);
9359 dfloat64_type_node = make_node (REAL_TYPE);
9360 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9361 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9362 layout_type (dfloat64_type_node);
9364 dfloat128_type_node = make_node (REAL_TYPE);
9365 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9366 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9367 layout_type (dfloat128_type_node);
9370 complex_integer_type_node = build_complex_type (integer_type_node, true);
9371 complex_float_type_node = build_complex_type (float_type_node, true);
9372 complex_double_type_node = build_complex_type (double_type_node, true);
9373 complex_long_double_type_node = build_complex_type (long_double_type_node,
9374 true);
9376 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9378 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9379 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9380 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9383 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9384 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9385 sat_ ## KIND ## _type_node = \
9386 make_sat_signed_ ## KIND ## _type (SIZE); \
9387 sat_unsigned_ ## KIND ## _type_node = \
9388 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9389 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9390 unsigned_ ## KIND ## _type_node = \
9391 make_unsigned_ ## KIND ## _type (SIZE);
9393 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9394 sat_ ## WIDTH ## KIND ## _type_node = \
9395 make_sat_signed_ ## KIND ## _type (SIZE); \
9396 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9397 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9398 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9399 unsigned_ ## WIDTH ## KIND ## _type_node = \
9400 make_unsigned_ ## KIND ## _type (SIZE);
9402 /* Make fixed-point type nodes based on four different widths. */
9403 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9404 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9405 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9406 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9407 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9409 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9410 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9411 NAME ## _type_node = \
9412 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9413 u ## NAME ## _type_node = \
9414 make_or_reuse_unsigned_ ## KIND ## _type \
9415 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9416 sat_ ## NAME ## _type_node = \
9417 make_or_reuse_sat_signed_ ## KIND ## _type \
9418 (GET_MODE_BITSIZE (MODE ## mode)); \
9419 sat_u ## NAME ## _type_node = \
9420 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9421 (GET_MODE_BITSIZE (U ## MODE ## mode));
9423 /* Fixed-point type and mode nodes. */
9424 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9425 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9426 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9427 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9428 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9429 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9430 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9431 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9432 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9433 MAKE_FIXED_MODE_NODE (accum, da, DA)
9434 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9437 tree t = targetm.build_builtin_va_list ();
9439 /* Many back-ends define record types without setting TYPE_NAME.
9440 If we copied the record type here, we'd keep the original
9441 record type without a name. This breaks name mangling. So,
9442 don't copy record types and let c_common_nodes_and_builtins()
9443 declare the type to be __builtin_va_list. */
9444 if (TREE_CODE (t) != RECORD_TYPE)
9445 t = build_variant_type_copy (t);
9447 va_list_type_node = t;
9450 /* SCEV analyzer global shared trees. */
9451 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9452 TREE_TYPE (chrec_dont_know) = void_type_node;
9453 chrec_known = make_node (SCEV_KNOWN);
9454 TREE_TYPE (chrec_known) = void_type_node;
9457 /* Modify DECL for given flags.
9458 TM_PURE attribute is set only on types, so the function will modify
9459 DECL's type when ECF_TM_PURE is used. */
9461 void
9462 set_call_expr_flags (tree decl, int flags)
9464 if (flags & ECF_NOTHROW)
9465 TREE_NOTHROW (decl) = 1;
9466 if (flags & ECF_CONST)
9467 TREE_READONLY (decl) = 1;
9468 if (flags & ECF_PURE)
9469 DECL_PURE_P (decl) = 1;
9470 if (flags & ECF_LOOPING_CONST_OR_PURE)
9471 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9472 if (flags & ECF_NOVOPS)
9473 DECL_IS_NOVOPS (decl) = 1;
9474 if (flags & ECF_NORETURN)
9475 TREE_THIS_VOLATILE (decl) = 1;
9476 if (flags & ECF_MALLOC)
9477 DECL_IS_MALLOC (decl) = 1;
9478 if (flags & ECF_RETURNS_TWICE)
9479 DECL_IS_RETURNS_TWICE (decl) = 1;
9480 if (flags & ECF_LEAF)
9481 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9482 NULL, DECL_ATTRIBUTES (decl));
9483 if (flags & ECF_COLD)
9484 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9485 NULL, DECL_ATTRIBUTES (decl));
9486 if (flags & ECF_RET1)
9487 DECL_ATTRIBUTES (decl)
9488 = tree_cons (get_identifier ("fn spec"),
9489 build_tree_list (NULL_TREE, build_string (2, "1 ")),
9490 DECL_ATTRIBUTES (decl));
9491 if ((flags & ECF_TM_PURE) && flag_tm)
9492 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9493 /* Looping const or pure is implied by noreturn.
9494 There is currently no way to declare looping const or looping pure alone. */
9495 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9496 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9500 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9502 static void
9503 local_define_builtin (const char *name, tree type, enum built_in_function code,
9504 const char *library_name, int ecf_flags)
9506 tree decl;
9508 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9509 library_name, NULL_TREE);
9510 set_call_expr_flags (decl, ecf_flags);
9512 set_builtin_decl (code, decl, true);
9515 /* Call this function after instantiating all builtins that the language
9516 front end cares about. This will build the rest of the builtins
9517 and internal functions that are relied upon by the tree optimizers and
9518 the middle-end. */
9520 void
9521 build_common_builtin_nodes (void)
9523 tree tmp, ftype;
9524 int ecf_flags;
9526 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
9528 ftype = build_function_type_list (void_type_node,
9529 ptr_type_node,
9530 ptr_type_node,
9531 integer_type_node,
9532 NULL_TREE);
9533 local_define_builtin ("__builtin_clear_padding", ftype,
9534 BUILT_IN_CLEAR_PADDING,
9535 "__builtin_clear_padding",
9536 ECF_LEAF | ECF_NOTHROW);
9539 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9540 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9542 ftype = build_function_type (void_type_node, void_list_node);
9543 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9544 local_define_builtin ("__builtin_unreachable", ftype,
9545 BUILT_IN_UNREACHABLE,
9546 "__builtin_unreachable",
9547 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9548 | ECF_CONST | ECF_COLD);
9549 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9550 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9551 "abort",
9552 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9555 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9556 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9558 ftype = build_function_type_list (ptr_type_node,
9559 ptr_type_node, const_ptr_type_node,
9560 size_type_node, NULL_TREE);
9562 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9563 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9564 "memcpy", ECF_NOTHROW | ECF_LEAF);
9565 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9566 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9567 "memmove", ECF_NOTHROW | ECF_LEAF);
9570 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9572 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9573 const_ptr_type_node, size_type_node,
9574 NULL_TREE);
9575 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9576 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9579 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9581 ftype = build_function_type_list (ptr_type_node,
9582 ptr_type_node, integer_type_node,
9583 size_type_node, NULL_TREE);
9584 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9585 "memset", ECF_NOTHROW | ECF_LEAF);
9588 /* If we're checking the stack, `alloca' can throw. */
9589 const int alloca_flags
9590 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9592 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9594 ftype = build_function_type_list (ptr_type_node,
9595 size_type_node, NULL_TREE);
9596 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9597 "alloca", alloca_flags);
9600 ftype = build_function_type_list (ptr_type_node, size_type_node,
9601 size_type_node, NULL_TREE);
9602 local_define_builtin ("__builtin_alloca_with_align", ftype,
9603 BUILT_IN_ALLOCA_WITH_ALIGN,
9604 "__builtin_alloca_with_align",
9605 alloca_flags);
9607 ftype = build_function_type_list (ptr_type_node, size_type_node,
9608 size_type_node, size_type_node, NULL_TREE);
9609 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9610 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9611 "__builtin_alloca_with_align_and_max",
9612 alloca_flags);
9614 ftype = build_function_type_list (void_type_node,
9615 ptr_type_node, ptr_type_node,
9616 ptr_type_node, NULL_TREE);
9617 local_define_builtin ("__builtin_init_trampoline", ftype,
9618 BUILT_IN_INIT_TRAMPOLINE,
9619 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9620 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9621 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9622 "__builtin_init_heap_trampoline",
9623 ECF_NOTHROW | ECF_LEAF);
9624 local_define_builtin ("__builtin_init_descriptor", ftype,
9625 BUILT_IN_INIT_DESCRIPTOR,
9626 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9628 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9629 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9630 BUILT_IN_ADJUST_TRAMPOLINE,
9631 "__builtin_adjust_trampoline",
9632 ECF_CONST | ECF_NOTHROW);
9633 local_define_builtin ("__builtin_adjust_descriptor", ftype,
9634 BUILT_IN_ADJUST_DESCRIPTOR,
9635 "__builtin_adjust_descriptor",
9636 ECF_CONST | ECF_NOTHROW);
9638 ftype = build_function_type_list (void_type_node,
9639 ptr_type_node, ptr_type_node, NULL_TREE);
9640 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
9641 local_define_builtin ("__builtin___clear_cache", ftype,
9642 BUILT_IN_CLEAR_CACHE,
9643 "__clear_cache",
9644 ECF_NOTHROW);
9646 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9647 BUILT_IN_NONLOCAL_GOTO,
9648 "__builtin_nonlocal_goto",
9649 ECF_NORETURN | ECF_NOTHROW);
9651 ftype = build_function_type_list (void_type_node,
9652 ptr_type_node, ptr_type_node, NULL_TREE);
9653 local_define_builtin ("__builtin_setjmp_setup", ftype,
9654 BUILT_IN_SETJMP_SETUP,
9655 "__builtin_setjmp_setup", ECF_NOTHROW);
9657 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9658 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9659 BUILT_IN_SETJMP_RECEIVER,
9660 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9662 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9663 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9664 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9666 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9667 local_define_builtin ("__builtin_stack_restore", ftype,
9668 BUILT_IN_STACK_RESTORE,
9669 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9671 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9672 const_ptr_type_node, size_type_node,
9673 NULL_TREE);
9674 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
9675 "__builtin_memcmp_eq",
9676 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9678 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
9679 "__builtin_strncmp_eq",
9680 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9682 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
9683 "__builtin_strcmp_eq",
9684 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9686 /* If there's a possibility that we might use the ARM EABI, build the
9687 alternate __cxa_end_cleanup node used to resume from C++. */
9688 if (targetm.arm_eabi_unwinder)
9690 ftype = build_function_type_list (void_type_node, NULL_TREE);
9691 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9692 BUILT_IN_CXA_END_CLEANUP,
9693 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9696 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9697 local_define_builtin ("__builtin_unwind_resume", ftype,
9698 BUILT_IN_UNWIND_RESUME,
9699 ((targetm_common.except_unwind_info (&global_options)
9700 == UI_SJLJ)
9701 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9702 ECF_NORETURN);
9704 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9706 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9707 NULL_TREE);
9708 local_define_builtin ("__builtin_return_address", ftype,
9709 BUILT_IN_RETURN_ADDRESS,
9710 "__builtin_return_address",
9711 ECF_NOTHROW);
9714 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9715 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9717 ftype = build_function_type_list (void_type_node, ptr_type_node,
9718 ptr_type_node, NULL_TREE);
9719 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
9720 local_define_builtin ("__cyg_profile_func_enter", ftype,
9721 BUILT_IN_PROFILE_FUNC_ENTER,
9722 "__cyg_profile_func_enter", 0);
9723 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9724 local_define_builtin ("__cyg_profile_func_exit", ftype,
9725 BUILT_IN_PROFILE_FUNC_EXIT,
9726 "__cyg_profile_func_exit", 0);
9729 /* The exception object and filter values from the runtime. The argument
9730 must be zero before exception lowering, i.e. from the front end. After
9731 exception lowering, it will be the region number for the exception
9732 landing pad. These functions are PURE instead of CONST to prevent
9733 them from being hoisted past the exception edge that will initialize
9734 their values in the landing pad. */
9735 ftype = build_function_type_list (ptr_type_node,
9736 integer_type_node, NULL_TREE);
9737 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
9738 /* Only use TM_PURE if we have TM language support. */
9739 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
9740 ecf_flags |= ECF_TM_PURE;
9741 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
9742 "__builtin_eh_pointer", ecf_flags);
9744 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
9745 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
9746 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
9747 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9749 ftype = build_function_type_list (void_type_node,
9750 integer_type_node, integer_type_node,
9751 NULL_TREE);
9752 local_define_builtin ("__builtin_eh_copy_values", ftype,
9753 BUILT_IN_EH_COPY_VALUES,
9754 "__builtin_eh_copy_values", ECF_NOTHROW);
9756 /* Complex multiplication and division. These are handled as builtins
9757 rather than optabs because emit_library_call_value doesn't support
9758 complex. Further, we can do slightly better with folding these
9759 beasties if the real and imaginary parts of the arguments are separate. */
9761 int mode;
9763 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
9765 char mode_name_buf[4], *q;
9766 const char *p;
9767 enum built_in_function mcode, dcode;
9768 tree type, inner_type;
9769 const char *prefix = "__";
9771 if (targetm.libfunc_gnu_prefix)
9772 prefix = "__gnu_";
9774 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
9775 if (type == NULL)
9776 continue;
9777 inner_type = TREE_TYPE (type);
9779 ftype = build_function_type_list (type, inner_type, inner_type,
9780 inner_type, inner_type, NULL_TREE);
9782 mcode = ((enum built_in_function)
9783 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9784 dcode = ((enum built_in_function)
9785 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9787 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
9788 *q = TOLOWER (*p);
9789 *q = '\0';
9791 /* For -ftrapping-math these should throw from a former
9792 -fnon-call-exception stmt. */
9793 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
9794 NULL);
9795 local_define_builtin (built_in_names[mcode], ftype, mcode,
9796 built_in_names[mcode],
9797 ECF_CONST | ECF_LEAF);
9799 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
9800 NULL);
9801 local_define_builtin (built_in_names[dcode], ftype, dcode,
9802 built_in_names[dcode],
9803 ECF_CONST | ECF_LEAF);
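/* For SCmode, for instance, this registers "__mulsc3" and "__divsc3"
   (or "__gnu_mulsc3" and "__gnu_divsc3" when the target uses the GNU
   libfunc prefix), matching the complex-float helper routines provided
   by libgcc.  */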
9807 init_internal_fns ();
9810 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
9811 better way.
9813 If we requested a pointer to a vector, build up the pointers that
9814 we stripped off while looking for the inner type. Similarly for
9815 return values from functions.
9817 The argument TYPE is the top of the chain, and BOTTOM is the
9818 new type which we will point to. */
9820 tree
9821 reconstruct_complex_type (tree type, tree bottom)
9823 tree inner, outer;
9825 if (TREE_CODE (type) == POINTER_TYPE)
9827 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9828 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
9829 TYPE_REF_CAN_ALIAS_ALL (type));
9831 else if (TREE_CODE (type) == REFERENCE_TYPE)
9833 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9834 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
9835 TYPE_REF_CAN_ALIAS_ALL (type));
9837 else if (TREE_CODE (type) == ARRAY_TYPE)
9839 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9840 outer = build_array_type (inner, TYPE_DOMAIN (type));
9842 else if (TREE_CODE (type) == FUNCTION_TYPE)
9844 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9845 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
9847 else if (TREE_CODE (type) == METHOD_TYPE)
9849 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9850 /* The build_method_type_directly() routine prepends 'this' to the
9851 argument list, so we must compensate by getting rid of it. */
9852 outer
9853 = build_method_type_directly
9854 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
9855 inner,
9856 TREE_CHAIN (TYPE_ARG_TYPES (type)));
9858 else if (TREE_CODE (type) == OFFSET_TYPE)
9860 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9861 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
9863 else
9864 return bottom;
9866 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
9867 TYPE_QUALS (type));
9870 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
9871 the inner type. */
9872 tree
9873 build_vector_type_for_mode (tree innertype, machine_mode mode)
9875 poly_int64 nunits;
9876 unsigned int bitsize;
9878 switch (GET_MODE_CLASS (mode))
9880 case MODE_VECTOR_BOOL:
9881 case MODE_VECTOR_INT:
9882 case MODE_VECTOR_FLOAT:
9883 case MODE_VECTOR_FRACT:
9884 case MODE_VECTOR_UFRACT:
9885 case MODE_VECTOR_ACCUM:
9886 case MODE_VECTOR_UACCUM:
9887 nunits = GET_MODE_NUNITS (mode);
9888 break;
9890 case MODE_INT:
9891 /* Check that there are no leftover bits. */
9892 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
9893 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
9894 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
9895 break;
9897 default:
9898 gcc_unreachable ();
9901 return make_vector_type (innertype, nunits, mode);
9904 /* Similarly, but takes the inner type and number of units, which must be
9905 a power of two. */
9907 tree
9908 build_vector_type (tree innertype, poly_int64 nunits)
9910 return make_vector_type (innertype, nunits, VOIDmode);
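/* For example, build_vector_type (float_type_node, 4) yields a four-element
   float vector; with VOIDmode the machine mode is derived from the element
   mode and count when the type is laid out (e.g. V4SFmode on targets that
   provide it).  */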
9913 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
9915 tree
9916 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
9918 gcc_assert (mask_mode != BLKmode);
9920 unsigned HOST_WIDE_INT esize;
9921 if (VECTOR_MODE_P (mask_mode))
9923 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
9924 esize = vector_element_size (vsize, nunits);
9926 else
9927 esize = 1;
9929 tree bool_type = build_nonstandard_boolean_type (esize);
9931 return make_vector_type (bool_type, nunits, mask_mode);
9934 /* Build a vector type that holds one boolean result for each element of
9935 vector type VECTYPE. The public interface for this operation is
9936 truth_type_for. */
9938 static tree
9939 build_truth_vector_type_for (tree vectype)
9941 machine_mode vector_mode = TYPE_MODE (vectype);
9942 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
9944 machine_mode mask_mode;
9945 if (VECTOR_MODE_P (vector_mode)
9946 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
9947 return build_truth_vector_type_for_mode (nunits, mask_mode);
9949 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
9950 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
9951 tree bool_type = build_nonstandard_boolean_type (esize);
9953 return make_vector_type (bool_type, nunits, VOIDmode);
9956 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
9957 set. */
9959 tree
9960 build_opaque_vector_type (tree innertype, poly_int64 nunits)
9962 tree t = make_vector_type (innertype, nunits, VOIDmode);
9963 tree cand;
9964 /* We always build the non-opaque variant before the opaque one,
9965 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
9966 cand = TYPE_NEXT_VARIANT (t);
9967 if (cand
9968 && TYPE_VECTOR_OPAQUE (cand)
9969 && check_qualified_type (cand, t, TYPE_QUALS (t)))
9970 return cand;
9971 /* Otherwise build a variant type and make sure to queue it after
9972 the non-opaque type. */
9973 cand = build_distinct_type_copy (t);
9974 TYPE_VECTOR_OPAQUE (cand) = true;
9975 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
9976 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
9977 TYPE_NEXT_VARIANT (t) = cand;
9978 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
9979 return cand;
9982 /* Return the value of element I of VECTOR_CST T as a wide_int. */
9984 static poly_wide_int
9985 vector_cst_int_elt (const_tree t, unsigned int i)
9987 /* First handle elements that are directly encoded. */
9988 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
9989 if (i < encoded_nelts)
9990 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
9992 /* Identify the pattern that contains element I and work out the index of
9993 the last encoded element for that pattern. */
9994 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
9995 unsigned int pattern = i % npatterns;
9996 unsigned int count = i / npatterns;
9997 unsigned int final_i = encoded_nelts - npatterns + pattern;
9999 /* If there are no steps, the final encoded value is the right one. */
10000 if (!VECTOR_CST_STEPPED_P (t))
10001 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10003 /* Otherwise work out the value from the last two encoded elements. */
10004 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10005 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10006 poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
10007 return wi::to_poly_wide (v2) + (count - 2) * diff;
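/* Worked example (illustrative): a stepped VECTOR_CST encoding one pattern
   with the three elements { 1, 2, 3 } represents { 1, 2, 3, 4, ... }.  For
   element I = 5 we get v1 = 2, v2 = 3, diff = 1, and the result is
   3 + (5 - 2) * 1 = 6.  */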
10010 /* Return the value of element I of VECTOR_CST T. */
10012 tree
10013 vector_cst_elt (const_tree t, unsigned int i)
10015 /* First handle elements that are directly encoded. */
10016 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10017 if (i < encoded_nelts)
10018 return VECTOR_CST_ENCODED_ELT (t, i);
10020 /* If there are no steps, the final encoded value is the right one. */
10021 if (!VECTOR_CST_STEPPED_P (t))
10023 /* Identify the pattern that contains element I and work out the index of
10024 the last encoded element for that pattern. */
10025 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10026 unsigned int pattern = i % npatterns;
10027 unsigned int final_i = encoded_nelts - npatterns + pattern;
10028 return VECTOR_CST_ENCODED_ELT (t, final_i);
10031 /* Otherwise work out the value from the last two encoded elements. */
10032 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10033 vector_cst_int_elt (t, i));
10036 /* Given an initializer INIT, return TRUE if INIT is zero or some
10037 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10038 null, set *NONZERO if and only if INIT is known not to be all
10039 zeros. The combination of return value of false and *NONZERO
10040 false implies that INIT may but need not be all zeros. Other
10041 combinations indicate definitive answers. */
10043 bool
10044 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10046 bool dummy;
10047 if (!nonzero)
10048 nonzero = &dummy;
10050 /* Conservatively clear NONZERO and set it only if INIT is definitely
10051 not all zero. */
10052 *nonzero = false;
10054 STRIP_NOPS (init);
10056 unsigned HOST_WIDE_INT off = 0;
10058 switch (TREE_CODE (init))
10060 case INTEGER_CST:
10061 if (integer_zerop (init))
10062 return true;
10064 *nonzero = true;
10065 return false;
10067 case REAL_CST:
10068 /* ??? Note that this is not correct for C4X float formats. There,
10069 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10070 negative exponent. */
10071 if (real_zerop (init)
10072 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10073 return true;
10075 *nonzero = true;
10076 return false;
10078 case FIXED_CST:
10079 if (fixed_zerop (init))
10080 return true;
10082 *nonzero = true;
10083 return false;
10085 case COMPLEX_CST:
10086 if (integer_zerop (init)
10087 || (real_zerop (init)
10088 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10089 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
10090 return true;
10092 *nonzero = true;
10093 return false;
10095 case VECTOR_CST:
10096 if (VECTOR_CST_NPATTERNS (init) == 1
10097 && VECTOR_CST_DUPLICATE_P (init)
10098 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
10099 return true;
10101 *nonzero = true;
10102 return false;
10104 case CONSTRUCTOR:
10106 if (TREE_CLOBBER_P (init))
10107 return false;
10109 unsigned HOST_WIDE_INT idx;
10110 tree elt;
10112 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10113 if (!initializer_zerop (elt, nonzero))
10114 return false;
10116 return true;
10119 case MEM_REF:
10121 tree arg = TREE_OPERAND (init, 0);
10122 if (TREE_CODE (arg) != ADDR_EXPR)
10123 return false;
10124 tree offset = TREE_OPERAND (init, 1);
10125 if (TREE_CODE (offset) != INTEGER_CST
10126 || !tree_fits_uhwi_p (offset))
10127 return false;
10128 off = tree_to_uhwi (offset);
10129 if (INT_MAX < off)
10130 return false;
10131 arg = TREE_OPERAND (arg, 0);
10132 if (TREE_CODE (arg) != STRING_CST)
10133 return false;
10134 init = arg;
10136 /* Fall through. */
10138 case STRING_CST:
10140 gcc_assert (off <= INT_MAX);
10142 int i = off;
10143 int n = TREE_STRING_LENGTH (init);
10144 if (n <= i)
10145 return false;
10147 /* We need to loop through all elements to handle cases like
10148 "\0" and "\0foobar". */
10149 for (i = 0; i < n; ++i)
10150 if (TREE_STRING_POINTER (init)[i] != '\0')
10152 *nonzero = true;
10153 return false;
10156 return true;
10159 default:
10160 return false;
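/* For instance, 0, 0.0, "" and { 0, { 0 }, "" } are all zero initializers,
   while -0.0 is not, since its representation sets the sign bit.  */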
10164 /* Return true if EXPR is an initializer expression in which every element
10165 is a constant that is numerically equal to 0 or 1. The elements do not
10166 need to be equal to each other. */
10168 bool
10169 initializer_each_zero_or_onep (const_tree expr)
10171 STRIP_ANY_LOCATION_WRAPPER (expr);
10173 switch (TREE_CODE (expr))
10175 case INTEGER_CST:
10176 return integer_zerop (expr) || integer_onep (expr);
10178 case REAL_CST:
10179 return real_zerop (expr) || real_onep (expr);
10181 case VECTOR_CST:
10183 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10184 if (VECTOR_CST_STEPPED_P (expr)
10185 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10186 return false;
10188 for (unsigned int i = 0; i < nelts; ++i)
10190 tree elt = vector_cst_elt (expr, i);
10191 if (!initializer_each_zero_or_onep (elt))
10192 return false;
10195 return true;
10198 default:
10199 return false;
10203 /* Check if vector VEC consists of all equal elements and that the
10204 number of elements corresponds to the type of VEC.  The function
10205 returns the first element of the vector or NULL_TREE if the vector
10206 is not uniform. */
10207 tree
10208 uniform_vector_p (const_tree vec)
10210 tree first, t;
10211 unsigned HOST_WIDE_INT i, nelts;
10213 if (vec == NULL_TREE)
10214 return NULL_TREE;
10216 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10218 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10219 return TREE_OPERAND (vec, 0);
10221 else if (TREE_CODE (vec) == VECTOR_CST)
10223 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10224 return VECTOR_CST_ENCODED_ELT (vec, 0);
10225 return NULL_TREE;
10228 else if (TREE_CODE (vec) == CONSTRUCTOR
10229 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10231 first = error_mark_node;
10233 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10235 if (i == 0)
10237 first = t;
10238 continue;
10240 if (!operand_equal_p (first, t, 0))
10241 return NULL_TREE;
10243 if (i != nelts)
10244 return NULL_TREE;
10246 return first;
10249 return NULL_TREE;
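/* E.g. the constant vector { 4, 4, 4, 4 } (or a CONSTRUCTOR with four
   equal elements) yields the element 4, while { 1, 2, 3, 4 } yields
   NULL_TREE.  */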
10252 /* If the argument is an INTEGER_CST, return it. If the argument is a vector
10253 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10254 return NULL_TREE.
10255 Look through location wrappers. */
10257 tree
10258 uniform_integer_cst_p (tree t)
10260 STRIP_ANY_LOCATION_WRAPPER (t);
10262 if (TREE_CODE (t) == INTEGER_CST)
10263 return t;
10265 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10267 t = uniform_vector_p (t);
10268 if (t && TREE_CODE (t) == INTEGER_CST)
10269 return t;
10272 return NULL_TREE;
10275 /* If VECTOR_CST T has a single nonzero element, return the index of that
10276 element, otherwise return -1. */
10278 int
10279 single_nonzero_element (const_tree t)
10281 unsigned HOST_WIDE_INT nelts;
10282 unsigned int repeat_nelts;
10283 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
10284 repeat_nelts = nelts;
10285 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
10287 nelts = vector_cst_encoded_nelts (t);
10288 repeat_nelts = VECTOR_CST_NPATTERNS (t);
10290 else
10291 return -1;
10293 int res = -1;
10294 for (unsigned int i = 0; i < nelts; ++i)
10296 tree elt = vector_cst_elt (t, i);
10297 if (!integer_zerop (elt) && !real_zerop (elt))
10299 if (res >= 0 || i >= repeat_nelts)
10300 return -1;
10301 res = i;
10304 return res;
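/* E.g. { 0, 7, 0, 0 } yields 1, { 0, 7, 0, 3 } yields -1 (two nonzero
   elements), and an all-zero vector also yields -1.  */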
10307 /* Build an empty statement at location LOC. */
10309 tree
10310 build_empty_stmt (location_t loc)
10312 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10313 SET_EXPR_LOCATION (t, loc);
10314 return t;
10318 /* Build an OMP clause with code CODE. LOC is the location of the
10319 clause. */
10321 tree
10322 build_omp_clause (location_t loc, enum omp_clause_code code)
10324 tree t;
10325 int size, length;
10327 length = omp_clause_num_ops[code];
10328 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10330 record_node_allocation_statistics (OMP_CLAUSE, size);
10332 t = (tree) ggc_internal_alloc (size);
10333 memset (t, 0, size);
10334 TREE_SET_CODE (t, OMP_CLAUSE);
10335 OMP_CLAUSE_SET_CODE (t, code);
10336 OMP_CLAUSE_LOCATION (t) = loc;
10338 return t;
10341 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10342 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10343 Except for the CODE and operand count field, other storage for the
10344 object is initialized to zeros. */
10346 tree
10347 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10349 tree t;
10350 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10352 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10353 gcc_assert (len >= 1);
10355 record_node_allocation_statistics (code, length);
10357 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10359 TREE_SET_CODE (t, code);
10361 /* Can't use TREE_OPERAND to store the length because if checking is
10362 enabled, it will try to check the length before we store it. :-P */
10363 t->exp.operands[0] = build_int_cst (sizetype, len);
10365 return t;
10368 /* Helper function for build_call_* functions; build a CALL_EXPR with
10369 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10370 the argument slots. */
10372 static tree
10373 build_call_1 (tree return_type, tree fn, int nargs)
10375 tree t;
10377 t = build_vl_exp (CALL_EXPR, nargs + 3);
10378 TREE_TYPE (t) = return_type;
10379 CALL_EXPR_FN (t) = fn;
10380 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10382 return t;
10385 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10386 FN and a null static chain slot. NARGS is the number of call arguments
10387 which are specified as "..." arguments. */
10389 tree
10390 build_call_nary (tree return_type, tree fn, int nargs, ...)
10392 tree ret;
10393 va_list args;
10394 va_start (args, nargs);
10395 ret = build_call_valist (return_type, fn, nargs, args);
10396 va_end (args);
10397 return ret;
10400 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10401 FN and a null static chain slot. NARGS is the number of call arguments
10402 which are specified as a va_list ARGS. */
10404 tree
10405 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10407 tree t;
10408 int i;
10410 t = build_call_1 (return_type, fn, nargs);
10411 for (i = 0; i < nargs; i++)
10412 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10413 process_call_operands (t);
10414 return t;
10417 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10418 FN and a null static chain slot. NARGS is the number of call arguments
10419 which are specified as a tree array ARGS. */
10421 tree
10422 build_call_array_loc (location_t loc, tree return_type, tree fn,
10423 int nargs, const tree *args)
10425 tree t;
10426 int i;
10428 t = build_call_1 (return_type, fn, nargs);
10429 for (i = 0; i < nargs; i++)
10430 CALL_EXPR_ARG (t, i) = args[i];
10431 process_call_operands (t);
10432 SET_EXPR_LOCATION (t, loc);
10433 return t;
10436 /* Like build_call_array, but takes a vec. */
10438 tree
10439 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10441 tree ret, t;
10442 unsigned int ix;
10444 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10445 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10446 CALL_EXPR_ARG (ret, ix) = t;
10447 process_call_operands (ret);
10448 return ret;
10451 /* Conveniently construct a function call expression. FNDECL names the
10452 function to be called and N arguments are passed in the array
10453 ARGARRAY. */
10455 tree
10456 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10458 tree fntype = TREE_TYPE (fndecl);
10459 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10461 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10464 /* Conveniently construct a function call expression. FNDECL names the
10465 function to be called and the arguments are passed in the vector
10466 VEC. */
10468 tree
10469 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10471 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10472 vec_safe_address (vec));
10476 /* Conveniently construct a function call expression. FNDECL names the
10477 function to be called, N is the number of arguments, and the "..."
10478 parameters are the argument expressions. */
10480 tree
10481 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10483 va_list ap;
10484 tree *argarray = XALLOCAVEC (tree, n);
10485 int i;
10487 va_start (ap, n);
10488 for (i = 0; i < n; i++)
10489 argarray[i] = va_arg (ap, tree);
10490 va_end (ap);
10491 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10494 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10495 varargs macros aren't supported by all bootstrap compilers. */
10497 tree
10498 build_call_expr (tree fndecl, int n, ...)
10500 va_list ap;
10501 tree *argarray = XALLOCAVEC (tree, n);
10502 int i;
10504 va_start (ap, n);
10505 for (i = 0; i < n; i++)
10506 argarray[i] = va_arg (ap, tree);
10507 va_end (ap);
10508 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
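/* A typical use (illustrative; FNDECL, ARG0 and ARG1 are placeholders) is
   building a call to a known two-argument function declaration:
   build_call_expr (fndecl, 2, arg0, arg1).  */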
10511 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10512 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10513 It will get gimplified later into an ordinary internal function. */
10515 tree
10516 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10517 tree type, int n, const tree *args)
10519 tree t = build_call_1 (type, NULL_TREE, n);
10520 for (int i = 0; i < n; ++i)
10521 CALL_EXPR_ARG (t, i) = args[i];
10522 SET_EXPR_LOCATION (t, loc);
10523 CALL_EXPR_IFN (t) = ifn;
10524 process_call_operands (t);
10525 return t;
10528 /* Build internal call expression. This is just like CALL_EXPR, except
10529 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10530 internal function. */
10532 tree
10533 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10534 tree type, int n, ...)
10536 va_list ap;
10537 tree *argarray = XALLOCAVEC (tree, n);
10538 int i;
10540 va_start (ap, n);
10541 for (i = 0; i < n; i++)
10542 argarray[i] = va_arg (ap, tree);
10543 va_end (ap);
10544 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10547 /* Return a function call to FN, if the target is guaranteed to support it,
10548 or null otherwise.
10550 N is the number of arguments, passed in the "...", and TYPE is the
10551 type of the return value. */
10553 tree
10554 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10555 int n, ...)
10557 va_list ap;
10558 tree *argarray = XALLOCAVEC (tree, n);
10559 int i;
10561 va_start (ap, n);
10562 for (i = 0; i < n; i++)
10563 argarray[i] = va_arg (ap, tree);
10564 va_end (ap);
10565 if (internal_fn_p (fn))
10567 internal_fn ifn = as_internal_fn (fn);
10568 if (direct_internal_fn_p (ifn))
10570 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10571 if (!direct_internal_fn_supported_p (ifn, types,
10572 OPTIMIZE_FOR_BOTH))
10573 return NULL_TREE;
10575 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10577 else
10579 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10580 if (!fndecl)
10581 return NULL_TREE;
10582 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10586 /* Return a function call to the appropriate builtin alloca variant.
10588 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10589 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10590 bound for SIZE in case it is not a fixed value. */
10592 tree
10593 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10595 if (max_size >= 0)
10597 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10598 return
10599 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10601 else if (align > 0)
10603 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10604 return build_call_expr (t, 2, size, size_int (align));
10606 else
10608 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10609 return build_call_expr (t, 1, size);
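/* Summarizing the dispatch above: a non-negative MAX_SIZE selects
   __builtin_alloca_with_align_and_max, a nonzero ALIGN alone selects
   __builtin_alloca_with_align, and otherwise plain __builtin_alloca is
   used.  */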
10613 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10614 if SIZE == -1) and return a tree node representing a char* pointer to
10615 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10616 the STRING_CST value is the LEN bytes at STR (the representation
10617 of the string, which may be wide). Otherwise it's all zeros. */
10619 tree
10620 build_string_literal (unsigned len, const char *str /* = NULL */,
10621 tree eltype /* = char_type_node */,
10622 unsigned HOST_WIDE_INT size /* = -1 */)
10624 tree t = build_string (len, str);
10625 /* Set the maximum valid index based on the string length or SIZE. */
10626 unsigned HOST_WIDE_INT maxidx
10627 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
10629 tree index = build_index_type (size_int (maxidx));
10630 eltype = build_type_variant (eltype, 1, 0);
10631 tree type = build_array_type (eltype, index);
10632 TREE_TYPE (t) = type;
10633 TREE_CONSTANT (t) = 1;
10634 TREE_READONLY (t) = 1;
10635 TREE_STATIC (t) = 1;
10637 type = build_pointer_type (eltype);
10638 t = build1 (ADDR_EXPR, type,
10639 build4 (ARRAY_REF, eltype,
10640 t, integer_zero_node, NULL_TREE, NULL_TREE));
10641 return t;
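/* For example, build_string_literal (6, "hello") returns an ADDR_EXPR of
   "hello"[0], i.e. a pointer to const char that can be passed directly as
   a call argument.  */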
10646 /* Return true if T (assumed to be a DECL) must be assigned a memory
10647 location. */
10649 bool
10650 needs_to_live_in_memory (const_tree t)
10652 return (TREE_ADDRESSABLE (t)
10653 || is_global_var (t)
10654 || (TREE_CODE (t) == RESULT_DECL
10655 && !DECL_BY_REFERENCE (t)
10656 && aggregate_value_p (t, current_function_decl)));
10659 /* Return the value of the constant X, sign-extended. */
10661 HOST_WIDE_INT
10662 int_cst_value (const_tree x)
10664 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10665 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10667 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10668 gcc_assert (cst_and_fits_in_hwi (x));
10670 if (bits < HOST_BITS_PER_WIDE_INT)
10672 bool negative = ((val >> (bits - 1)) & 1) != 0;
10673 if (negative)
10674 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
10675 else
10676 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
10679 return val;
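/* For example, with an 8-bit type a low word of 0xff yields -1 and 0x7f
   yields 127: bit 7 decides whether the value is sign- or zero-extended
   to a HOST_WIDE_INT.  */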
10682 /* If TYPE is an integral or pointer type, return an integer type with
10683 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10684 if TYPE is already an integer type of signedness UNSIGNEDP.
10685 If TYPE is a floating-point type, return an integer type with the same
10686 bitsize and with the signedness given by UNSIGNEDP; this is useful
10687 when doing bit-level operations on a floating-point value. */
10689 tree
10690 signed_or_unsigned_type_for (int unsignedp, tree type)
10692 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
10693 return type;
10695 if (TREE_CODE (type) == VECTOR_TYPE)
10697 tree inner = TREE_TYPE (type);
10698 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10699 if (!inner2)
10700 return NULL_TREE;
10701 if (inner == inner2)
10702 return type;
10703 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10706 if (TREE_CODE (type) == COMPLEX_TYPE)
10708 tree inner = TREE_TYPE (type);
10709 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10710 if (!inner2)
10711 return NULL_TREE;
10712 if (inner == inner2)
10713 return type;
10714 return build_complex_type (inner2);
10717 unsigned int bits;
10718 if (INTEGRAL_TYPE_P (type)
10719 || POINTER_TYPE_P (type)
10720 || TREE_CODE (type) == OFFSET_TYPE)
10721 bits = TYPE_PRECISION (type);
10722 else if (TREE_CODE (type) == REAL_TYPE)
10723 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
10724 else
10725 return NULL_TREE;
10727 return build_nonstandard_integer_type (bits, unsignedp);
10730 /* If TYPE is an integral or pointer type, return an integer type with
10731 the same precision which is unsigned, or itself if TYPE is already an
10732 unsigned integer type. If TYPE is a floating-point type, return an
10733 unsigned integer type with the same bitsize as TYPE. */
10735 tree
10736 unsigned_type_for (tree type)
10738 return signed_or_unsigned_type_for (1, type);
10741 /* If TYPE is an integral or pointer type, return an integer type with
10742 the same precision which is signed, or itself if TYPE is already a
10743 signed integer type. If TYPE is a floating-point type, return a
10744 signed integer type with the same bitsize as TYPE. */
10746 tree
10747 signed_type_for (tree type)
10749 return signed_or_unsigned_type_for (0, type);
10752 /* - For VECTOR_TYPEs:
10753 - The truth type must be a VECTOR_BOOLEAN_TYPE.
10754 - The number of elements must match (known_eq).
10755 - targetm.vectorize.get_mask_mode must exist and return exactly
10756 the same mode as the truth type's mode.
10757 - Otherwise, the truth type must be a BOOLEAN_TYPE
10758 or useless_type_conversion_p to BOOLEAN_TYPE. */
10759 bool
10760 is_truth_type_for (tree type, tree truth_type)
10762 machine_mode mask_mode = TYPE_MODE (truth_type);
10763 machine_mode vmode = TYPE_MODE (type);
10764 machine_mode tmask_mode;
10766 if (TREE_CODE (type) == VECTOR_TYPE)
10768 if (VECTOR_BOOLEAN_TYPE_P (truth_type)
10769 && known_eq (TYPE_VECTOR_SUBPARTS (type),
10770 TYPE_VECTOR_SUBPARTS (truth_type))
10771 && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
10772 && tmask_mode == mask_mode)
10773 return true;
10775 return false;
10778 return useless_type_conversion_p (boolean_type_node, truth_type);
10781 /* If TYPE is a vector type, return the corresponding truth (boolean) vector
10782 type with the same number of subparts. Otherwise return boolean_type_node. */
10784 tree
10785 truth_type_for (tree type)
10787 if (TREE_CODE (type) == VECTOR_TYPE)
10789 if (VECTOR_BOOLEAN_TYPE_P (type))
10790 return type;
10791 return build_truth_vector_type_for (type);
10793 else
10794 return boolean_type_node;
10797 /* Returns the largest value obtainable by casting something in INNER type to
10798 OUTER type. */
10800 tree
10801 upper_bound_in_type (tree outer, tree inner)
10803 unsigned int det = 0;
10804 unsigned oprec = TYPE_PRECISION (outer);
10805 unsigned iprec = TYPE_PRECISION (inner);
10806 unsigned prec;
10808 /* Compute a unique number for every combination. */
10809 det |= (oprec > iprec) ? 4 : 0;
10810 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10811 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10813 /* Determine the exponent to use. */
10814 switch (det)
10816 case 0:
10817 case 1:
10818 /* oprec <= iprec, outer: signed, inner: don't care. */
10819 prec = oprec - 1;
10820 break;
10821 case 2:
10822 case 3:
10823 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10824 prec = oprec;
10825 break;
10826 case 4:
10827 /* oprec > iprec, outer: signed, inner: signed. */
10828 prec = iprec - 1;
10829 break;
10830 case 5:
10831 /* oprec > iprec, outer: signed, inner: unsigned. */
10832 prec = iprec;
10833 break;
10834 case 6:
10835 /* oprec > iprec, outer: unsigned, inner: signed. */
10836 prec = oprec;
10837 break;
10838 case 7:
10839 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10840 prec = iprec;
10841 break;
10842 default:
10843 gcc_unreachable ();
10846 return wide_int_to_tree (outer,
10847 wi::mask (prec, false, TYPE_PRECISION (outer)));
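/* Worked example: for OUTER = unsigned char (8 bits) and INNER = int
   (32 bits), DET is 2 (oprec <= iprec, outer unsigned), so PREC = 8 and the
   upper bound is 255.  For OUTER = int (32 bits) and INNER = unsigned short
   (16 bits), DET is 5 (oprec > iprec, outer signed, inner unsigned), so
   PREC = 16 and the upper bound is 65535.  */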
10850 /* Returns the smallest value obtainable by casting something in INNER type to
10851 OUTER type. */
10853 tree
10854 lower_bound_in_type (tree outer, tree inner)
10856 unsigned oprec = TYPE_PRECISION (outer);
10857 unsigned iprec = TYPE_PRECISION (inner);
10859 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10860 and obtain 0. */
10861 if (TYPE_UNSIGNED (outer)
10862 /* If we are widening something of an unsigned type, OUTER type
10863 contains all values of INNER type. In particular, both INNER
10864 and OUTER types have zero in common. */
10865 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10866 return build_int_cst (outer, 0);
10867 else
10869 /* If we are widening a signed type to another signed type, we
10870 want to obtain -2^^(iprec-1). If we are keeping the
10871 precision or narrowing to a signed type, we want to obtain
10872 -2^(oprec-1). */
10873 unsigned prec = oprec > iprec ? iprec : oprec;
10874 return wide_int_to_tree (outer,
10875 wi::mask (prec - 1, true,
10876 TYPE_PRECISION (outer)));
10880 /* Return nonzero if two operands that are suitable for PHI nodes are
10881 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10882 SSA_NAME or invariant. Note that this is strictly an optimization.
10883 That is, callers of this function can directly call operand_equal_p
10884 and get the same result, only slower. */
10886 int
10887 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10889 if (arg0 == arg1)
10890 return 1;
10891 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10892 return 0;
10893 return operand_equal_p (arg0, arg1, 0);
10896 /* Returns the number of zeros at the end of the binary representation of X. */
10898 tree
10899 num_ending_zeros (const_tree x)
10901 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
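/* E.g. for X = 24 (binary 11000) this returns 3; for any odd X it
   returns 0.  */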
10905 #define WALK_SUBTREE(NODE) \
10906 do \
10908 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10909 if (result) \
10910 return result; \
10912 while (0)
10914 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10915 to be walked whenever a type is seen in the tree. The rest of the operands
10916 and the return value are as for walk_tree. */
10918 static tree
10919 walk_type_fields (tree type, walk_tree_fn func, void *data,
10920 hash_set<tree> *pset, walk_tree_lh lh)
10922 tree result = NULL_TREE;
10924 switch (TREE_CODE (type))
10926 case POINTER_TYPE:
10927 case REFERENCE_TYPE:
10928 case VECTOR_TYPE:
10929 /* We have to worry about mutually recursive pointers. These can't
10930 be written in C. They can in Ada. It's pathological, but
10931 there's an ACATS test (c38102a) that checks it. Deal with this
10932 by checking if we're pointing to another pointer, that one
10933 points to another pointer, that one does too, and we have no htab.
10934 If so, get a hash table. We check three levels deep to avoid
10935 the cost of the hash table if we don't need one. */
10936 if (POINTER_TYPE_P (TREE_TYPE (type))
10937 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10938 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10939 && !pset)
10941 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10942 func, data);
10943 if (result)
10944 return result;
10946 break;
10949 /* fall through */
10951 case COMPLEX_TYPE:
10952 WALK_SUBTREE (TREE_TYPE (type));
10953 break;
10955 case METHOD_TYPE:
10956 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10958 /* Fall through. */
10960 case FUNCTION_TYPE:
10961 WALK_SUBTREE (TREE_TYPE (type));
10963 tree arg;
10965 /* We never want to walk into default arguments. */
10966 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10967 WALK_SUBTREE (TREE_VALUE (arg));
10969 break;
10971 case ARRAY_TYPE:
10972 /* Don't follow this node's type if it is a pointer, for fear that
10973 we'll have infinite recursion. If we have a PSET, then we
10974 need not fear. */
10975 if (pset
10976 || (!POINTER_TYPE_P (TREE_TYPE (type))
10977 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10978 WALK_SUBTREE (TREE_TYPE (type));
10979 WALK_SUBTREE (TYPE_DOMAIN (type));
10980 break;
10982 case OFFSET_TYPE:
10983 WALK_SUBTREE (TREE_TYPE (type));
10984 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10985 break;
10987 default:
10988 break;
10991 return NULL_TREE;
10994 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10995 called with the DATA and the address of each sub-tree. If FUNC returns a
10996 non-NULL value, the traversal is stopped, and the value returned by FUNC
10997 is returned. If PSET is non-NULL it is used to record the nodes visited,
10998 and to avoid visiting a node more than once. */
11000 tree
11001 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11002 hash_set<tree> *pset, walk_tree_lh lh)
11004 enum tree_code code;
11005 int walk_subtrees;
11006 tree result;
11008 #define WALK_SUBTREE_TAIL(NODE) \
11009 do \
11011 tp = & (NODE); \
11012 goto tail_recurse; \
11014 while (0)
11016 tail_recurse:
11017 /* Skip empty subtrees. */
11018 if (!*tp)
11019 return NULL_TREE;
11021 /* Don't walk the same tree twice, if the user has requested
11022 that we avoid doing so. */
11023 if (pset && pset->add (*tp))
11024 return NULL_TREE;
11026 /* Call the function. */
11027 walk_subtrees = 1;
11028 result = (*func) (tp, &walk_subtrees, data);
11030 /* If we found something, return it. */
11031 if (result)
11032 return result;
11034 code = TREE_CODE (*tp);
11036 /* Even if we didn't, FUNC may have decided that there was nothing
11037 interesting below this point in the tree. */
11038 if (!walk_subtrees)
11040 /* But we still need to check our siblings. */
11041 if (code == TREE_LIST)
11042 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11043 else if (code == OMP_CLAUSE)
11044 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11045 else
11046 return NULL_TREE;
11049 if (lh)
11051 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11052 if (result || !walk_subtrees)
11053 return result;
11056 switch (code)
11058 case ERROR_MARK:
11059 case IDENTIFIER_NODE:
11060 case INTEGER_CST:
11061 case REAL_CST:
11062 case FIXED_CST:
11063 case STRING_CST:
11064 case BLOCK:
11065 case PLACEHOLDER_EXPR:
11066 case SSA_NAME:
11067 case FIELD_DECL:
11068 case RESULT_DECL:
11069 /* None of these have subtrees other than those already walked
11070 above. */
11071 break;
11073 case TREE_LIST:
11074 WALK_SUBTREE (TREE_VALUE (*tp));
11075 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11076 break;
11078 case TREE_VEC:
11080 int len = TREE_VEC_LENGTH (*tp);
11082 if (len == 0)
11083 break;
11085 /* Walk all elements but the first. */
11086 while (--len)
11087 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11089 /* Now walk the first one as a tail call. */
11090 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11093 case VECTOR_CST:
11095 unsigned len = vector_cst_encoded_nelts (*tp);
11096 if (len == 0)
11097 break;
11098 /* Walk all elements but the first. */
11099 while (--len)
11100 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
11101 /* Now walk the first one as a tail call. */
11102 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
11105 case COMPLEX_CST:
11106 WALK_SUBTREE (TREE_REALPART (*tp));
11107 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11109 case CONSTRUCTOR:
11111 unsigned HOST_WIDE_INT idx;
11112 constructor_elt *ce;
11114 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11115 idx++)
11116 WALK_SUBTREE (ce->value);
11118 break;
11120 case SAVE_EXPR:
11121 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11123 case BIND_EXPR:
11125 tree decl;
11126 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11128 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11129 into declarations that are just mentioned, rather than
11130 declared; they don't really belong to this part of the tree.
11131 And, we can see cycles: the initializer for a declaration
11132 can refer to the declaration itself. */
11133 WALK_SUBTREE (DECL_INITIAL (decl));
11134 WALK_SUBTREE (DECL_SIZE (decl));
11135 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11137 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11140 case STATEMENT_LIST:
11142 tree_stmt_iterator i;
11143 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11144 WALK_SUBTREE (*tsi_stmt_ptr (i));
11146 break;
11148 case OMP_CLAUSE:
11150 int len = omp_clause_num_ops[OMP_CLAUSE_CODE (*tp)];
11151 for (int i = 0; i < len; i++)
11152 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11153 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11155 break;
11157 case TARGET_EXPR:
11159 int i, len;
11161 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11162 But, we only want to walk once. */
11163 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11164 for (i = 0; i < len; ++i)
11165 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11166 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11169 case DECL_EXPR:
11170 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11171 defining. We only want to walk into these fields of a type in this
11172 case and not in the general case of a mere reference to the type.
11174 The criterion is as follows: if the field can be an expression, it
11175 must be walked only here. This should be in keeping with the fields
11176 that are directly gimplified in gimplify_type_sizes in order for the
11177 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11178 variable-sized types.
11180 Note that DECLs get walked as part of processing the BIND_EXPR. */
11181 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11183 /* Call the function for the decl so e.g. copy_tree_body_r can
11184 replace it with the remapped one. */
11185 result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
11186 if (result || !walk_subtrees)
11187 return result;
11189 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11190 if (TREE_CODE (*type_p) == ERROR_MARK)
11191 return NULL_TREE;
11193 /* Call the function for the type. See if it returns anything or
11194 doesn't want us to continue. If we are to continue, walk both
11195 the normal fields and those for the declaration case. */
11196 result = (*func) (type_p, &walk_subtrees, data);
11197 if (result || !walk_subtrees)
11198 return result;
11200 /* But do not walk a pointed-to type since it may itself need to
11201 be walked in the declaration case if it isn't anonymous. */
11202 if (!POINTER_TYPE_P (*type_p))
11204 result = walk_type_fields (*type_p, func, data, pset, lh);
11205 if (result)
11206 return result;
11209 /* If this is a record type, also walk the fields. */
11210 if (RECORD_OR_UNION_TYPE_P (*type_p))
11212 tree field;
11214 for (field = TYPE_FIELDS (*type_p); field;
11215 field = DECL_CHAIN (field))
11217 /* We'd like to look at the type of the field, but we can
11218 easily get infinite recursion. So assume it's pointed
11219 to elsewhere in the tree. Also, ignore things that
11220 aren't fields. */
11221 if (TREE_CODE (field) != FIELD_DECL)
11222 continue;
11224 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11225 WALK_SUBTREE (DECL_SIZE (field));
11226 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11227 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11228 WALK_SUBTREE (DECL_QUALIFIER (field));
11232 /* Same for scalar types. */
11233 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11234 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11235 || TREE_CODE (*type_p) == INTEGER_TYPE
11236 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11237 || TREE_CODE (*type_p) == REAL_TYPE)
11239 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11240 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11243 WALK_SUBTREE (TYPE_SIZE (*type_p));
11244 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11246 /* FALLTHRU */
11248 default:
11249 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11251 int i, len;
11253 /* Walk over all the sub-trees of this operand. */
11254 len = TREE_OPERAND_LENGTH (*tp);
11256 /* Go through the subtrees. We need to do this in forward order so
11257 that the scope of a FOR_EXPR is handled properly. */
11258 if (len)
11260 for (i = 0; i < len - 1; ++i)
11261 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11262 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11265 /* If this is a type, walk the needed fields in the type. */
11266 else if (TYPE_P (*tp))
11267 return walk_type_fields (*tp, func, data, pset, lh);
11268 break;
11271 /* We didn't find what we were looking for. */
11272 return NULL_TREE;
11274 #undef WALK_SUBTREE_TAIL
11276 #undef WALK_SUBTREE
11278 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11280 tree
11281 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11282 walk_tree_lh lh)
11284 tree result;
11286 hash_set<tree> pset;
11287 result = walk_tree_1 (tp, func, data, &pset, lh);
11288 return result;
11292 tree
11293 tree_block (tree t)
11295 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11297 if (IS_EXPR_CODE_CLASS (c))
11298 return LOCATION_BLOCK (t->exp.locus);
11299 gcc_unreachable ();
11300 return NULL;
11303 void
11304 tree_set_block (tree t, tree b)
11306 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11308 if (IS_EXPR_CODE_CLASS (c))
11310 t->exp.locus = set_block (t->exp.locus, b);
11312 else
11313 gcc_unreachable ();
11316 /* Create a nameless artificial label and put it in the current
11317 function context. The label has a location of LOC. Returns the
11318 newly created label. */
11320 tree
11321 create_artificial_label (location_t loc)
11323 tree lab = build_decl (loc,
11324 LABEL_DECL, NULL_TREE, void_type_node);
11326 DECL_ARTIFICIAL (lab) = 1;
11327 DECL_IGNORED_P (lab) = 1;
11328 DECL_CONTEXT (lab) = current_function_decl;
11329 return lab;
11332 /* Given a tree, try to return a useful variable name that we can use
11333 to prefix a temporary that is being assigned the value of the tree.
11334 I.e. given <temp> = &A, return A. */
11336 const char *
11337 get_name (tree t)
11339 tree stripped_decl;
11341 stripped_decl = t;
11342 STRIP_NOPS (stripped_decl);
11343 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11344 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11345 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11347 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11348 if (!name)
11349 return NULL;
11350 return IDENTIFIER_POINTER (name);
11352 else
11354 switch (TREE_CODE (stripped_decl))
11356 case ADDR_EXPR:
11357 return get_name (TREE_OPERAND (stripped_decl, 0));
11358 default:
11359 return NULL;
11364 /* Return true if TYPE has a variable argument list. */
11366 bool
11367 stdarg_p (const_tree fntype)
11369 function_args_iterator args_iter;
11370 tree n = NULL_TREE, t;
11372 if (!fntype)
11373 return false;
11375 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11377 n = t;
11380 return n != NULL_TREE && n != void_type_node;
11383 /* Return true if TYPE has a prototype. */
11385 bool
11386 prototype_p (const_tree fntype)
11388 tree t;
11390 gcc_assert (fntype != NULL_TREE);
11392 t = TYPE_ARG_TYPES (fntype);
11393 return (t != NULL_TREE);
11396 /* If BLOCK is inlined from an __attribute__((__artificial__))
11397 routine, return a pointer to the location from where it has been
11398 called. */
11399 location_t *
11400 block_nonartificial_location (tree block)
11402 location_t *ret = NULL;
11404 while (block && TREE_CODE (block) == BLOCK
11405 && BLOCK_ABSTRACT_ORIGIN (block))
11407 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11408 if (TREE_CODE (ao) == FUNCTION_DECL)
11410 /* If AO is an artificial inline, point RET to the
11411 call site locus at which it has been inlined and continue
11412 the loop, in case AO's caller is also an artificial
11413 inline. */
11414 if (DECL_DECLARED_INLINE_P (ao)
11415 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11416 ret = &BLOCK_SOURCE_LOCATION (block);
11417 else
11418 break;
11420 else if (TREE_CODE (ao) != BLOCK)
11421 break;
11423 block = BLOCK_SUPERCONTEXT (block);
11425 return ret;
11429 /* If EXP is inlined from an __attribute__((__artificial__))
11430 function, return the location of the original call expression. */
11432 location_t
11433 tree_nonartificial_location (tree exp)
11435 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11437 if (loc)
11438 return *loc;
11439 else
11440 return EXPR_LOCATION (exp);
11443 /* Return the location into which EXP has been inlined. Analogous
11444 to tree_nonartificial_location() above but not limited to artificial
11445 functions declared inline. If SYSTEM_HEADER is true, return
11446 the macro expansion point of the location if it's in a system header. */
11448 location_t
11449 tree_inlined_location (tree exp, bool system_header /* = true */)
11451 location_t loc = UNKNOWN_LOCATION;
11453 tree block = TREE_BLOCK (exp);
11455 while (block && TREE_CODE (block) == BLOCK
11456 && BLOCK_ABSTRACT_ORIGIN (block))
11458 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11459 if (TREE_CODE (ao) == FUNCTION_DECL)
11460 loc = BLOCK_SOURCE_LOCATION (block);
11461 else if (TREE_CODE (ao) != BLOCK)
11462 break;
11464 block = BLOCK_SUPERCONTEXT (block);
11467 if (loc == UNKNOWN_LOCATION)
11469 loc = EXPR_LOCATION (exp);
11470 if (system_header)
11471 /* Only consider macro expansion when the block traversal failed
11472 to find a location. Otherwise it's not relevant. */
11473 return expansion_point_location_if_in_system_header (loc);
11476 return loc;
11479 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11480 nodes. */
11482 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11484 hashval_t
11485 cl_option_hasher::hash (tree x)
11487 const_tree const t = x;
11489 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11490 return cl_optimization_hash (TREE_OPTIMIZATION (t));
11491 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11492 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11493 else
11494 gcc_unreachable ();
11497 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11498 TARGET_OPTION tree node) is the same as that given by *Y, which is a
11499 node of the same kind. */
11501 bool
11502 cl_option_hasher::equal (tree x, tree y)
11504 const_tree const xt = x;
11505 const_tree const yt = y;
11507 if (TREE_CODE (xt) != TREE_CODE (yt))
11508 return 0;
11510 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11511 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11512 TREE_OPTIMIZATION (yt));
11513 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11514 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11515 TREE_TARGET_OPTION (yt));
11516 else
11517 gcc_unreachable ();
11520 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11522 tree
11523 build_optimization_node (struct gcc_options *opts,
11524 struct gcc_options *opts_set)
11526 tree t;
11528 /* Use the cache of optimization nodes. */
11530 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11531 opts, opts_set);
11533 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11534 t = *slot;
11535 if (!t)
11537 /* Insert this one into the hash table. */
11538 t = cl_optimization_node;
11539 *slot = t;
11541 /* Make a new node for next time round. */
11542 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11545 return t;
11548 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11550 tree
11551 build_target_option_node (struct gcc_options *opts,
11552 struct gcc_options *opts_set)
11554 tree t;
11556 /* Use the cache of target option nodes. */
11558 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11559 opts, opts_set);
11561 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11562 t = *slot;
11563 if (!t)
11565 /* Insert this one into the hash table. */
11566 t = cl_target_option_node;
11567 *slot = t;
11569 /* Make a new node for next time round. */
11570 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11573 return t;
11576 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11577 so that they aren't saved during PCH writing. */
11579 void
11580 prepare_target_option_nodes_for_pch (void)
11582 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11583 for (; iter != cl_option_hash_table->end (); ++iter)
11584 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11585 TREE_TARGET_GLOBALS (*iter) = NULL;
11588 /* Determine the "ultimate origin" of a block. */
11590 tree
11591 block_ultimate_origin (const_tree block)
11593 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11595 if (origin == NULL_TREE)
11596 return NULL_TREE;
11597 else
11599 gcc_checking_assert ((DECL_P (origin)
11600 && DECL_ORIGIN (origin) == origin)
11601 || BLOCK_ORIGIN (origin) == origin);
11602 return origin;
11606 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11607 no instruction. */
11609 bool
11610 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11612 /* Do not strip casts into or out of differing address spaces. */
11613 if (POINTER_TYPE_P (outer_type)
11614 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11616 if (!POINTER_TYPE_P (inner_type)
11617 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11618 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11619 return false;
11621 else if (POINTER_TYPE_P (inner_type)
11622 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11624 /* We already know that outer_type is not a pointer with
11625 a non-generic address space. */
11626 return false;
11629 /* Use precision rather than machine mode when we can, which gives
11630 the correct answer even for submode (bit-field) types. */
11631 if ((INTEGRAL_TYPE_P (outer_type)
11632 || POINTER_TYPE_P (outer_type)
11633 || TREE_CODE (outer_type) == OFFSET_TYPE)
11634 && (INTEGRAL_TYPE_P (inner_type)
11635 || POINTER_TYPE_P (inner_type)
11636 || TREE_CODE (inner_type) == OFFSET_TYPE))
11637 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11639 /* Otherwise fall back on comparing machine modes (e.g. for
11640 aggregate types, floats). */
11641 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
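/* For example, a cast between int and unsigned int is a no-op (equal
   precision), a cast from int to a wider integer type is not, and for
   floats and aggregates the decision falls back to comparing machine
   modes.  */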
11644 /* Return true iff conversion in EXP generates no instruction. Mark
11645 it inline so that we fully inline into the stripping functions even
11646 though we have two uses of this function. */
11648 static inline bool
11649 tree_nop_conversion (const_tree exp)
11651 tree outer_type, inner_type;
11653 if (location_wrapper_p (exp))
11654 return true;
11655 if (!CONVERT_EXPR_P (exp)
11656 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11657 return false;
11659 outer_type = TREE_TYPE (exp);
11660 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11661 if (!inner_type || inner_type == error_mark_node)
11662 return false;
11664 return tree_nop_conversion_p (outer_type, inner_type);
11667 /* Return true iff conversion in EXP generates no instruction. Don't
11668 consider conversions changing the signedness. */
11670 static bool
11671 tree_sign_nop_conversion (const_tree exp)
11673 tree outer_type, inner_type;
11675 if (!tree_nop_conversion (exp))
11676 return false;
11678 outer_type = TREE_TYPE (exp);
11679 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11681 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11682 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11685 /* Strip conversions from EXP according to tree_nop_conversion and
11686 return the resulting expression. */
11688 tree
11689 tree_strip_nop_conversions (tree exp)
11691 while (tree_nop_conversion (exp))
11692 exp = TREE_OPERAND (exp, 0);
11693 return exp;
11696 /* Strip conversions from EXP according to tree_sign_nop_conversion
11697 and return the resulting expression. */
11699 tree
11700 tree_strip_sign_nop_conversions (tree exp)
11702 while (tree_sign_nop_conversion (exp))
11703 exp = TREE_OPERAND (exp, 0);
11704 return exp;
11707 /* Avoid any floating point extensions from EXP. */
11708 tree
11709 strip_float_extensions (tree exp)
11711 tree sub, expt, subt;
11713 /* For a floating point constant, look up the narrowest type that can hold
11714 it properly and handle it like (type)(narrowest_type)constant.
11715 This way we can optimize for instance a=a*2.0 where "a" is float
11716 but 2.0 is a double constant. */
11717 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11719 REAL_VALUE_TYPE orig;
11720 tree type = NULL;
11722 orig = TREE_REAL_CST (exp);
11723 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11724 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11725 type = float_type_node;
11726 else if (TYPE_PRECISION (TREE_TYPE (exp))
11727 > TYPE_PRECISION (double_type_node)
11728 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11729 type = double_type_node;
11730 if (type)
11731 return build_real_truncate (type, orig);
11734 if (!CONVERT_EXPR_P (exp))
11735 return exp;
11737 sub = TREE_OPERAND (exp, 0);
11738 subt = TREE_TYPE (sub);
11739 expt = TREE_TYPE (exp);
11741 if (!FLOAT_TYPE_P (subt))
11742 return exp;
11744 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11745 return exp;
11747 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11748 return exp;
11750 return strip_float_extensions (sub);
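/* E.g. for (double) f with F of type float (illustrative) this returns F
   itself, and for the constant 2.0 of type double it returns a float-typed
   constant, since 2.0 truncates to float exactly.  */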
11753 /* Strip out all handled components that produce invariant
11754 offsets. */
11756 const_tree
11757 strip_invariant_refs (const_tree op)
11759 while (handled_component_p (op))
11761 switch (TREE_CODE (op))
11763 case ARRAY_REF:
11764 case ARRAY_RANGE_REF:
11765 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11766 || TREE_OPERAND (op, 2) != NULL_TREE
11767 || TREE_OPERAND (op, 3) != NULL_TREE)
11768 return NULL;
11769 break;
11771 case COMPONENT_REF:
11772 if (TREE_OPERAND (op, 2) != NULL_TREE)
11773 return NULL;
11774 break;
11776 default:;
11778 op = TREE_OPERAND (op, 0);
11781 return op;
11784 static GTY(()) tree gcc_eh_personality_decl;
11786 /* Return the GCC personality function decl. */
11788 tree
11789 lhd_gcc_personality (void)
11791 if (!gcc_eh_personality_decl)
11792 gcc_eh_personality_decl = build_personality_function ("gcc");
11793 return gcc_eh_personality_decl;
11796 /* TARGET is a call target of GIMPLE call statement
11797 (obtained by gimple_call_fn). Return true if it is
11798 an OBJ_TYPE_REF representing a virtual call of a C++ method.
11799 (As opposed to OBJ_TYPE_REF representing objc calls
11800 through a cast where middle-end devirtualization machinery
11801 can't apply.) FOR_DUMP_P is true when being called from
11802 the dump routines. */
11804 bool
11805 virtual_method_call_p (const_tree target, bool for_dump_p)
11807 if (TREE_CODE (target) != OBJ_TYPE_REF)
11808 return false;
11809 tree t = TREE_TYPE (target);
11810 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
11811 t = TREE_TYPE (t);
11812 if (TREE_CODE (t) == FUNCTION_TYPE)
11813 return false;
11814 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
11815 /* If we do not have a BINFO associated, it means that the type was built
11816 without devirtualization enabled. Do not consider this a virtual
11817 call. */
11818 if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
11819 return false;
11820 return true;
11823 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
11825 static tree
11826 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
11828 unsigned int i;
11829 tree base_binfo, b;
11831 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
11832 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
11833 && types_same_for_odr (TREE_TYPE (base_binfo), type))
11834 return base_binfo;
11835 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
11836 return b;
11837 return NULL;
11840 /* Try to find a base info of BINFO that would have its field decl at offset
11841 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11842 found, return it; otherwise return NULL_TREE. */
11844 tree
11845 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
11847 tree type = BINFO_TYPE (binfo);
11849 while (true)
11851 HOST_WIDE_INT pos, size;
11852 tree fld;
11853 int i;
11855 if (types_same_for_odr (type, expected_type))
11856 return binfo;
11857 if (maybe_lt (offset, 0))
11858 return NULL_TREE;
11860 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11862 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
11863 continue;
11865 pos = int_bit_position (fld);
11866 size = tree_to_uhwi (DECL_SIZE (fld));
11867 if (known_in_range_p (offset, pos, size))
11868 break;
11870 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11871 return NULL_TREE;
11873 /* Offset 0 indicates the primary base, whose vtable contents are
11874 represented in the binfo for the derived class. */
11875 else if (maybe_ne (offset, 0))
11877 tree found_binfo = NULL, base_binfo;
11878 /* Offsets in BINFO are in bytes relative to the whole structure
11879 while POS is in bits relative to the containing field. */
11880 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
11881 / BITS_PER_UNIT);
11883 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
11884 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
11885 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11887 found_binfo = base_binfo;
11888 break;
11890 if (found_binfo)
11891 binfo = found_binfo;
11892 else
11893 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
11894 binfo_offset);
11897 type = TREE_TYPE (fld);
11898 offset -= pos;
11902 /* Returns true if X is a typedef decl. */
11904 bool
11905 is_typedef_decl (const_tree x)
11907 return (x && TREE_CODE (x) == TYPE_DECL
11908 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11911 /* Returns true iff TYPE is a type variant created for a typedef. */
11913 bool
11914 typedef_variant_p (const_tree type)
11916 return is_typedef_decl (TYPE_NAME (type));
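/* Illustrative sketch (assumption): given "typedef int myint;", the
   TYPE_DECL for "myint" carries DECL_ORIGINAL_TYPE pointing at int, so
   is_typedef_decl returns true for that decl and typedef_variant_p returns
   true for the variant of int whose TYPE_NAME is that TYPE_DECL.  */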
11919 /* PR 84195: Replace control characters in "unescaped" with their
11920 escaped equivalents. Allow newlines if -fmessage-length has
11921 been set to a non-zero value. This is done here, rather than
11922 where the attribute is recorded, as the message length can
11923 change between these two locations. */
11925 void
11926 escaped_string::escape (const char *unescaped)
11928 char *escaped;
11929 size_t i, new_i, len;
11931 if (m_owned)
11932 free (m_str);
11934 m_str = const_cast<char *> (unescaped);
11935 m_owned = false;
11937 if (unescaped == NULL || *unescaped == 0)
11938 return;
11940 len = strlen (unescaped);
11941 escaped = NULL;
11942 new_i = 0;
11944 for (i = 0; i < len; i++)
11946 char c = unescaped[i];
11948 if (!ISCNTRL (c))
11950 if (escaped)
11951 escaped[new_i++] = c;
11952 continue;
11955 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
11957 if (escaped == NULL)
11959 /* We only allocate space for a new string if we
11960 actually encounter a control character that
11961 needs replacing. */
11962 escaped = (char *) xmalloc (len * 2 + 1);
11963 strncpy (escaped, unescaped, i);
11964 new_i = i;
11967 escaped[new_i++] = '\\';
11969 switch (c)
11971 case '\a': escaped[new_i++] = 'a'; break;
11972 case '\b': escaped[new_i++] = 'b'; break;
11973 case '\f': escaped[new_i++] = 'f'; break;
11974 case '\n': escaped[new_i++] = 'n'; break;
11975 case '\r': escaped[new_i++] = 'r'; break;
11976 case '\t': escaped[new_i++] = 't'; break;
11977 case '\v': escaped[new_i++] = 'v'; break;
11978 default: escaped[new_i++] = '?'; break;
11981 else if (escaped)
11982 escaped[new_i++] = c;
11985 if (escaped)
11987 escaped[new_i] = 0;
11988 m_str = escaped;
11989 m_owned = true;
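/* Illustrative sketch (assumption): escaping "one\ntwo\tend" with message
   wrapping disabled yields "one\\ntwo\\tend"; the newline is kept verbatim
   only when -fmessage-length makes the pretty printer wrap lines.  */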
11993 /* Warn about a use of an identifier which was marked deprecated. Returns
11994 whether a warning was given. */
11996 bool
11997 warn_deprecated_use (tree node, tree attr)
11999 escaped_string msg;
12001 if (node == 0 || !warn_deprecated_decl)
12002 return false;
12004 if (!attr)
12006 if (DECL_P (node))
12007 attr = DECL_ATTRIBUTES (node);
12008 else if (TYPE_P (node))
12010 tree decl = TYPE_STUB_DECL (node);
12011 if (decl)
12012 attr = lookup_attribute ("deprecated",
12013 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12017 if (attr)
12018 attr = lookup_attribute ("deprecated", attr);
12020 if (attr)
12021 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12023 bool w = false;
12024 if (DECL_P (node))
12026 auto_diagnostic_group d;
12027 if (msg)
12028 w = warning (OPT_Wdeprecated_declarations,
12029 "%qD is deprecated: %s", node, (const char *) msg);
12030 else
12031 w = warning (OPT_Wdeprecated_declarations,
12032 "%qD is deprecated", node);
12033 if (w)
12034 inform (DECL_SOURCE_LOCATION (node), "declared here");
12036 else if (TYPE_P (node))
12038 tree what = NULL_TREE;
12039 tree decl = TYPE_STUB_DECL (node);
12041 if (TYPE_NAME (node))
12043 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12044 what = TYPE_NAME (node);
12045 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12046 && DECL_NAME (TYPE_NAME (node)))
12047 what = DECL_NAME (TYPE_NAME (node));
12050 auto_diagnostic_group d;
12051 if (what)
12053 if (msg)
12054 w = warning (OPT_Wdeprecated_declarations,
12055 "%qE is deprecated: %s", what, (const char *) msg);
12056 else
12057 w = warning (OPT_Wdeprecated_declarations,
12058 "%qE is deprecated", what);
12060 else
12062 if (msg)
12063 w = warning (OPT_Wdeprecated_declarations,
12064 "type is deprecated: %s", (const char *) msg);
12065 else
12066 w = warning (OPT_Wdeprecated_declarations,
12067 "type is deprecated");
12070 if (w && decl)
12071 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12074 return w;
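/* Illustrative sketch (assumption): for
     int old_fn (void) __attribute__((deprecated ("use new_fn")));
   a use of old_fn reaches the DECL_P branch above and produces
   "'old_fn' is deprecated: use new_fn" followed by a "declared here"
   note at the declaration.  */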
12077 /* Error out with an identifier which was marked 'unavailable'. */
12078 void
12079 error_unavailable_use (tree node, tree attr)
12081 escaped_string msg;
12083 if (node == 0)
12084 return;
12086 if (!attr)
12088 if (DECL_P (node))
12089 attr = DECL_ATTRIBUTES (node);
12090 else if (TYPE_P (node))
12092 tree decl = TYPE_STUB_DECL (node);
12093 if (decl)
12094 attr = lookup_attribute ("unavailable",
12095 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12099 if (attr)
12100 attr = lookup_attribute ("unavailable", attr);
12102 if (attr)
12103 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12105 if (DECL_P (node))
12107 auto_diagnostic_group d;
12108 if (msg)
12109 error ("%qD is unavailable: %s", node, (const char *) msg);
12110 else
12111 error ("%qD is unavailable", node);
12112 inform (DECL_SOURCE_LOCATION (node), "declared here");
12114 else if (TYPE_P (node))
12116 tree what = NULL_TREE;
12117 tree decl = TYPE_STUB_DECL (node);
12119 if (TYPE_NAME (node))
12121 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12122 what = TYPE_NAME (node);
12123 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12124 && DECL_NAME (TYPE_NAME (node)))
12125 what = DECL_NAME (TYPE_NAME (node));
12128 auto_diagnostic_group d;
12129 if (what)
12131 if (msg)
12132 error ("%qE is unavailable: %s", what, (const char *) msg);
12133 else
12134 error ("%qE is unavailable", what);
12136 else
12138 if (msg)
12139 error ("type is unavailable: %s", (const char *) msg);
12140 else
12141 error ("type is unavailable");
12144 if (decl)
12145 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12149 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12150 somewhere in it. */
12152 bool
12153 contains_bitfld_component_ref_p (const_tree ref)
12155 while (handled_component_p (ref))
12157 if (TREE_CODE (ref) == COMPONENT_REF
12158 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12159 return true;
12160 ref = TREE_OPERAND (ref, 0);
12163 return false;
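/* Illustrative sketch (assumption): with "struct S { int x : 3; } s;",
   the reference s.x is a COMPONENT_REF whose FIELD_DECL has DECL_BIT_FIELD
   set, so the predicate returns true, whereas a COMPONENT_REF of an
   ordinary member yields false.  */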
12166 /* Try to determine whether a TRY_CATCH expression can fall through.
12167 This is a subroutine of block_may_fallthru. */
12169 static bool
12170 try_catch_may_fallthru (const_tree stmt)
12172 tree_stmt_iterator i;
12174 /* If the TRY block can fall through, the whole TRY_CATCH can
12175 fall through. */
12176 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12177 return true;
12179 i = tsi_start (TREE_OPERAND (stmt, 1));
12180 switch (TREE_CODE (tsi_stmt (i)))
12182 case CATCH_EXPR:
12183 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12184 catch expression and a body. The whole TRY_CATCH may fall
12185 through iff any of the catch bodies falls through. */
12186 for (; !tsi_end_p (i); tsi_next (&i))
12188 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12189 return true;
12191 return false;
12193 case EH_FILTER_EXPR:
12194 /* The exception filter expression only matters if there is an
12195 exception. If the exception does not match EH_FILTER_TYPES,
12196 we will execute EH_FILTER_FAILURE, and we will fall through
12197 if that falls through. If the exception does match
12198 EH_FILTER_TYPES, the stack unwinder will continue up the
12199 stack, so we will not fall through. We don't know whether we
12200 will throw an exception which matches EH_FILTER_TYPES or not,
12201 so we just ignore EH_FILTER_TYPES and assume that we might
12202 throw an exception which doesn't match. */
12203 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12205 default:
12206 /* This case represents statements to be executed when an
12207 exception occurs. Those statements are implicitly followed
12208 by a RESX statement to resume execution after the exception.
12209 So in this case the TRY_CATCH never falls through. */
12210 return false;
12214 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12215 need not be 100% accurate; simply be conservative and return true if we
12216 don't know. This is used only to avoid stupidly generating extra code.
12217 If we're wrong, we'll just delete the extra code later. */
12219 bool
12220 block_may_fallthru (const_tree block)
12222 /* This CONST_CAST is okay because expr_last returns its argument
12223 unmodified and we assign it to a const_tree. */
12224 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12226 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12228 case GOTO_EXPR:
12229 case RETURN_EXPR:
12230 /* Easy cases. If the last statement of the block implies
12231 control transfer, then we can't fall through. */
12232 return false;
12234 case SWITCH_EXPR:
12235 /* If there is a default: label or case labels cover all possible
12236 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12237 to some case label in all cases and all we care about is whether the
12238 SWITCH_BODY falls through. */
12239 if (SWITCH_ALL_CASES_P (stmt))
12240 return block_may_fallthru (SWITCH_BODY (stmt));
12241 return true;
12243 case COND_EXPR:
12244 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12245 return true;
12246 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12248 case BIND_EXPR:
12249 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12251 case TRY_CATCH_EXPR:
12252 return try_catch_may_fallthru (stmt);
12254 case TRY_FINALLY_EXPR:
12255 /* The finally clause is always executed after the try clause,
12256 so if it does not fall through, then the try-finally will not
12257 fall through. Otherwise, if the try clause does not fall
12258 through, then when the finally clause falls through it will
12259 resume execution wherever the try clause was going. So the
12260 whole try-finally will only fall through if both the try
12261 clause and the finally clause fall through. */
12262 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12263 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12265 case EH_ELSE_EXPR:
12266 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12268 case MODIFY_EXPR:
12269 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12270 stmt = TREE_OPERAND (stmt, 1);
12271 else
12272 return true;
12273 /* FALLTHRU */
12275 case CALL_EXPR:
12276 /* Functions that do not return do not fall through. */
12277 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12279 case CLEANUP_POINT_EXPR:
12280 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12282 case TARGET_EXPR:
12283 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12285 case ERROR_MARK:
12286 return true;
12288 default:
12289 return lang_hooks.block_may_fallthru (stmt);
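/* Illustrative sketch (assumption): a block whose last statement is
   "return x;" cannot fall through (RETURN_EXPR above), and neither can one
   ending in a call to a noreturn function such as abort (), because the
   CALL_EXPR case checks ECF_NORETURN; a block ending in a plain assignment
   conservatively reports true.  */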
12293 /* True if we are using EH to handle cleanups. */
12294 static bool using_eh_for_cleanups_flag = false;
12296 /* This routine is called from front ends to indicate eh should be used for
12297 cleanups. */
12298 void
12299 using_eh_for_cleanups (void)
12301 using_eh_for_cleanups_flag = true;
12304 /* Query whether EH is used for cleanups. */
12305 bool
12306 using_eh_for_cleanups_p (void)
12308 return using_eh_for_cleanups_flag;
12311 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12312 const char *
12313 get_tree_code_name (enum tree_code code)
12315 const char *invalid = "<invalid tree code>";
12317 /* The tree_code enum promotes to signed, but we could be getting
12318 invalid values, so force an unsigned comparison. */
12319 if (unsigned (code) >= MAX_TREE_CODES)
12321 if ((unsigned)code == 0xa5a5)
12322 return "ggc_freed";
12323 return invalid;
12326 return tree_code_name[code];
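/* Illustrative sketch (assumption): get_tree_code_name (PLUS_EXPR) yields
   "plus_expr" as spelled in tree.def, while a stale node whose code was
   poisoned to 0xa5a5 by the garbage collector is reported as "ggc_freed"
   instead of indexing past the table.  */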
12329 /* Drops the TREE_OVERFLOW flag from T. */
12331 tree
12332 drop_tree_overflow (tree t)
12334 gcc_checking_assert (TREE_OVERFLOW (t));
12336 /* For tree codes with a sharing machinery re-build the result. */
12337 if (poly_int_tree_p (t))
12338 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12340 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12341 and canonicalize the result. */
12342 if (TREE_CODE (t) == VECTOR_CST)
12344 tree_vector_builder builder;
12345 builder.new_unary_operation (TREE_TYPE (t), t, true);
12346 unsigned int count = builder.encoded_nelts ();
12347 for (unsigned int i = 0; i < count; ++i)
12349 tree elt = VECTOR_CST_ELT (t, i);
12350 if (TREE_OVERFLOW (elt))
12351 elt = drop_tree_overflow (elt);
12352 builder.quick_push (elt);
12354 return builder.build ();
12357 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12358 and drop the flag. */
12359 t = copy_node (t);
12360 TREE_OVERFLOW (t) = 0;
12362 /* For constants that contain nested constants, drop the flag
12363 from those as well. */
12364 if (TREE_CODE (t) == COMPLEX_CST)
12366 if (TREE_OVERFLOW (TREE_REALPART (t)))
12367 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12368 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12369 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12372 return t;
12375 /* Given a memory reference expression T, return its base address.
12376 The base address of a memory reference expression is the main
12377 object being referenced. For instance, the base address for
12378 'array[i].fld[j]' is 'array'. You can think of this as stripping
12379 away the offset part from a memory address.
12381 This function calls handled_component_p to strip away all the inner
12382 parts of the memory reference until it reaches the base object. */
12384 tree
12385 get_base_address (tree t)
12387 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12388 t = TREE_OPERAND (t, 0);
12389 while (handled_component_p (t))
12390 t = TREE_OPERAND (t, 0);
12392 if ((TREE_CODE (t) == MEM_REF
12393 || TREE_CODE (t) == TARGET_MEM_REF)
12394 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12395 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12397 return t;
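/* Illustrative sketch (assumption): for the reference a.b[i].c the chain of
   handled components is peeled back to the decl for "a"; for a MEM_REF
   whose address operand is &a the ADDR_EXPR is looked through and "a" is
   returned as well, whereas a MEM_REF based on a pointer SSA name is itself
   the base.  */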
12400 /* Return a tree of sizetype representing the size, in bytes, of the element
12401 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12403 tree
12404 array_ref_element_size (tree exp)
12406 tree aligned_size = TREE_OPERAND (exp, 3);
12407 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12408 location_t loc = EXPR_LOCATION (exp);
12410 /* If a size was specified in the ARRAY_REF, it's the size measured
12411 in alignment units of the element type. So multiply by that value. */
12412 if (aligned_size)
12414 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12415 sizetype from another type of the same width and signedness. */
12416 if (TREE_TYPE (aligned_size) != sizetype)
12417 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12418 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12419 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12422 /* Otherwise, take the size from that of the element type. Substitute
12423 any PLACEHOLDER_EXPR that we have. */
12424 else
12425 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12428 /* Return a tree representing the lower bound of the array mentioned in
12429 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12431 tree
12432 array_ref_low_bound (tree exp)
12434 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12436 /* If a lower bound is specified in EXP, use it. */
12437 if (TREE_OPERAND (exp, 2))
12438 return TREE_OPERAND (exp, 2);
12440 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12441 substituting for a PLACEHOLDER_EXPR as needed. */
12442 if (domain_type && TYPE_MIN_VALUE (domain_type))
12443 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12445 /* Otherwise, return a zero of the appropriate type. */
12446 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12447 return (idxtype == error_mark_node
12448 ? integer_zero_node : build_int_cst (idxtype, 0));
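/* Illustrative sketch (assumption): for a C array the domain minimum is 0,
   so array_ref_low_bound yields zero, while for a Fortran or Ada array
   declared over 1..N the lower bound 1 is taken from TYPE_MIN_VALUE of the
   domain instead.  */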
12451 /* Return a tree representing the upper bound of the array mentioned in
12452 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12454 tree
12455 array_ref_up_bound (tree exp)
12457 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12459 /* If there is a domain type and it has an upper bound, use it, substituting
12460 for a PLACEHOLDER_EXPR as needed. */
12461 if (domain_type && TYPE_MAX_VALUE (domain_type))
12462 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12464 /* Otherwise fail. */
12465 return NULL_TREE;
12468 /* Returns true if REF is an array reference, component reference,
12469 or memory reference to an array at the end of a structure.
12470 If this is the case, the array may be allocated larger
12471 than its upper bound implies. */
12473 bool
12474 array_at_struct_end_p (tree ref)
12476 tree atype;
12478 if (TREE_CODE (ref) == ARRAY_REF
12479 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12481 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12482 ref = TREE_OPERAND (ref, 0);
12484 else if (TREE_CODE (ref) == COMPONENT_REF
12485 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12486 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12487 else if (TREE_CODE (ref) == MEM_REF)
12489 tree arg = TREE_OPERAND (ref, 0);
12490 if (TREE_CODE (arg) == ADDR_EXPR)
12491 arg = TREE_OPERAND (arg, 0);
12492 tree argtype = TREE_TYPE (arg);
12493 if (TREE_CODE (argtype) == RECORD_TYPE)
12495 if (tree fld = last_field (argtype))
12497 atype = TREE_TYPE (fld);
12498 if (TREE_CODE (atype) != ARRAY_TYPE)
12499 return false;
12500 if (VAR_P (arg) && DECL_SIZE (fld))
12501 return false;
12503 else
12504 return false;
12506 else
12507 return false;
12509 else
12510 return false;
12512 if (TREE_CODE (ref) == STRING_CST)
12513 return false;
12515 tree ref_to_array = ref;
12516 while (handled_component_p (ref))
12518 /* If the reference chain contains a component reference to a
12519 non-union type and another field follows, the reference
12520 is not at the end of a structure. */
12521 if (TREE_CODE (ref) == COMPONENT_REF)
12523 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12525 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12526 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12527 nextf = DECL_CHAIN (nextf);
12528 if (nextf)
12529 return false;
12532 /* If we have a multi-dimensional array we do not consider
12533 a non-innermost dimension as a flex array if the whole
12534 multi-dimensional array is at struct end.
12535 Same for an array of aggregates with a trailing array
12536 member. */
12537 else if (TREE_CODE (ref) == ARRAY_REF)
12538 return false;
12539 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12541 /* If we view an underlying object as something else, then what we
12542 gathered up to now is what we have to rely on. */
12543 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12544 break;
12545 else
12546 gcc_unreachable ();
12548 ref = TREE_OPERAND (ref, 0);
12551 /* The array is now at struct end. Treat flexible arrays as
12552 always subject to extension, even into just the padding constrained by
12553 an underlying decl. */
12554 if (! TYPE_SIZE (atype)
12555 || ! TYPE_DOMAIN (atype)
12556 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12557 return true;
12559 /* If the reference is based on a declared entity, the size of the array
12560 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
12561 ref = get_base_address (ref);
12562 if (ref
12563 && DECL_P (ref)
12564 && !(flag_unconstrained_commons
12565 && VAR_P (ref) && DECL_COMMON (ref))
12566 && DECL_SIZE_UNIT (ref)
12567 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12569 /* Check whether the array domain covers all of the available
12570 padding. */
12571 poly_int64 offset;
12572 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12573 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12574 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12575 return true;
12576 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12577 return true;
12579 /* If at least one extra element fits it is a flexarray. */
12580 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12581 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
12582 + 2)
12583 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
12584 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
12585 return true;
12587 return false;
12590 return true;
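/* Illustrative sketch (assumption): for "struct S { int n; char buf[]; }"
   the reference s->buf[i] is an array at struct end and the function
   returns true, while for "struct T { char buf[4]; int n; }" it returns
   false because another field follows the array.  */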
12593 /* Return a tree representing the offset, in bytes, of the field referenced
12594 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12596 tree
12597 component_ref_field_offset (tree exp)
12599 tree aligned_offset = TREE_OPERAND (exp, 2);
12600 tree field = TREE_OPERAND (exp, 1);
12601 location_t loc = EXPR_LOCATION (exp);
12603 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12604 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12605 value. */
12606 if (aligned_offset)
12608 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12609 sizetype from another type of the same width and signedness. */
12610 if (TREE_TYPE (aligned_offset) != sizetype)
12611 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12612 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12613 size_int (DECL_OFFSET_ALIGN (field)
12614 / BITS_PER_UNIT));
12617 /* Otherwise, take the offset from that of the field. Substitute
12618 any PLACEHOLDER_EXPR that we have. */
12619 else
12620 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12623 /* Given the initializer INIT, return the initializer for the field
12624 DECL if it exists, otherwise null. Used to obtain the initializer
12625 for a flexible array member and determine its size. */
12627 static tree
12628 get_initializer_for (tree init, tree decl)
12630 STRIP_NOPS (init);
12632 tree fld, fld_init;
12633 unsigned HOST_WIDE_INT i;
12634 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
12636 if (decl == fld)
12637 return fld_init;
12639 if (TREE_CODE (fld) == CONSTRUCTOR)
12641 fld_init = get_initializer_for (fld_init, decl);
12642 if (fld_init)
12643 return fld_init;
12647 return NULL_TREE;
12650 /* Determines the size of the member referenced by the COMPONENT_REF
12651 REF, using its initializer expression if necessary in order to
12652 determine the size of an initialized flexible array member.
12653 If non-null, set *SAM when REF refers to an interior zero-length
12654 array or a trailing one-element array.
12655 Returns the size as sizetype (which might be zero for an object
12656 with an uninitialized flexible array member) or null if the size
12657 cannot be determined. */
12659 tree
12660 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
12662 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
12664 special_array_member sambuf;
12665 if (!sam)
12666 sam = &sambuf;
12667 *sam = special_array_member::none;
12669 /* The object/argument referenced by the COMPONENT_REF and its type. */
12670 tree arg = TREE_OPERAND (ref, 0);
12671 tree argtype = TREE_TYPE (arg);
12672 /* The referenced member. */
12673 tree member = TREE_OPERAND (ref, 1);
12675 tree memsize = DECL_SIZE_UNIT (member);
12676 if (memsize)
12678 tree memtype = TREE_TYPE (member);
12679 if (TREE_CODE (memtype) != ARRAY_TYPE)
12680 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
12681 to the type of a class with a virtual base which doesn't
12682 reflect the size of the virtual's members (see pr97595).
12683 If that's the case fail for now and implement something
12684 more robust in the future. */
12685 return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
12686 ? memsize : NULL_TREE);
12688 bool trailing = array_at_struct_end_p (ref);
12689 bool zero_length = integer_zerop (memsize);
12690 if (!trailing && !zero_length)
12691 /* MEMBER is either an interior array or is an array with
12692 more than one element. */
12693 return memsize;
12695 if (zero_length)
12697 if (trailing)
12698 *sam = special_array_member::trail_0;
12699 else
12701 *sam = special_array_member::int_0;
12702 memsize = NULL_TREE;
12706 if (!zero_length)
12707 if (tree dom = TYPE_DOMAIN (memtype))
12708 if (tree min = TYPE_MIN_VALUE (dom))
12709 if (tree max = TYPE_MAX_VALUE (dom))
12710 if (TREE_CODE (min) == INTEGER_CST
12711 && TREE_CODE (max) == INTEGER_CST)
12713 offset_int minidx = wi::to_offset (min);
12714 offset_int maxidx = wi::to_offset (max);
12715 offset_int neltsm1 = maxidx - minidx;
12716 if (neltsm1 > 0)
12717 /* MEMBER is an array with more than one element. */
12718 return memsize;
12720 if (neltsm1 == 0)
12721 *sam = special_array_member::trail_1;
12724 /* For a reference to a zero- or one-element array member of a union
12725 use the size of the union instead of the size of the member. */
12726 if (TREE_CODE (argtype) == UNION_TYPE)
12727 memsize = TYPE_SIZE_UNIT (argtype);
12730 /* MEMBER is either a bona fide flexible array member, or a zero-length
12731 array member, or an array of length one treated as such. */
12733 /* If the reference is to a declared object and the member is a true
12734 flexible array, try to determine its size from its initializer. */
12735 poly_int64 baseoff = 0;
12736 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
12737 if (!base || !VAR_P (base))
12739 if (*sam != special_array_member::int_0)
12740 return NULL_TREE;
12742 if (TREE_CODE (arg) != COMPONENT_REF)
12743 return NULL_TREE;
12745 base = arg;
12746 while (TREE_CODE (base) == COMPONENT_REF)
12747 base = TREE_OPERAND (base, 0);
12748 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
12751 /* BASE is the declared object of which MEMBER is either a member
12752 or that is cast to ARGTYPE (e.g., a char buffer used to store
12753 an ARGTYPE object). */
12754 tree basetype = TREE_TYPE (base);
12756 /* Determine the base type of the referenced object. If it's
12757 the same as ARGTYPE and MEMBER has a known size, return it. */
12758 tree bt = basetype;
12759 if (*sam != special_array_member::int_0)
12760 while (TREE_CODE (bt) == ARRAY_TYPE)
12761 bt = TREE_TYPE (bt);
12762 bool typematch = useless_type_conversion_p (argtype, bt);
12763 if (memsize && typematch)
12764 return memsize;
12766 memsize = NULL_TREE;
12768 if (typematch)
12769 /* MEMBER is a true flexible array member. Compute its size from
12770 the initializer of the BASE object if it has one. */
12771 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
12772 if (init != error_mark_node)
12774 init = get_initializer_for (init, member);
12775 if (init)
12777 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
12778 if (tree refsize = TYPE_SIZE_UNIT (argtype))
12780 /* Use the larger of the initializer size and the tail
12781 padding in the enclosing struct. */
12782 poly_int64 rsz = tree_to_poly_int64 (refsize);
12783 rsz -= baseoff;
12784 if (known_lt (tree_to_poly_int64 (memsize), rsz))
12785 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
12788 baseoff = 0;
12792 if (!memsize)
12794 if (typematch)
12796 if (DECL_P (base)
12797 && DECL_EXTERNAL (base)
12798 && bt == basetype
12799 && *sam != special_array_member::int_0)
12800 /* The size of a flexible array member of an extern struct
12801 with no initializer cannot be determined (it's defined
12802 in another translation unit and can have an initializer
12803 with an arbitrary number of elements). */
12804 return NULL_TREE;
12806 /* Use the size of the base struct or, for interior zero-length
12807 arrays, the size of the enclosing type. */
12808 memsize = TYPE_SIZE_UNIT (bt);
12810 else if (DECL_P (base))
12811 /* Use the size of the BASE object (possibly an array of some
12812 other type such as char used to store the struct). */
12813 memsize = DECL_SIZE_UNIT (base);
12814 else
12815 return NULL_TREE;
12818 /* If the flexible array member has a known size use the greater
12819 of it and the tail padding in the enclosing struct.
12820 Otherwise, when the size of the flexible array member is unknown
12821 and the referenced object is not a struct, use the size of its
12822 type when known. This detects sizes of array buffers when cast
12823 to struct types with flexible array members. */
12824 if (memsize)
12826 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
12827 if (known_lt (baseoff, memsz64))
12829 memsz64 -= baseoff;
12830 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
12832 return size_zero_node;
12835 /* Return "don't know" for an external non-array object since its
12836 flexible array member can be initialized to have any number of
12837 elements. Otherwise, return zero because the flexible array
12838 member has no elements. */
12839 return (DECL_P (base)
12840 && DECL_EXTERNAL (base)
12841 && (!typematch
12842 || TREE_CODE (basetype) != ARRAY_TYPE)
12843 ? NULL_TREE : size_zero_node);
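/* Illustrative sketch (assumption): for
     static struct S { int n; char a[]; } s = { 1, "abc" };
   the member "a" has no DECL_SIZE, so its size is recovered from the
   initializer: TYPE_SIZE_UNIT of the initializer type gives 4 bytes
   (including the terminating NUL), possibly enlarged to cover any tail
   padding of the enclosing object.  */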
12846 /* Return the machine mode of T. For vectors, returns the mode of the
12847 inner type. The main use case is to feed the result to HONOR_NANS,
12848 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12850 machine_mode
12851 element_mode (const_tree t)
12853 if (!TYPE_P (t))
12854 t = TREE_TYPE (t);
12855 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12856 t = TREE_TYPE (t);
12857 return TYPE_MODE (t);
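/* Illustrative sketch (assumption): for a vector of floats or a complex
   double value element_mode returns SFmode and DFmode respectively, i.e.
   the mode of the scalar element, which is what HONOR_NANS wants rather
   than the BLKmode the whole vector type may report.  */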
12860 /* Vector types need to re-check the target flags each time we report
12861 the machine mode. We need to do this because attribute target can
12862 change the result of vector_mode_supported_p and have_regs_of_mode
12863 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12864 change on a per-function basis. */
12865 /* ??? Possibly a better solution is to run through all the types
12866 referenced by a function and re-compute the TYPE_MODE once, rather
12867 than make the TYPE_MODE macro call a function. */
12869 machine_mode
12870 vector_type_mode (const_tree t)
12872 machine_mode mode;
12874 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
12876 mode = t->type_common.mode;
12877 if (VECTOR_MODE_P (mode)
12878 && (!targetm.vector_mode_supported_p (mode)
12879 || !have_regs_of_mode[mode]))
12881 scalar_int_mode innermode;
12883 /* For integers, try mapping it to a same-sized scalar mode. */
12884 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
12886 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
12887 * GET_MODE_BITSIZE (innermode));
12888 scalar_int_mode mode;
12889 if (int_mode_for_size (size, 0).exists (&mode)
12890 && have_regs_of_mode[mode])
12891 return mode;
12894 return BLKmode;
12897 return mode;
12900 /* Return the size in bits of each element of vector type TYPE. */
12902 unsigned int
12903 vector_element_bits (const_tree type)
12905 gcc_checking_assert (VECTOR_TYPE_P (type));
12906 if (VECTOR_BOOLEAN_TYPE_P (type))
12907 return TYPE_PRECISION (TREE_TYPE (type));
12908 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
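/* Illustrative sketch (assumption): for a four-element vector of floats
   each element is 32 bits wide, so vector_element_bits returns 32; for a
   vector boolean type (e.g. a target mask type) the per-element width is
   TYPE_PRECISION of the element rather than its size.  */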
12911 /* Calculate the size in bits of each element of vector type TYPE
12912 and return the result as a tree of type bitsizetype. */
12914 tree
12915 vector_element_bits_tree (const_tree type)
12917 gcc_checking_assert (VECTOR_TYPE_P (type));
12918 if (VECTOR_BOOLEAN_TYPE_P (type))
12919 return bitsize_int (vector_element_bits (type));
12920 return TYPE_SIZE (TREE_TYPE (type));
12923 /* Verify that basic properties of T match TV and thus T can be a variant of
12924 TV. TV should be the more specified variant (i.e. the main variant). */
12926 static bool
12927 verify_type_variant (const_tree t, tree tv)
12929 /* Type variant can differ by:
12931 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12932 ENCODE_QUAL_ADDR_SPACE.
12933 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
12934 in this case some values may not be set in the variant types
12935 (see TYPE_COMPLETE_P checks).
12936 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12937 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12938 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12939 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12940 - during LTO by TYPE_CONTEXT if the type is TYPE_FILE_SCOPE_P;
12941 this is necessary to make it possible to merge types from different TUs
12942 - arrays, pointers and references may have TREE_TYPE that is a variant
12943 of TREE_TYPE of their main variants.
12944 - aggregates may have new TYPE_FIELDS list that list variants of
12945 the main variant TYPE_FIELDS.
12946 - vector types may differ by TYPE_VECTOR_OPAQUE
12949 /* Convenience macro for matching individual fields. */
12950 #define verify_variant_match(flag) \
12951 do { \
12952 if (flag (tv) != flag (t)) \
12954 error ("type variant differs by %s", #flag); \
12955 debug_tree (tv); \
12956 return false; \
12958 } while (false)
12960 /* tree_base checks. */
12962 verify_variant_match (TREE_CODE);
12963 /* FIXME: Ada builds non-artificial variants of artificial types. */
12964 #if 0
12965 if (TYPE_ARTIFICIAL (tv))
12966 verify_variant_match (TYPE_ARTIFICIAL);
12967 #endif
12968 if (POINTER_TYPE_P (tv))
12969 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12970 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
12971 verify_variant_match (TYPE_UNSIGNED);
12972 verify_variant_match (TYPE_PACKED);
12973 if (TREE_CODE (t) == REFERENCE_TYPE)
12974 verify_variant_match (TYPE_REF_IS_RVALUE);
12975 if (AGGREGATE_TYPE_P (t))
12976 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
12977 else
12978 verify_variant_match (TYPE_SATURATING);
12979 /* FIXME: This check triggers during the libstdc++ build. */
12980 #if 0
12981 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
12982 verify_variant_match (TYPE_FINAL_P);
12983 #endif
12985 /* tree_type_common checks. */
12987 if (COMPLETE_TYPE_P (t))
12989 verify_variant_match (TYPE_MODE);
12990 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
12991 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
12992 verify_variant_match (TYPE_SIZE);
12993 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
12994 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
12995 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
12997 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
12998 TYPE_SIZE_UNIT (tv), 0));
12999 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13000 debug_tree (tv);
13001 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13002 debug_tree (TYPE_SIZE_UNIT (tv));
13003 error ("type%'s %<TYPE_SIZE_UNIT%>");
13004 debug_tree (TYPE_SIZE_UNIT (t));
13005 return false;
13007 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13009 verify_variant_match (TYPE_PRECISION);
13010 if (RECORD_OR_UNION_TYPE_P (t))
13011 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13012 else if (TREE_CODE (t) == ARRAY_TYPE)
13013 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13014 /* During LTO we merge variant lists from different translation units
13015 that may differ by TYPE_CONTEXT, which in turn may point
13016 to TRANSLATION_UNIT_DECL.
13017 Ada also builds variants of types with different TYPE_CONTEXT. */
13018 #if 0
13019 if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13020 verify_variant_match (TYPE_CONTEXT);
13021 #endif
13022 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13023 verify_variant_match (TYPE_STRING_FLAG);
13024 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13025 verify_variant_match (TYPE_CXX_ODR_P);
13026 if (TYPE_ALIAS_SET_KNOWN_P (t))
13028 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13029 debug_tree (tv);
13030 return false;
13033 /* tree_type_non_common checks. */
13035 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13036 and dangles the pointer from time to time. */
13037 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13038 && (in_lto_p || !TYPE_VFIELD (tv)
13039 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13041 error ("type variant has different %<TYPE_VFIELD%>");
13042 debug_tree (tv);
13043 return false;
13045 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13046 || TREE_CODE (t) == INTEGER_TYPE
13047 || TREE_CODE (t) == BOOLEAN_TYPE
13048 || TREE_CODE (t) == REAL_TYPE
13049 || TREE_CODE (t) == FIXED_POINT_TYPE)
13051 verify_variant_match (TYPE_MAX_VALUE);
13052 verify_variant_match (TYPE_MIN_VALUE);
13054 if (TREE_CODE (t) == METHOD_TYPE)
13055 verify_variant_match (TYPE_METHOD_BASETYPE);
13056 if (TREE_CODE (t) == OFFSET_TYPE)
13057 verify_variant_match (TYPE_OFFSET_BASETYPE);
13058 if (TREE_CODE (t) == ARRAY_TYPE)
13059 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13060 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13061 or even the type's main variant. This is needed to make bootstrap pass
13062 and the bug seems new in GCC 5.
13063 The C++ FE should be updated to make this consistent and we should check
13064 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13065 is a match with the main variant.
13067 Also disable the check for Java for now because of a parser hack that
13068 builds a dummy BINFO first and then sometimes replaces it with the real
13069 BINFO in some of the copies. */
13070 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13071 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13072 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13073 Since there is no cheap way to tell a C++ type from a Java type without
13074 LTO, do the checking at LTO time only. */
13075 && (in_lto_p && odr_type_p (t)))
13077 error ("type variant has different %<TYPE_BINFO%>");
13078 debug_tree (tv);
13079 error ("type variant%'s %<TYPE_BINFO%>");
13080 debug_tree (TYPE_BINFO (tv));
13081 error ("type%'s %<TYPE_BINFO%>");
13082 debug_tree (TYPE_BINFO (t));
13083 return false;
13086 /* Check various uses of TYPE_VALUES_RAW. */
13087 if (TREE_CODE (t) == ENUMERAL_TYPE
13088 && TYPE_VALUES (t))
13089 verify_variant_match (TYPE_VALUES);
13090 else if (TREE_CODE (t) == ARRAY_TYPE)
13091 verify_variant_match (TYPE_DOMAIN);
13092 /* Permit incomplete variants of a complete type. While FEs may complete
13093 all variants, this does not happen for C++ templates in all cases. */
13094 else if (RECORD_OR_UNION_TYPE_P (t)
13095 && COMPLETE_TYPE_P (t)
13096 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13098 tree f1, f2;
13100 /* Fortran builds qualified variants as new records with items of
13101 qualified type. Verify that they look the same. */
13102 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13103 f1 && f2;
13104 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13105 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13106 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13107 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13108 /* FIXME: gfc_nonrestricted_type builds all types as variants
13109 with the exception of pointer types. It deeply copies the type,
13110 which means that we may end up with a variant type
13111 referring to a non-variant pointer. We may change it to
13112 produce types as variants, too, like
13113 objc_get_protocol_qualified_type does. */
13114 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13115 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13116 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13117 break;
13118 if (f1 || f2)
13120 error ("type variant has different %<TYPE_FIELDS%>");
13121 debug_tree (tv);
13122 error ("first mismatch is field");
13123 debug_tree (f1);
13124 error ("and field");
13125 debug_tree (f2);
13126 return false;
13129 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13130 verify_variant_match (TYPE_ARG_TYPES);
13131 /* For C++ the qualified variant of an array type is really an array type
13132 of qualified TREE_TYPE.
13133 objc builds variants of pointers where the pointed-to type is a variant,
13134 too, in objc_get_protocol_qualified_type. */
13135 if (TREE_TYPE (t) != TREE_TYPE (tv)
13136 && ((TREE_CODE (t) != ARRAY_TYPE
13137 && !POINTER_TYPE_P (t))
13138 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13139 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13141 error ("type variant has different %<TREE_TYPE%>");
13142 debug_tree (tv);
13143 error ("type variant%'s %<TREE_TYPE%>");
13144 debug_tree (TREE_TYPE (tv));
13145 error ("type%'s %<TREE_TYPE%>");
13146 debug_tree (TREE_TYPE (t));
13147 return false;
13149 if (type_with_alias_set_p (t)
13150 && !gimple_canonical_types_compatible_p (t, tv, false))
13152 error ("type is not compatible with its variant");
13153 debug_tree (tv);
13154 error ("type variant%'s %<TREE_TYPE%>");
13155 debug_tree (TREE_TYPE (tv));
13156 error ("type%'s %<TREE_TYPE%>");
13157 debug_tree (TREE_TYPE (t));
13158 return false;
13160 return true;
13161 #undef verify_variant_match
13165 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13166 the middle-end types_compatible_p function. It needs to avoid
13167 claiming types are different for types that should be treated
13168 the same with respect to TBAA. Canonical types are also used
13169 for IL consistency checks via the useless_type_conversion_p
13170 predicate which does not handle all type kinds itself but falls
13171 back to pointer-comparison of TYPE_CANONICAL for aggregates
13172 for example. */
13174 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13175 type calculation because we need to allow inter-operability between signed
13176 and unsigned variants. */
13178 bool
13179 type_with_interoperable_signedness (const_tree type)
13181 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13182 signed char and unsigned char. Similarly, the Fortran FE builds
13183 C_SIZE_T as a signed type, while C defines it unsigned. */
13185 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13186 == INTEGER_TYPE
13187 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13188 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13191 /* Return true iff T1 and T2 are structurally identical as far as
13192 TBAA is concerned.
13193 This function is used both by lto.c canonical type merging and by the
13194 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of types
13195 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13196 only for LTO because only in these cases does TYPE_CANONICAL equivalence
13197 correspond to the one defined by gimple_canonical_types_compatible_p. */
13199 bool
13200 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13201 bool trust_type_canonical)
13203 /* Type variants should be same as the main variant. When not doing sanity
13204 checking to verify this fact, go to main variants and save some work. */
13205 if (trust_type_canonical)
13207 t1 = TYPE_MAIN_VARIANT (t1);
13208 t2 = TYPE_MAIN_VARIANT (t2);
13211 /* Check first for the obvious case of pointer identity. */
13212 if (t1 == t2)
13213 return true;
13215 /* Check that we have two types to compare. */
13216 if (t1 == NULL_TREE || t2 == NULL_TREE)
13217 return false;
13219 /* We consider complete types always compatible with incomplete type.
13220 This does not make sense for canonical type calculation and thus we
13221 need to ensure that we are never called on it.
13223 FIXME: For more correctness the function probably should have three modes:
13224 1) a mode assuming that types are complete and matching their structure,
13225 2) mode allowing incomplete types but producing equivalence classes
13226 and thus ignoring all info from complete types
13227 3) mode allowing incomplete types to match complete but checking
13228 compatibility between complete types.
13230 1 and 2 can be used for canonical type calculation. 3 is the real
13231 definition of type compatibility that can be used e.g. for warnings during
13232 declaration merging. */
13234 gcc_assert (!trust_type_canonical
13235 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13237 /* If the types have been previously registered and found equal
13238 they still are. */
13240 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13241 && trust_type_canonical)
13243 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13244 they are always NULL, but they are set to non-NULL for types
13245 constructed by build_pointer_type and variants. In this case the
13246 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13247 all pointers are considered equal). Be sure to not return false
13248 negatives. */
13249 gcc_checking_assert (canonical_type_used_p (t1)
13250 && canonical_type_used_p (t2));
13251 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13254 /* For types where we do ODR based TBAA the canonical type is always
13255 set correctly, so we know that types are different if their
13256 canonical types do not match. */
13257 if (trust_type_canonical
13258 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13259 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13260 return false;
13262 /* Can't be the same type if the types don't have the same code. */
13263 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13264 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13265 return false;
13267 /* Qualifiers do not matter for canonical type comparison purposes. */
13269 /* Void types and nullptr types are always the same. */
13270 if (TREE_CODE (t1) == VOID_TYPE
13271 || TREE_CODE (t1) == NULLPTR_TYPE)
13272 return true;
13274 /* Can't be the same type if they have different modes. */
13275 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13276 return false;
13278 /* Non-aggregate types can be handled cheaply. */
13279 if (INTEGRAL_TYPE_P (t1)
13280 || SCALAR_FLOAT_TYPE_P (t1)
13281 || FIXED_POINT_TYPE_P (t1)
13282 || TREE_CODE (t1) == VECTOR_TYPE
13283 || TREE_CODE (t1) == COMPLEX_TYPE
13284 || TREE_CODE (t1) == OFFSET_TYPE
13285 || POINTER_TYPE_P (t1))
13287 /* Can't be the same type if they have different precision. */
13288 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13289 return false;
13291 /* In some cases the signed and unsigned types are required to be
13292 inter-operable. */
13293 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13294 && !type_with_interoperable_signedness (t1))
13295 return false;
13297 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13298 interoperable with "signed char". Unless all frontends are revisited
13299 to agree on these types, we must ignore the flag completely. */
13301 /* The Fortran standard defines the C_PTR type to be compatible with every
13302 C pointer. For this reason we need to glob all pointers into one.
13303 Still, pointers in different address spaces are not compatible. */
13304 if (POINTER_TYPE_P (t1))
13306 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13307 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13308 return false;
13311 /* Tail-recurse to components. */
13312 if (TREE_CODE (t1) == VECTOR_TYPE
13313 || TREE_CODE (t1) == COMPLEX_TYPE)
13314 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13315 TREE_TYPE (t2),
13316 trust_type_canonical);
13318 return true;
13321 /* Do type-specific comparisons. */
13322 switch (TREE_CODE (t1))
13324 case ARRAY_TYPE:
13325 /* Array types are the same if the element types are the same and
13326 the number of elements is the same. */
13327 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13328 trust_type_canonical)
13329 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13330 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13331 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13332 return false;
13333 else
13335 tree i1 = TYPE_DOMAIN (t1);
13336 tree i2 = TYPE_DOMAIN (t2);
13338 /* For an incomplete external array, the type domain can be
13339 NULL_TREE. Check this condition also. */
13340 if (i1 == NULL_TREE && i2 == NULL_TREE)
13341 return true;
13342 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13343 return false;
13344 else
13346 tree min1 = TYPE_MIN_VALUE (i1);
13347 tree min2 = TYPE_MIN_VALUE (i2);
13348 tree max1 = TYPE_MAX_VALUE (i1);
13349 tree max2 = TYPE_MAX_VALUE (i2);
13351 /* The minimum/maximum values have to be the same. */
13352 if ((min1 == min2
13353 || (min1 && min2
13354 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13355 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13356 || operand_equal_p (min1, min2, 0))))
13357 && (max1 == max2
13358 || (max1 && max2
13359 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13360 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13361 || operand_equal_p (max1, max2, 0)))))
13362 return true;
13363 else
13364 return false;
13368 case METHOD_TYPE:
13369 case FUNCTION_TYPE:
13370 /* Function types are the same if the return type and argument types
13371 are the same. */
13372 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13373 trust_type_canonical))
13374 return false;
13376 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13377 return true;
13378 else
13380 tree parms1, parms2;
13382 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13383 parms1 && parms2;
13384 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13386 if (!gimple_canonical_types_compatible_p
13387 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13388 trust_type_canonical))
13389 return false;
13392 if (parms1 || parms2)
13393 return false;
13395 return true;
13398 case RECORD_TYPE:
13399 case UNION_TYPE:
13400 case QUAL_UNION_TYPE:
13402 tree f1, f2;
13404 /* Don't try to compare variants of an incomplete type, before
13405 TYPE_FIELDS has been copied around. */
13406 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13407 return true;
13410 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13411 return false;
13413 /* For aggregate types, all the fields must be the same. */
13414 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13415 f1 || f2;
13416 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13418 /* Skip non-fields and zero-sized fields. */
13419 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13420 || (DECL_SIZE (f1)
13421 && integer_zerop (DECL_SIZE (f1)))))
13422 f1 = TREE_CHAIN (f1);
13423 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13424 || (DECL_SIZE (f2)
13425 && integer_zerop (DECL_SIZE (f2)))))
13426 f2 = TREE_CHAIN (f2);
13427 if (!f1 || !f2)
13428 break;
13429 /* The fields must have the same name, offset and type. */
13430 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13431 || !gimple_compare_field_offset (f1, f2)
13432 || !gimple_canonical_types_compatible_p
13433 (TREE_TYPE (f1), TREE_TYPE (f2),
13434 trust_type_canonical))
13435 return false;
13438 /* If one aggregate has more fields than the other, they
13439 are not the same. */
13440 if (f1 || f2)
13441 return false;
13443 return true;
13446 default:
13447 /* Consider all types with language specific trees in them mutually
13448 compatible. This is executed only from verify_type and false
13449 positives can be tolerated. */
13450 gcc_assert (!in_lto_p);
13451 return true;
13455 /* Verify type T. */
13457 void
13458 verify_type (const_tree t)
13460 bool error_found = false;
13461 tree mv = TYPE_MAIN_VARIANT (t);
13462 if (!mv)
13464 error ("main variant is not defined");
13465 error_found = true;
13467 else if (mv != TYPE_MAIN_VARIANT (mv))
13469 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13470 debug_tree (mv);
13471 error_found = true;
13473 else if (t != mv && !verify_type_variant (t, mv))
13474 error_found = true;
13476 tree ct = TYPE_CANONICAL (t);
13477 if (!ct)
13479 else if (TYPE_CANONICAL (t) != ct)
13481 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13482 debug_tree (ct);
13483 error_found = true;
13485 /* Method and function types cannot be used to address memory and thus
13486 TYPE_CANONICAL really matters only for determining useless conversions.
13488 FIXME: The C++ FE produces declarations of builtin functions that are not
13489 compatible with main variants. */
13490 else if (TREE_CODE (t) == FUNCTION_TYPE)
13492 else if (t != ct
13493 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13494 with variably sized arrays because their sizes are possibly
13495 gimplified to different variables. */
13496 && !variably_modified_type_p (ct, NULL)
13497 && !gimple_canonical_types_compatible_p (t, ct, false)
13498 && COMPLETE_TYPE_P (t))
13500 error ("%<TYPE_CANONICAL%> is not compatible");
13501 debug_tree (ct);
13502 error_found = true;
13505 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13506 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13508 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
13509 debug_tree (ct);
13510 error_found = true;
13512 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13514 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
13515 debug_tree (ct);
13516 debug_tree (TYPE_MAIN_VARIANT (ct));
13517 error_found = true;
13521 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13522 if (RECORD_OR_UNION_TYPE_P (t))
13524 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13525 and dangles the pointer from time to time. */
13526 if (TYPE_VFIELD (t)
13527 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13528 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13530 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
13531 debug_tree (TYPE_VFIELD (t));
13532 error_found = true;
13535 else if (TREE_CODE (t) == POINTER_TYPE)
13537 if (TYPE_NEXT_PTR_TO (t)
13538 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13540 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
13541 debug_tree (TYPE_NEXT_PTR_TO (t));
13542 error_found = true;
13545 else if (TREE_CODE (t) == REFERENCE_TYPE)
13547 if (TYPE_NEXT_REF_TO (t)
13548 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13550 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
13551 debug_tree (TYPE_NEXT_REF_TO (t));
13552 error_found = true;
13555 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13556 || TREE_CODE (t) == FIXED_POINT_TYPE)
13558 /* FIXME: The following check should pass:
13559 useless_type_conversion_p (const_cast <tree> (t),
13560 TREE_TYPE (TYPE_MIN_VALUE (t)))
13561 but does not for C sizetypes in LTO. */
13564 /* Check various uses of TYPE_MAXVAL_RAW. */
13565 if (RECORD_OR_UNION_TYPE_P (t))
13567 if (!TYPE_BINFO (t))
13569 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13571 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
13572 debug_tree (TYPE_BINFO (t));
13573 error_found = true;
13575 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
13577 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
13578 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13579 error_found = true;
13582 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13584 if (TYPE_METHOD_BASETYPE (t)
13585 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13586 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13588 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
13589 debug_tree (TYPE_METHOD_BASETYPE (t));
13590 error_found = true;
13593 else if (TREE_CODE (t) == OFFSET_TYPE)
13595 if (TYPE_OFFSET_BASETYPE (t)
13596 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13597 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13599 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
13600 debug_tree (TYPE_OFFSET_BASETYPE (t));
13601 error_found = true;
13604 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13605 || TREE_CODE (t) == FIXED_POINT_TYPE)
13607 /* FIXME: The following check should pass:
13608 useless_type_conversion_p (const_cast <tree> (t),
13609 TREE_TYPE (TYPE_MAX_VALUE (t)))
13610 but does not for C sizetypes in LTO. */
13612 else if (TREE_CODE (t) == ARRAY_TYPE)
13614 if (TYPE_ARRAY_MAX_SIZE (t)
13615 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13617 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
13618 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13619 error_found = true;
13622 else if (TYPE_MAX_VALUE_RAW (t))
13624 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
13625 debug_tree (TYPE_MAX_VALUE_RAW (t));
13626 error_found = true;
13629 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13631 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
13632 debug_tree (TYPE_LANG_SLOT_1 (t));
13633 error_found = true;
13636 /* Check various uses of TYPE_VALUES_RAW. */
13637 if (TREE_CODE (t) == ENUMERAL_TYPE)
13638 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13640 tree value = TREE_VALUE (l);
13641 tree name = TREE_PURPOSE (l);
13643 /* C FE produces INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13644 CONST_DECL of ENUMERAL_TYPE. */
13645 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13647 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
13648 debug_tree (value);
13649 debug_tree (name);
13650 error_found = true;
13652 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13653 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13655 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
13656 "to the enum");
13657 debug_tree (value);
13658 debug_tree (name);
13659 error_found = true;
13661 if (TREE_CODE (name) != IDENTIFIER_NODE)
13663 error ("enum value name is not %<IDENTIFIER_NODE%>");
13664 debug_tree (value);
13665 debug_tree (name);
13666 error_found = true;
13669 else if (TREE_CODE (t) == ARRAY_TYPE)
13671 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13673 error ("array %<TYPE_DOMAIN%> is not integer type");
13674 debug_tree (TYPE_DOMAIN (t));
13675 error_found = true;
13678 else if (RECORD_OR_UNION_TYPE_P (t))
13680 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13682 error ("%<TYPE_FIELDS%> defined in incomplete type");
13683 error_found = true;
13685 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13687 /* TODO: verify properties of decls. */
13688 if (TREE_CODE (fld) == FIELD_DECL)
13690 else if (TREE_CODE (fld) == TYPE_DECL)
13692 else if (TREE_CODE (fld) == CONST_DECL)
13694 else if (VAR_P (fld))
13696 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13698 else if (TREE_CODE (fld) == USING_DECL)
13700 else if (TREE_CODE (fld) == FUNCTION_DECL)
13702 else
13704 error ("wrong tree in %<TYPE_FIELDS%> list");
13705 debug_tree (fld);
13706 error_found = true;
13710 else if (TREE_CODE (t) == INTEGER_TYPE
13711 || TREE_CODE (t) == BOOLEAN_TYPE
13712 || TREE_CODE (t) == OFFSET_TYPE
13713 || TREE_CODE (t) == REFERENCE_TYPE
13714 || TREE_CODE (t) == NULLPTR_TYPE
13715 || TREE_CODE (t) == POINTER_TYPE)
13717 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13719 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
13720 "is %p",
13721 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13722 error_found = true;
13724 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13726 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
13727 debug_tree (TYPE_CACHED_VALUES (t));
13728 error_found = true;
13730 /* Verify just enough of the cache to ensure that no one copied it to a new
13731 type. All copying should go through copy_node, which should clear it. */
13732 else if (TYPE_CACHED_VALUES_P (t))
13734 int i;
13735 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13736 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13737 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13739 error ("wrong %<TYPE_CACHED_VALUES%> entry");
13740 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13741 error_found = true;
13742 break;
13746 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13747 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13749 /* C++ FE uses TREE_PURPOSE to store initial values. */
13750 if (TREE_PURPOSE (l) && in_lto_p)
13752 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
13753 debug_tree (l);
13754 error_found = true;
13756 if (!TYPE_P (TREE_VALUE (l)))
13758 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
13759 debug_tree (l);
13760 error_found = true;
13763 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13765 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
13766 debug_tree (TYPE_VALUES_RAW (t));
13767 error_found = true;
13769 if (TREE_CODE (t) != INTEGER_TYPE
13770 && TREE_CODE (t) != BOOLEAN_TYPE
13771 && TREE_CODE (t) != OFFSET_TYPE
13772 && TREE_CODE (t) != REFERENCE_TYPE
13773 && TREE_CODE (t) != NULLPTR_TYPE
13774 && TREE_CODE (t) != POINTER_TYPE
13775 && TYPE_CACHED_VALUES_P (t))
13777 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
13778 error_found = true;
13781 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13782 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
13783 of a type. */
13784 if (TREE_CODE (t) == METHOD_TYPE
13785 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13787 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
13788 error_found = true;
13791 if (error_found)
13793 debug_tree (const_cast <tree> (t));
13794 internal_error ("%qs failed", __func__);
13799 /* Return 1 if ARG interpreted as signed in its precision is known to be
13800 always non-negative, or 2 if ARG is known to be always negative, or 3 if
13801 ARG may be positive or negative. */
13803 int
13804 get_range_pos_neg (tree arg)
13806 if (arg == error_mark_node)
13807 return 3;
13809 int prec = TYPE_PRECISION (TREE_TYPE (arg));
13810 int cnt = 0;
13811 if (TREE_CODE (arg) == INTEGER_CST)
13813 wide_int w = wi::sext (wi::to_wide (arg), prec);
13814 if (wi::neg_p (w))
13815 return 2;
13816 else
13817 return 1;
13819 while (CONVERT_EXPR_P (arg)
13820 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
13821 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
13823 arg = TREE_OPERAND (arg, 0);
13824 /* Narrower value zero extended into wider type
13825 will always result in positive values. */
13826 if (TYPE_UNSIGNED (TREE_TYPE (arg))
13827 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
13828 return 1;
13829 prec = TYPE_PRECISION (TREE_TYPE (arg));
13830 if (++cnt > 30)
13831 return 3;
13834 if (TREE_CODE (arg) != SSA_NAME)
13835 return 3;
13836 value_range r;
13837 while (!get_global_range_query ()->range_of_expr (r, arg) || r.kind () != VR_RANGE)
13839 gimple *g = SSA_NAME_DEF_STMT (arg);
13840 if (is_gimple_assign (g)
13841 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
13843 tree t = gimple_assign_rhs1 (g);
13844 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
13845 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
13847 if (TYPE_UNSIGNED (TREE_TYPE (t))
13848 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
13849 return 1;
13850 prec = TYPE_PRECISION (TREE_TYPE (t));
13851 arg = t;
13852 if (++cnt > 30)
13853 return 3;
13854 continue;
13857 return 3;
13859 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
13861 /* For unsigned values, the "positive" range comes
13862 below the "negative" range. */
13863 if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
13864 return 1;
13865 if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
13866 return 2;
13868 else
13870 if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
13871 return 1;
13872 if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
13873 return 2;
13875 return 3;
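/* Illustrative usage sketch (not part of the function above; STMT and the
   per-case handling are made up for the example, the accessors are real):

     tree op = gimple_assign_rhs2 (stmt);
     switch (get_range_pos_neg (op))
       {
       case 1:   /* Known non-negative when read as signed.  */
         ...
         break;
       case 2:   /* Known negative.  */
         ...
         break;
       default:  /* 3: sign unknown; be conservative.  */
         ...
         break;
       }
*/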
13881 /* Return true if ARG is marked with the nonnull attribute in the
13882 current function signature. */
13884 bool
13885 nonnull_arg_p (const_tree arg)
13887 tree t, attrs, fntype;
13888 unsigned HOST_WIDE_INT arg_num;
13890 gcc_assert (TREE_CODE (arg) == PARM_DECL
13891 && (POINTER_TYPE_P (TREE_TYPE (arg))
13892 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
13894 /* The static chain decl is always non-null. */
13895 if (arg == cfun->static_chain_decl)
13896 return true;
13898 /* THIS argument of method is always non-NULL. */
13899 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13900 && arg == DECL_ARGUMENTS (cfun->decl)
13901 && flag_delete_null_pointer_checks)
13902 return true;
13904 /* Values passed by reference are always non-NULL. */
13905 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13906 && flag_delete_null_pointer_checks)
13907 return true;
13909 fntype = TREE_TYPE (cfun->decl);
13910 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13912 attrs = lookup_attribute ("nonnull", attrs);
13914 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13915 if (attrs == NULL_TREE)
13916 return false;
13918 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13919 if (TREE_VALUE (attrs) == NULL_TREE)
13920 return true;
13922 /* Get the position number for ARG in the function signature. */
13923 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13925 t = DECL_CHAIN (t), arg_num++)
13927 if (t == arg)
13928 break;
13931 gcc_assert (t == arg);
13933 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13934 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13936 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13937 return true;
13941 return false;
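/* Illustrative example (hypothetical user code): when compiling

     void f (char *a, char *b, char *c) __attribute__ ((nonnull (1, 3)));

   nonnull_arg_p is true for the PARM_DECLs of A and C but not of B; with a
   bare __attribute__ ((nonnull)) it is true for every pointer parameter.
   As handled above, the THIS parameter of a C++ member function and
   reference parameters need no attribute, provided
   -fdelete-null-pointer-checks is in effect.  */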
13944 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
13945 information. */
13947 location_t
13948 set_block (location_t loc, tree block)
13950 location_t pure_loc = get_pure_location (loc);
13951 source_range src_range = get_range_from_loc (line_table, loc);
13952 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
13955 location_t
13956 set_source_range (tree expr, location_t start, location_t finish)
13958 source_range src_range;
13959 src_range.m_start = start;
13960 src_range.m_finish = finish;
13961 return set_source_range (expr, src_range);
13964 location_t
13965 set_source_range (tree expr, source_range src_range)
13967 if (!EXPR_P (expr))
13968 return UNKNOWN_LOCATION;
13970 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
13971 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
13972 pure_loc,
13973 src_range,
13974 NULL);
13975 SET_EXPR_LOCATION (expr, adhoc);
13976 return adhoc;
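/* Illustrative sketch (hypothetical front-end code; the local names are
   made up, the helpers are real): after building a binary expression whose
   tokens span from LHS_LOC to RHS_LOC, the full range can be recorded as

     tree sum = build2_loc (op_loc, PLUS_EXPR, type, lhs, rhs);
     set_source_range (sum, lhs_loc, rhs_loc);

   EXPR_LOCATION (sum) is then an ad-hoc location whose caret is still the
   pure location of OP_LOC but which also carries the range.  */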
13979 /* Return EXPR, potentially wrapped in a location wrapper node carrying LOC,
13980 if !CAN_HAVE_LOCATION_P (expr).
13982 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
13983 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
13985 Wrapper nodes can be identified using location_wrapper_p. */
13987 tree
13988 maybe_wrap_with_location (tree expr, location_t loc)
13990 if (expr == NULL)
13991 return NULL;
13992 if (loc == UNKNOWN_LOCATION)
13993 return expr;
13994 if (CAN_HAVE_LOCATION_P (expr))
13995 return expr;
13996 /* We should only be adding wrappers for constants and for decls,
13997 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
13998 gcc_assert (CONSTANT_CLASS_P (expr)
13999 || DECL_P (expr)
14000 || EXCEPTIONAL_CLASS_P (expr));
14002 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14003 any impact of the wrapper nodes. */
14004 if (EXCEPTIONAL_CLASS_P (expr))
14005 return expr;
14007 /* Compiler-generated temporary variables don't need a wrapper. */
14008 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14009 return expr;
14011 /* If any auto_suppress_location_wrappers are active, don't create
14012 wrappers. */
14013 if (suppress_location_wrappers > 0)
14014 return expr;
14016 tree_code code
14017 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14018 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14019 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14020 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14021 /* Mark this node as being a wrapper. */
14022 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14023 return wrapper;
14026 int suppress_location_wrappers;
14028 /* Return the name of combined function FN, for debugging purposes. */
14030 const char *
14031 combined_fn_name (combined_fn fn)
14033 if (builtin_fn_p (fn))
14035 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14036 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14038 else
14039 return internal_fn_name (as_internal_fn (fn));
14042 /* Return a bitmap with a bit set corresponding to each argument in
14043 a function call type FNTYPE declared with attribute nonnull,
14044 or null if none of the function's arguments are nonnull. The caller
14045 must free the bitmap. */
14047 bitmap
14048 get_nonnull_args (const_tree fntype)
14050 if (fntype == NULL_TREE)
14051 return NULL;
14053 bitmap argmap = NULL;
14054 if (TREE_CODE (fntype) == METHOD_TYPE)
14056 /* The this pointer in C++ non-static member functions is
14057 implicitly nonnull whether or not it's declared as such. */
14058 argmap = BITMAP_ALLOC (NULL);
14059 bitmap_set_bit (argmap, 0);
14062 tree attrs = TYPE_ATTRIBUTES (fntype);
14063 if (!attrs)
14064 return argmap;
14066 /* A function declaration can specify multiple attribute nonnull,
14067 each with zero or more arguments. The loop below creates a bitmap
14068 representing a union of all the arguments. An empty (but non-null)
14069 bitmap means that all arguments have been declared nonnull. */
14070 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14072 attrs = lookup_attribute ("nonnull", attrs);
14073 if (!attrs)
14074 break;
14076 if (!argmap)
14077 argmap = BITMAP_ALLOC (NULL);
14079 if (!TREE_VALUE (attrs))
14081 /* Clear the bitmap in case a previous attribute nonnull
14082 set it and this one overrides it for all arguments. */
14083 bitmap_clear (argmap);
14084 return argmap;
14087 /* Iterate over the indices of the arguments declared nonnull
14088 and set a bit for each. */
14089 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14091 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14092 bitmap_set_bit (argmap, val);
14096 return argmap;
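/* Illustrative usage sketch (STMT and ARGNO are made-up locals; the bitmap
   and gimple accessors are the usual GCC ones, and the warning helper is
   hypothetical):

     bitmap nonnull = get_nonnull_args (gimple_call_fntype (stmt));
     if (nonnull)
       {
         /* An empty bitmap means every argument is declared nonnull.  */
         if (bitmap_empty_p (nonnull) || bitmap_bit_p (nonnull, argno))
           warn_about_null_argument (stmt, argno);   /* hypothetical */
         BITMAP_FREE (nonnull);
       }
*/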
14099 /* Returns true if TYPE is an empty type: a structure or union all of whose
14100 fields are padding or (recursively) empty, or an array of an empty type. */
14102 bool
14103 is_empty_type (const_tree type)
14105 if (RECORD_OR_UNION_TYPE_P (type))
14107 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14108 if (TREE_CODE (field) == FIELD_DECL
14109 && !DECL_PADDING_P (field)
14110 && !is_empty_type (TREE_TYPE (field)))
14111 return false;
14112 return true;
14114 else if (TREE_CODE (type) == ARRAY_TYPE)
14115 return (integer_minus_onep (array_type_nelts (type))
14116 || TYPE_DOMAIN (type) == NULL_TREE
14117 || is_empty_type (TREE_TYPE (type)));
14118 return false;
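/* For example (user-level view; the type names are made up):

     struct empty { };
     struct also_empty { struct empty e; struct empty a[2]; };
     struct not_empty { struct empty e; int i; };

   is_empty_type is true for EMPTY and ALSO_EMPTY but false for NOT_EMPTY,
   since the latter contains a non-padding, non-empty field.  */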
14121 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14122 that shouldn't be passed via stack. */
14124 bool
14125 default_is_empty_record (const_tree type)
14127 if (!abi_version_at_least (12))
14128 return false;
14130 if (type == error_mark_node)
14131 return false;
14133 if (TREE_ADDRESSABLE (type))
14134 return false;
14136 return is_empty_type (TYPE_MAIN_VARIANT (type));
14139 /* Determine whether TYPE is a structure with a flexible array member,
14140 or a union containing such a structure (possibly recursively). */
14142 bool
14143 flexible_array_type_p (const_tree type)
14145 tree x, last;
14146 switch (TREE_CODE (type))
14148 case RECORD_TYPE:
14149 last = NULL_TREE;
14150 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14151 if (TREE_CODE (x) == FIELD_DECL)
14152 last = x;
14153 if (last == NULL_TREE)
14154 return false;
14155 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14156 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14157 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14158 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14159 return true;
14160 return false;
14161 case UNION_TYPE:
14162 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14164 if (TREE_CODE (x) == FIELD_DECL
14165 && flexible_array_type_p (TREE_TYPE (x)))
14166 return true;
14168 return false;
14169 default:
14170 return false;
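/* For example (user code; names are illustrative):

     struct msg { int len; char data[]; };   // flexible array member
     union u { struct msg m; int i; };       // union containing one

   flexible_array_type_p is true for both MSG and U, but false for

     struct fixed { int len; char data[4]; };

   because the last field's array type has a known size.  */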
14174 /* Like int_size_in_bytes, but handle empty records specially. */
14176 HOST_WIDE_INT
14177 arg_int_size_in_bytes (const_tree type)
14179 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14182 /* Like size_in_bytes, but handle empty records specially. */
14184 tree
14185 arg_size_in_bytes (const_tree type)
14187 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14190 /* Return true if an expression with CODE has to have the same result type as
14191 its first operand. */
14193 bool
14194 expr_type_first_operand_type_p (tree_code code)
14196 switch (code)
14198 case NEGATE_EXPR:
14199 case ABS_EXPR:
14200 case BIT_NOT_EXPR:
14201 case PAREN_EXPR:
14202 case CONJ_EXPR:
14204 case PLUS_EXPR:
14205 case MINUS_EXPR:
14206 case MULT_EXPR:
14207 case TRUNC_DIV_EXPR:
14208 case CEIL_DIV_EXPR:
14209 case FLOOR_DIV_EXPR:
14210 case ROUND_DIV_EXPR:
14211 case TRUNC_MOD_EXPR:
14212 case CEIL_MOD_EXPR:
14213 case FLOOR_MOD_EXPR:
14214 case ROUND_MOD_EXPR:
14215 case RDIV_EXPR:
14216 case EXACT_DIV_EXPR:
14217 case MIN_EXPR:
14218 case MAX_EXPR:
14219 case BIT_IOR_EXPR:
14220 case BIT_XOR_EXPR:
14221 case BIT_AND_EXPR:
14223 case LSHIFT_EXPR:
14224 case RSHIFT_EXPR:
14225 case LROTATE_EXPR:
14226 case RROTATE_EXPR:
14227 return true;
14229 default:
14230 return false;
14234 /* Return a typenode for the "standard" C type with a given name. */
14235 tree
14236 get_typenode_from_name (const char *name)
14238 if (name == NULL || *name == '\0')
14239 return NULL_TREE;
14241 if (strcmp (name, "char") == 0)
14242 return char_type_node;
14243 if (strcmp (name, "unsigned char") == 0)
14244 return unsigned_char_type_node;
14245 if (strcmp (name, "signed char") == 0)
14246 return signed_char_type_node;
14248 if (strcmp (name, "short int") == 0)
14249 return short_integer_type_node;
14250 if (strcmp (name, "short unsigned int") == 0)
14251 return short_unsigned_type_node;
14253 if (strcmp (name, "int") == 0)
14254 return integer_type_node;
14255 if (strcmp (name, "unsigned int") == 0)
14256 return unsigned_type_node;
14258 if (strcmp (name, "long int") == 0)
14259 return long_integer_type_node;
14260 if (strcmp (name, "long unsigned int") == 0)
14261 return long_unsigned_type_node;
14263 if (strcmp (name, "long long int") == 0)
14264 return long_long_integer_type_node;
14265 if (strcmp (name, "long long unsigned int") == 0)
14266 return long_long_unsigned_type_node;
14268 gcc_unreachable ();
14271 /* List of pointer types used to declare builtins before we have seen their
14272 real declaration.
14274 Keep the size up to date in tree.h ! */
14275 const builtin_structptr_type builtin_structptr_types[6] =
14277 { fileptr_type_node, ptr_type_node, "FILE" },
14278 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14279 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14280 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14281 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14282 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14285 /* Return the maximum object size. */
14287 tree
14288 max_object_size (void)
14290 /* To do: Make this a configurable parameter. */
14291 return TYPE_MAX_VALUE (ptrdiff_type_node);
14294 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14295 parameter default to false and that weeds out error_mark_node. */
14297 bool
14298 verify_type_context (location_t loc, type_context_kind context,
14299 const_tree type, bool silent_p)
14301 if (type == error_mark_node)
14302 return true;
14304 gcc_assert (TYPE_P (type));
14305 return (!targetm.verify_type_context
14306 || targetm.verify_type_context (loc, context, type, silent_p));
14309 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14310 delete operators. Return false if they may or may not name such
14311 a pair and, when nonnull, set *PCERTAIN to true if they certainly
14312 do not. */
14314 bool
14315 valid_new_delete_pair_p (tree new_asm, tree delete_asm,
14316 bool *pcertain /* = NULL */)
14318 bool certain;
14319 if (!pcertain)
14320 pcertain = &certain;
14322 const char *new_name = IDENTIFIER_POINTER (new_asm);
14323 const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14324 unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14325 unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14327 /* The following failures are due to invalid names so they're not
14328 considered certain mismatches. */
14329 *pcertain = false;
14331 if (new_len < 5 || delete_len < 6)
14332 return false;
14333 if (new_name[0] == '_')
14334 ++new_name, --new_len;
14335 if (new_name[0] == '_')
14336 ++new_name, --new_len;
14337 if (delete_name[0] == '_')
14338 ++delete_name, --delete_len;
14339 if (delete_name[0] == '_')
14340 ++delete_name, --delete_len;
14341 if (new_len < 4 || delete_len < 5)
14342 return false;
14344 /* The following failures are due to names of user-defined operators
14345 so they're also not considered certain mismatches. */
14347 /* *_len is now just the length after initial underscores. */
14348 if (new_name[0] != 'Z' || new_name[1] != 'n')
14349 return false;
14350 if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14351 return false;
14353 /* The following failures are certain mismatches. */
14354 *pcertain = true;
14356 /* _Znw must match _Zdl, _Zna must match _Zda. */
14357 if ((new_name[2] != 'w' || delete_name[2] != 'l')
14358 && (new_name[2] != 'a' || delete_name[2] != 'a'))
14359 return false;
14360 /* 'j', 'm' and 'y' correspond to size_t. */
14361 if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14362 return false;
14363 if (delete_name[3] != 'P' || delete_name[4] != 'v')
14364 return false;
14365 if (new_len == 4
14366 || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14368 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14369 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14370 if (delete_len == 5)
14371 return true;
14372 if (delete_len == 6 && delete_name[5] == new_name[3])
14373 return true;
14374 if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14375 return true;
14377 else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14378 || (new_len == 33
14379 && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14381 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14382 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
14383 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14384 if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
14385 return true;
14386 if (delete_len == 21
14387 && delete_name[5] == new_name[3]
14388 && !memcmp (delete_name + 6, "St11align_val_t", 15))
14389 return true;
14390 if (delete_len == 34
14391 && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
14392 return true;
14395 /* The negative result is conservative. */
14396 *pcertain = false;
14397 return false;
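/* Concrete Itanium-mangling examples of the pairs checked above
   (illustrative; the size_t letter is 'm' for LP64 targets, 'j' or 'y'
   elsewhere):

     operator new (size_t)      _Znwm                  pairs with  _ZdlPv
     operator new[] (size_t)    _Znam                  pairs with  _ZdaPv
     sized delete               _ZdlPvm                (also a valid pair)
     nothrow forms              _ZnwmRKSt9nothrow_t    /  _ZdlPvRKSt9nothrow_t
     aligned forms              _ZnwmSt11align_val_t   /  _ZdlPvSt11align_val_t

   whereas e.g. _Znwm paired with _ZdaPv (scalar new, array delete) is
   rejected and reported as a certain mismatch.  */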
14400 /* Return the zero-based number corresponding to the argument being
14401 deallocated if FNDECL is a deallocation function or an out-of-bounds
14402 value if it isn't. */
14404 unsigned
14405 fndecl_dealloc_argno (tree fndecl)
14407 /* A call to operator delete isn't recognized as one to a built-in. */
14408 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
14410 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
14411 return 0;
14413 /* Avoid placement delete that's not been inlined. */
14414 tree fname = DECL_ASSEMBLER_NAME (fndecl);
14415 if (id_equal (fname, "_ZdlPvS_") // ordinary form
14416 || id_equal (fname, "_ZdaPvS_")) // array form
14417 return UINT_MAX;
14418 return 0;
14421 /* TODO: Handle user-defined functions with attribute malloc? Handle
14422 known non-built-ins like fopen? */
14423 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14425 switch (DECL_FUNCTION_CODE (fndecl))
14427 case BUILT_IN_FREE:
14428 case BUILT_IN_REALLOC:
14429 return 0;
14430 default:
14431 break;
14433 return UINT_MAX;
14436 tree attrs = DECL_ATTRIBUTES (fndecl);
14437 if (!attrs)
14438 return UINT_MAX;
14440 for (tree atfree = attrs;
14441 (atfree = lookup_attribute ("*dealloc", atfree));
14442 atfree = TREE_CHAIN (atfree))
14444 tree alloc = TREE_VALUE (atfree);
14445 if (!alloc)
14446 continue;
14448 tree pos = TREE_CHAIN (alloc);
14449 if (!pos)
14450 return 0;
14452 pos = TREE_VALUE (pos);
14453 return TREE_INT_CST_LOW (pos) - 1;
14456 return UINT_MAX;
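/* Illustrative examples (user code; the names are made up):

     fndecl_dealloc_argno (free_decl)     == 0          (BUILT_IN_FREE)
     fndecl_dealloc_argno (realloc_decl)  == 0          (BUILT_IN_REALLOC)
     fndecl_dealloc_argno (printf_decl)   == UINT_MAX   (not a deallocator)

   For a user-defined pair declared roughly as

     void pool_release (struct pool *p, void *ptr);
     void *pool_grab (struct pool *p, size_t n)
       __attribute__ ((malloc (pool_release, 2)));

   the attribute handlers record an internal "*dealloc" attribute on
   POOL_RELEASE, so fndecl_dealloc_argno (pool_release_decl) returns 1,
   the zero-based index of PTR.  */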
14459 /* If EXPR refers to a character array or pointer declared attribute
14460 nonstring, return a decl for that array or pointer and set *REF
14461 to the referenced enclosing object or pointer. Otherwise return
14462 null. */
14464 tree
14465 get_attr_nonstring_decl (tree expr, tree *ref)
14467 tree decl = expr;
14468 tree var = NULL_TREE;
14469 if (TREE_CODE (decl) == SSA_NAME)
14471 gimple *def = SSA_NAME_DEF_STMT (decl);
14473 if (is_gimple_assign (def))
14475 tree_code code = gimple_assign_rhs_code (def);
14476 if (code == ADDR_EXPR
14477 || code == COMPONENT_REF
14478 || code == VAR_DECL)
14479 decl = gimple_assign_rhs1 (def);
14481 else
14482 var = SSA_NAME_VAR (decl);
14485 if (TREE_CODE (decl) == ADDR_EXPR)
14486 decl = TREE_OPERAND (decl, 0);
14488 /* To simplify calling code, store the referenced DECL regardless of
14489 the attribute determined below, but avoid storing the SSA_NAME_VAR
14490 obtained above (it's not useful for dataflow purposes). */
14491 if (ref)
14492 *ref = decl;
14494 /* Use the SSA_NAME_VAR that was determined above to see if it's
14495 declared nonstring. Otherwise drill down into the referenced
14496 DECL. */
14497 if (var)
14498 decl = var;
14499 else if (TREE_CODE (decl) == ARRAY_REF)
14500 decl = TREE_OPERAND (decl, 0);
14501 else if (TREE_CODE (decl) == COMPONENT_REF)
14502 decl = TREE_OPERAND (decl, 1);
14503 else if (TREE_CODE (decl) == MEM_REF)
14504 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
14506 if (DECL_P (decl)
14507 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
14508 return decl;
14510 return NULL_TREE;
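/* Illustrative example (user code):

     char name[8] __attribute__ ((nonstring));
     ...
     strncpy (name, src, sizeof name);

   When warning code passes the tree for NAME (or an SSA name derived from
   its address) to get_attr_nonstring_decl, it gets back NAME's VAR_DECL,
   which tells callers such as the -Wstringop-truncation logic not to
   assume the array holds a NUL-terminated string.  */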
14513 #if CHECKING_P
14515 namespace selftest {
14517 /* Selftests for tree. */
14519 /* Verify that integer constants are sane. */
14521 static void
14522 test_integer_constants ()
14524 ASSERT_TRUE (integer_type_node != NULL);
14525 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14527 tree type = integer_type_node;
14529 tree zero = build_zero_cst (type);
14530 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14531 ASSERT_EQ (type, TREE_TYPE (zero));
14533 tree one = build_int_cst (type, 1);
14534 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14535 ASSERT_EQ (type, TREE_TYPE (zero));
14538 /* Verify identifiers. */
14540 static void
14541 test_identifiers ()
14543 tree identifier = get_identifier ("foo");
14544 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14545 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14548 /* Verify LABEL_DECL. */
14550 static void
14551 test_labels ()
14553 tree identifier = get_identifier ("err");
14554 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14555 identifier, void_type_node);
14556 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14557 ASSERT_FALSE (FORCED_LABEL (label_decl));
14560 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14561 are given by VALS. */
14563 static tree
14564 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
14566 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
14567 tree_vector_builder builder (type, vals.length (), 1);
14568 builder.splice (vals);
14569 return builder.build ();
14572 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14574 static void
14575 check_vector_cst (const vec<tree> &expected, tree actual)
14577 ASSERT_KNOWN_EQ (expected.length (),
14578 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
14579 for (unsigned int i = 0; i < expected.length (); ++i)
14580 ASSERT_EQ (wi::to_wide (expected[i]),
14581 wi::to_wide (vector_cst_elt (actual, i)));
14584 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14585 and that its elements match EXPECTED. */
14587 static void
14588 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
14589 unsigned int npatterns)
14591 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14592 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
14593 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
14594 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
14595 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14596 check_vector_cst (expected, actual);
14599 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14600 and NPATTERNS background elements, and that its elements match
14601 EXPECTED. */
14603 static void
14604 check_vector_cst_fill (const vec<tree> &expected, tree actual,
14605 unsigned int npatterns)
14607 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14608 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
14609 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
14610 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14611 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14612 check_vector_cst (expected, actual);
14615 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14616 and that its elements match EXPECTED. */
14618 static void
14619 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
14620 unsigned int npatterns)
14622 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14623 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
14624 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
14625 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14626 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
14627 check_vector_cst (expected, actual);
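/* The checks above mirror the VECTOR_CST encoding: a constant is stored as
   NPATTERNS interleaved patterns with NELTS_PER_PATTERN encoded elements
   each (roughly: 1 = duplicate, 2 = base element plus repeated fill value,
   3 = stepped series).  For example, under that scheme

     { 0, 1, 2, 3, 4, 5, 6, 7 }

   is a single stepped pattern and only { 0, 1, 2 } is encoded, while

     { 100, 55, 100, 55, 100, 55, 100, 55 }

   is two duplicate patterns encoding just { 100, 55 }.  */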
14630 /* Test the creation of VECTOR_CSTs. */
14632 static void
14633 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
14635 auto_vec<tree, 8> elements (8);
14636 elements.quick_grow (8);
14637 tree element_type = build_nonstandard_integer_type (16, true);
14638 tree vector_type = build_vector_type (element_type, 8);
14640 /* Test a simple linear series with a base of 0 and a step of 1:
14641 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
14642 for (unsigned int i = 0; i < 8; ++i)
14643 elements[i] = build_int_cst (element_type, i);
14644 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
14645 check_vector_cst_stepped (elements, vector, 1);
14647 /* Try the same with the first element replaced by 100:
14648 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
14649 elements[0] = build_int_cst (element_type, 100);
14650 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14651 check_vector_cst_stepped (elements, vector, 1);
14653 /* Try a series that wraps around.
14654 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
14655 for (unsigned int i = 1; i < 8; ++i)
14656 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
14657 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14658 check_vector_cst_stepped (elements, vector, 1);
14660 /* Try a downward series:
14661 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
14662 for (unsigned int i = 1; i < 8; ++i)
14663 elements[i] = build_int_cst (element_type, 80 - i);
14664 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14665 check_vector_cst_stepped (elements, vector, 1);
14667 /* Try two interleaved series with different bases and steps:
14668 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
14669 elements[1] = build_int_cst (element_type, 53);
14670 for (unsigned int i = 2; i < 8; i += 2)
14672 elements[i] = build_int_cst (element_type, 70 - i * 2);
14673 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
14675 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14676 check_vector_cst_stepped (elements, vector, 2);
14678 /* Try a duplicated value:
14679 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
14680 for (unsigned int i = 1; i < 8; ++i)
14681 elements[i] = elements[0];
14682 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14683 check_vector_cst_duplicate (elements, vector, 1);
14685 /* Try an interleaved duplicated value:
14686 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
14687 elements[1] = build_int_cst (element_type, 55);
14688 for (unsigned int i = 2; i < 8; ++i)
14689 elements[i] = elements[i - 2];
14690 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14691 check_vector_cst_duplicate (elements, vector, 2);
14693 /* Try a duplicated value with 2 exceptions
14694 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
14695 elements[0] = build_int_cst (element_type, 41);
14696 elements[1] = build_int_cst (element_type, 97);
14697 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14698 check_vector_cst_fill (elements, vector, 2);
14700 /* Try with and without a step
14701 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
14702 for (unsigned int i = 3; i < 8; i += 2)
14703 elements[i] = build_int_cst (element_type, i * 7);
14704 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14705 check_vector_cst_stepped (elements, vector, 2);
14707 /* Try a fully-general constant:
14708 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
14709 elements[5] = build_int_cst (element_type, 9990);
14710 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14711 check_vector_cst_fill (elements, vector, 4);
14714 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
14715 Helper function for test_location_wrappers, to deal with STRIP_NOPS
14716 modifying its argument in-place. */
14718 static void
14719 check_strip_nops (tree node, tree expected)
14721 STRIP_NOPS (node);
14722 ASSERT_EQ (expected, node);
14725 /* Verify location wrappers. */
14727 static void
14728 test_location_wrappers ()
14730 location_t loc = BUILTINS_LOCATION;
14732 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
14734 /* Wrapping a constant. */
14735 tree int_cst = build_int_cst (integer_type_node, 42);
14736 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
14737 ASSERT_FALSE (location_wrapper_p (int_cst));
14739 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
14740 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
14741 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
14742 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
14744 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
14745 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
14747 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
14748 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
14749 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
14750 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
14752 /* Wrapping a STRING_CST. */
14753 tree string_cst = build_string (4, "foo");
14754 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
14755 ASSERT_FALSE (location_wrapper_p (string_cst));
14757 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
14758 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
14759 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
14760 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
14761 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
14764 /* Wrapping a variable. */
14765 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
14766 get_identifier ("some_int_var"),
14767 integer_type_node);
14768 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
14769 ASSERT_FALSE (location_wrapper_p (int_var));
14771 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
14772 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
14773 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
14774 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
14776 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
14777 wrapper. */
14778 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
14779 ASSERT_FALSE (location_wrapper_p (r_cast));
14780 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
14782 /* Verify that STRIP_NOPS removes wrappers. */
14783 check_strip_nops (wrapped_int_cst, int_cst);
14784 check_strip_nops (wrapped_string_cst, string_cst);
14785 check_strip_nops (wrapped_int_var, int_var);
14788 /* Test various tree predicates. Verify that location wrappers don't
14789 affect the results. */
14791 static void
14792 test_predicates ()
14794 /* Build various constants and wrappers around them. */
14796 location_t loc = BUILTINS_LOCATION;
14798 tree i_0 = build_int_cst (integer_type_node, 0);
14799 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
14801 tree i_1 = build_int_cst (integer_type_node, 1);
14802 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
14804 tree i_m1 = build_int_cst (integer_type_node, -1);
14805 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
14807 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
14808 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
14809 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
14810 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
14811 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
14812 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
14814 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
14815 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
14816 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
14818 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
14819 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
14820 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
14822 /* TODO: vector constants. */
14824 /* Test integer_onep. */
14825 ASSERT_FALSE (integer_onep (i_0));
14826 ASSERT_FALSE (integer_onep (wr_i_0));
14827 ASSERT_TRUE (integer_onep (i_1));
14828 ASSERT_TRUE (integer_onep (wr_i_1));
14829 ASSERT_FALSE (integer_onep (i_m1));
14830 ASSERT_FALSE (integer_onep (wr_i_m1));
14831 ASSERT_FALSE (integer_onep (f_0));
14832 ASSERT_FALSE (integer_onep (wr_f_0));
14833 ASSERT_FALSE (integer_onep (f_1));
14834 ASSERT_FALSE (integer_onep (wr_f_1));
14835 ASSERT_FALSE (integer_onep (f_m1));
14836 ASSERT_FALSE (integer_onep (wr_f_m1));
14837 ASSERT_FALSE (integer_onep (c_i_0));
14838 ASSERT_TRUE (integer_onep (c_i_1));
14839 ASSERT_FALSE (integer_onep (c_i_m1));
14840 ASSERT_FALSE (integer_onep (c_f_0));
14841 ASSERT_FALSE (integer_onep (c_f_1));
14842 ASSERT_FALSE (integer_onep (c_f_m1));
14844 /* Test integer_zerop. */
14845 ASSERT_TRUE (integer_zerop (i_0));
14846 ASSERT_TRUE (integer_zerop (wr_i_0));
14847 ASSERT_FALSE (integer_zerop (i_1));
14848 ASSERT_FALSE (integer_zerop (wr_i_1));
14849 ASSERT_FALSE (integer_zerop (i_m1));
14850 ASSERT_FALSE (integer_zerop (wr_i_m1));
14851 ASSERT_FALSE (integer_zerop (f_0));
14852 ASSERT_FALSE (integer_zerop (wr_f_0));
14853 ASSERT_FALSE (integer_zerop (f_1));
14854 ASSERT_FALSE (integer_zerop (wr_f_1));
14855 ASSERT_FALSE (integer_zerop (f_m1));
14856 ASSERT_FALSE (integer_zerop (wr_f_m1));
14857 ASSERT_TRUE (integer_zerop (c_i_0));
14858 ASSERT_FALSE (integer_zerop (c_i_1));
14859 ASSERT_FALSE (integer_zerop (c_i_m1));
14860 ASSERT_FALSE (integer_zerop (c_f_0));
14861 ASSERT_FALSE (integer_zerop (c_f_1));
14862 ASSERT_FALSE (integer_zerop (c_f_m1));
14864 /* Test integer_all_onesp. */
14865 ASSERT_FALSE (integer_all_onesp (i_0));
14866 ASSERT_FALSE (integer_all_onesp (wr_i_0));
14867 ASSERT_FALSE (integer_all_onesp (i_1));
14868 ASSERT_FALSE (integer_all_onesp (wr_i_1));
14869 ASSERT_TRUE (integer_all_onesp (i_m1));
14870 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
14871 ASSERT_FALSE (integer_all_onesp (f_0));
14872 ASSERT_FALSE (integer_all_onesp (wr_f_0));
14873 ASSERT_FALSE (integer_all_onesp (f_1));
14874 ASSERT_FALSE (integer_all_onesp (wr_f_1));
14875 ASSERT_FALSE (integer_all_onesp (f_m1));
14876 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
14877 ASSERT_FALSE (integer_all_onesp (c_i_0));
14878 ASSERT_FALSE (integer_all_onesp (c_i_1));
14879 ASSERT_FALSE (integer_all_onesp (c_i_m1));
14880 ASSERT_FALSE (integer_all_onesp (c_f_0));
14881 ASSERT_FALSE (integer_all_onesp (c_f_1));
14882 ASSERT_FALSE (integer_all_onesp (c_f_m1));
14884 /* Test integer_minus_onep. */
14885 ASSERT_FALSE (integer_minus_onep (i_0));
14886 ASSERT_FALSE (integer_minus_onep (wr_i_0));
14887 ASSERT_FALSE (integer_minus_onep (i_1));
14888 ASSERT_FALSE (integer_minus_onep (wr_i_1));
14889 ASSERT_TRUE (integer_minus_onep (i_m1));
14890 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
14891 ASSERT_FALSE (integer_minus_onep (f_0));
14892 ASSERT_FALSE (integer_minus_onep (wr_f_0));
14893 ASSERT_FALSE (integer_minus_onep (f_1));
14894 ASSERT_FALSE (integer_minus_onep (wr_f_1));
14895 ASSERT_FALSE (integer_minus_onep (f_m1));
14896 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
14897 ASSERT_FALSE (integer_minus_onep (c_i_0));
14898 ASSERT_FALSE (integer_minus_onep (c_i_1));
14899 ASSERT_TRUE (integer_minus_onep (c_i_m1));
14900 ASSERT_FALSE (integer_minus_onep (c_f_0));
14901 ASSERT_FALSE (integer_minus_onep (c_f_1));
14902 ASSERT_FALSE (integer_minus_onep (c_f_m1));
14904 /* Test integer_each_onep. */
14905 ASSERT_FALSE (integer_each_onep (i_0));
14906 ASSERT_FALSE (integer_each_onep (wr_i_0));
14907 ASSERT_TRUE (integer_each_onep (i_1));
14908 ASSERT_TRUE (integer_each_onep (wr_i_1));
14909 ASSERT_FALSE (integer_each_onep (i_m1));
14910 ASSERT_FALSE (integer_each_onep (wr_i_m1));
14911 ASSERT_FALSE (integer_each_onep (f_0));
14912 ASSERT_FALSE (integer_each_onep (wr_f_0));
14913 ASSERT_FALSE (integer_each_onep (f_1));
14914 ASSERT_FALSE (integer_each_onep (wr_f_1));
14915 ASSERT_FALSE (integer_each_onep (f_m1));
14916 ASSERT_FALSE (integer_each_onep (wr_f_m1));
14917 ASSERT_FALSE (integer_each_onep (c_i_0));
14918 ASSERT_FALSE (integer_each_onep (c_i_1));
14919 ASSERT_FALSE (integer_each_onep (c_i_m1));
14920 ASSERT_FALSE (integer_each_onep (c_f_0));
14921 ASSERT_FALSE (integer_each_onep (c_f_1));
14922 ASSERT_FALSE (integer_each_onep (c_f_m1));
14924 /* Test integer_truep. */
14925 ASSERT_FALSE (integer_truep (i_0));
14926 ASSERT_FALSE (integer_truep (wr_i_0));
14927 ASSERT_TRUE (integer_truep (i_1));
14928 ASSERT_TRUE (integer_truep (wr_i_1));
14929 ASSERT_FALSE (integer_truep (i_m1));
14930 ASSERT_FALSE (integer_truep (wr_i_m1));
14931 ASSERT_FALSE (integer_truep (f_0));
14932 ASSERT_FALSE (integer_truep (wr_f_0));
14933 ASSERT_FALSE (integer_truep (f_1));
14934 ASSERT_FALSE (integer_truep (wr_f_1));
14935 ASSERT_FALSE (integer_truep (f_m1));
14936 ASSERT_FALSE (integer_truep (wr_f_m1));
14937 ASSERT_FALSE (integer_truep (c_i_0));
14938 ASSERT_TRUE (integer_truep (c_i_1));
14939 ASSERT_FALSE (integer_truep (c_i_m1));
14940 ASSERT_FALSE (integer_truep (c_f_0));
14941 ASSERT_FALSE (integer_truep (c_f_1));
14942 ASSERT_FALSE (integer_truep (c_f_m1));
14944 /* Test integer_nonzerop. */
14945 ASSERT_FALSE (integer_nonzerop (i_0));
14946 ASSERT_FALSE (integer_nonzerop (wr_i_0));
14947 ASSERT_TRUE (integer_nonzerop (i_1));
14948 ASSERT_TRUE (integer_nonzerop (wr_i_1));
14949 ASSERT_TRUE (integer_nonzerop (i_m1));
14950 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
14951 ASSERT_FALSE (integer_nonzerop (f_0));
14952 ASSERT_FALSE (integer_nonzerop (wr_f_0));
14953 ASSERT_FALSE (integer_nonzerop (f_1));
14954 ASSERT_FALSE (integer_nonzerop (wr_f_1));
14955 ASSERT_FALSE (integer_nonzerop (f_m1));
14956 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
14957 ASSERT_FALSE (integer_nonzerop (c_i_0));
14958 ASSERT_TRUE (integer_nonzerop (c_i_1));
14959 ASSERT_TRUE (integer_nonzerop (c_i_m1));
14960 ASSERT_FALSE (integer_nonzerop (c_f_0));
14961 ASSERT_FALSE (integer_nonzerop (c_f_1));
14962 ASSERT_FALSE (integer_nonzerop (c_f_m1));
14964 /* Test real_zerop. */
14965 ASSERT_FALSE (real_zerop (i_0));
14966 ASSERT_FALSE (real_zerop (wr_i_0));
14967 ASSERT_FALSE (real_zerop (i_1));
14968 ASSERT_FALSE (real_zerop (wr_i_1));
14969 ASSERT_FALSE (real_zerop (i_m1));
14970 ASSERT_FALSE (real_zerop (wr_i_m1));
14971 ASSERT_TRUE (real_zerop (f_0));
14972 ASSERT_TRUE (real_zerop (wr_f_0));
14973 ASSERT_FALSE (real_zerop (f_1));
14974 ASSERT_FALSE (real_zerop (wr_f_1));
14975 ASSERT_FALSE (real_zerop (f_m1));
14976 ASSERT_FALSE (real_zerop (wr_f_m1));
14977 ASSERT_FALSE (real_zerop (c_i_0));
14978 ASSERT_FALSE (real_zerop (c_i_1));
14979 ASSERT_FALSE (real_zerop (c_i_m1));
14980 ASSERT_TRUE (real_zerop (c_f_0));
14981 ASSERT_FALSE (real_zerop (c_f_1));
14982 ASSERT_FALSE (real_zerop (c_f_m1));
14984 /* Test real_onep. */
14985 ASSERT_FALSE (real_onep (i_0));
14986 ASSERT_FALSE (real_onep (wr_i_0));
14987 ASSERT_FALSE (real_onep (i_1));
14988 ASSERT_FALSE (real_onep (wr_i_1));
14989 ASSERT_FALSE (real_onep (i_m1));
14990 ASSERT_FALSE (real_onep (wr_i_m1));
14991 ASSERT_FALSE (real_onep (f_0));
14992 ASSERT_FALSE (real_onep (wr_f_0));
14993 ASSERT_TRUE (real_onep (f_1));
14994 ASSERT_TRUE (real_onep (wr_f_1));
14995 ASSERT_FALSE (real_onep (f_m1));
14996 ASSERT_FALSE (real_onep (wr_f_m1));
14997 ASSERT_FALSE (real_onep (c_i_0));
14998 ASSERT_FALSE (real_onep (c_i_1));
14999 ASSERT_FALSE (real_onep (c_i_m1));
15000 ASSERT_FALSE (real_onep (c_f_0));
15001 ASSERT_TRUE (real_onep (c_f_1));
15002 ASSERT_FALSE (real_onep (c_f_m1));
15004 /* Test real_minus_onep. */
15005 ASSERT_FALSE (real_minus_onep (i_0));
15006 ASSERT_FALSE (real_minus_onep (wr_i_0));
15007 ASSERT_FALSE (real_minus_onep (i_1));
15008 ASSERT_FALSE (real_minus_onep (wr_i_1));
15009 ASSERT_FALSE (real_minus_onep (i_m1));
15010 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15011 ASSERT_FALSE (real_minus_onep (f_0));
15012 ASSERT_FALSE (real_minus_onep (wr_f_0));
15013 ASSERT_FALSE (real_minus_onep (f_1));
15014 ASSERT_FALSE (real_minus_onep (wr_f_1));
15015 ASSERT_TRUE (real_minus_onep (f_m1));
15016 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15017 ASSERT_FALSE (real_minus_onep (c_i_0));
15018 ASSERT_FALSE (real_minus_onep (c_i_1));
15019 ASSERT_FALSE (real_minus_onep (c_i_m1));
15020 ASSERT_FALSE (real_minus_onep (c_f_0));
15021 ASSERT_FALSE (real_minus_onep (c_f_1));
15022 ASSERT_TRUE (real_minus_onep (c_f_m1));
15024 /* Test zerop. */
15025 ASSERT_TRUE (zerop (i_0));
15026 ASSERT_TRUE (zerop (wr_i_0));
15027 ASSERT_FALSE (zerop (i_1));
15028 ASSERT_FALSE (zerop (wr_i_1));
15029 ASSERT_FALSE (zerop (i_m1));
15030 ASSERT_FALSE (zerop (wr_i_m1));
15031 ASSERT_TRUE (zerop (f_0));
15032 ASSERT_TRUE (zerop (wr_f_0));
15033 ASSERT_FALSE (zerop (f_1));
15034 ASSERT_FALSE (zerop (wr_f_1));
15035 ASSERT_FALSE (zerop (f_m1));
15036 ASSERT_FALSE (zerop (wr_f_m1));
15037 ASSERT_TRUE (zerop (c_i_0));
15038 ASSERT_FALSE (zerop (c_i_1));
15039 ASSERT_FALSE (zerop (c_i_m1));
15040 ASSERT_TRUE (zerop (c_f_0));
15041 ASSERT_FALSE (zerop (c_f_1));
15042 ASSERT_FALSE (zerop (c_f_m1));
15044 /* Test tree_expr_nonnegative_p. */
15045 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15046 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15047 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15048 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15049 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15050 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15051 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15052 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15053 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15054 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15055 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15056 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15057 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15058 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15059 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15060 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15061 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15062 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15064 /* Test tree_expr_nonzero_p. */
15065 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15066 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15067 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15068 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15069 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15070 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15072 /* Test integer_valued_real_p. */
15073 ASSERT_FALSE (integer_valued_real_p (i_0));
15074 ASSERT_TRUE (integer_valued_real_p (f_0));
15075 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15076 ASSERT_TRUE (integer_valued_real_p (f_1));
15077 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15079 /* Test integer_pow2p. */
15080 ASSERT_FALSE (integer_pow2p (i_0));
15081 ASSERT_TRUE (integer_pow2p (i_1));
15082 ASSERT_TRUE (integer_pow2p (wr_i_1));
15084 /* Test uniform_integer_cst_p. */
15085 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15086 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15087 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15088 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15089 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15090 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15091 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15092 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15093 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15094 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15095 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15096 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15097 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15098 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15099 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15100 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15101 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15102 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15105 /* Check that string escaping works correctly. */
15107 static void
15108 test_escaped_strings (void)
15110 int saved_cutoff;
15111 escaped_string msg;
15113 msg.escape (NULL);
15114 /* ASSERT_STREQ does not accept NULL as a valid test
15115 result, so we have to use ASSERT_EQ instead. */
15116 ASSERT_EQ (NULL, (const char *) msg);
15118 msg.escape ("");
15119 ASSERT_STREQ ("", (const char *) msg);
15121 msg.escape ("foobar");
15122 ASSERT_STREQ ("foobar", (const char *) msg);
15124 /* Ensure that we have -fmessage-length set to 0. */
15125 saved_cutoff = pp_line_cutoff (global_dc->printer);
15126 pp_line_cutoff (global_dc->printer) = 0;
15128 msg.escape ("foo\nbar");
15129 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15131 msg.escape ("\a\b\f\n\r\t\v");
15132 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15134 /* Now repeat the tests with -fmessage-length set to 5. */
15135 pp_line_cutoff (global_dc->printer) = 5;
15137 /* Note that the newline is not translated into an escape. */
15138 msg.escape ("foo\nbar");
15139 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15141 msg.escape ("\a\b\f\n\r\t\v");
15142 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15144 /* Restore the original message length setting. */
15145 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15148 /* Run all of the selftests within this file. */
15150 void
15151 tree_c_tests ()
15153 test_integer_constants ();
15154 test_identifiers ();
15155 test_labels ();
15156 test_vector_cst_patterns ();
15157 test_location_wrappers ();
15158 test_predicates ();
15159 test_escaped_strings ();
15162 } // namespace selftest
15164 #endif /* CHECKING_P */
15166 #include "gt-tree.h"