Revert DECL_USER_ALIGN part of r241959
[official-gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "rtl.h"
68 #include "regs.h"
69 #include "tree-vector-builder.h"
71 /* Tree code classes. */
73 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
74 #define END_OF_BASE_TREE_CODES tcc_exceptional,
76 const enum tree_code_class tree_code_type[] = {
77 #include "all-tree.def"
80 #undef DEFTREECODE
81 #undef END_OF_BASE_TREE_CODES
83 /* Table indexed by tree code giving number of expression
84 operands beyond the fixed part of the node structure.
85 Not used for types or decls. */
87 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
88 #define END_OF_BASE_TREE_CODES 0,
90 const unsigned char tree_code_length[] = {
91 #include "all-tree.def"
94 #undef DEFTREECODE
95 #undef END_OF_BASE_TREE_CODES
97 /* Names of tree components.
98 Used for printing out the tree and error messages. */
99 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
100 #define END_OF_BASE_TREE_CODES "@dummy",
102 static const char *const tree_code_name[] = {
103 #include "all-tree.def"
106 #undef DEFTREECODE
107 #undef END_OF_BASE_TREE_CODES
109 /* Each tree code class has an associated string representation.
110 These must correspond to the tree_code_class entries. */
112 const char *const tree_code_class_strings[] =
114 "exceptional",
115 "constant",
116 "type",
117 "declaration",
118 "reference",
119 "comparison",
120 "unary",
121 "binary",
122 "statement",
123 "vl_exp",
124 "expression"
127 /* obstack.[ch] explicitly declined to prototype this. */
128 extern int _obstack_allocated_p (struct obstack *h, void *obj);
130 /* Statistics-gathering stuff. */
132 static int tree_code_counts[MAX_TREE_CODES];
133 int tree_node_counts[(int) all_kinds];
134 int tree_node_sizes[(int) all_kinds];
136 /* Keep in sync with tree.h:enum tree_node_kind. */
137 static const char * const tree_node_kind_names[] = {
138 "decls",
139 "types",
140 "blocks",
141 "stmts",
142 "refs",
143 "exprs",
144 "constants",
145 "identifiers",
146 "vecs",
147 "binfos",
148 "ssa names",
149 "constructors",
150 "random kinds",
151 "lang_decl kinds",
152 "lang_type kinds",
153 "omp clauses",
156 /* Unique id for next decl created. */
157 static GTY(()) int next_decl_uid;
158 /* Unique id for next type created. */
159 static GTY(()) unsigned next_type_uid = 1;
160 /* Unique id for next debug decl created. Use negative numbers,
161 to catch erroneous uses. */
162 static GTY(()) int next_debug_decl_uid;
164 /* Since we cannot rehash a type after it is in the table, we have to
165 keep the hash code. */
167 struct GTY((for_user)) type_hash {
168 unsigned long hash;
169 tree type;
172 /* Initial size of the hash table (rounded to next prime). */
173 #define TYPE_HASH_INITIAL_SIZE 1000
175 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
177 static hashval_t hash (type_hash *t) { return t->hash; }
178 static bool equal (type_hash *a, type_hash *b);
180 static int
181 keep_cache_entry (type_hash *&t)
183 return ggc_marked_p (t->type);
187 /* Now here is the hash table. When recording a type, it is added to
188 the slot whose index is the hash code. Note that the hash table is
189 used for several kinds of types (function types, array types and
190 array index range types, for now). While all these live in the
191 same table, they are completely independent, and the hash code is
192 computed differently for each of these. */
194 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
196 /* Hash table and temporary node for larger integer const values. */
197 static GTY (()) tree int_cst_node;
199 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
201 static hashval_t hash (tree t);
202 static bool equal (tree x, tree y);
205 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
207 /* Class and variable for making sure that there is a single POLY_INT_CST
208 for a given value. */
209 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
211 typedef std::pair<tree, const poly_wide_int *> compare_type;
212 static hashval_t hash (tree t);
213 static bool equal (tree x, const compare_type &y);
216 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
218 /* Hash table for optimization flags and target option flags. Use the same
219 hash table for both sets of options. Nodes for building the current
220 optimization and target option nodes. The assumption is most of the time
221 the options created will already be in the hash table, so we avoid
222 allocating and freeing up a node repeatedly. */
223 static GTY (()) tree cl_optimization_node;
224 static GTY (()) tree cl_target_option_node;
226 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
228 static hashval_t hash (tree t);
229 static bool equal (tree x, tree y);
232 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
234 /* General tree->tree mapping structure for use in hash tables. */
237 static GTY ((cache))
238 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
240 static GTY ((cache))
241 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
243 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
245 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
247 static bool
248 equal (tree_vec_map *a, tree_vec_map *b)
250 return a->base.from == b->base.from;
253 static int
254 keep_cache_entry (tree_vec_map *&m)
256 return ggc_marked_p (m->base.from);
260 static GTY ((cache))
261 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
263 static void set_type_quals (tree, int);
264 static void print_type_hash_statistics (void);
265 static void print_debug_expr_statistics (void);
266 static void print_value_expr_statistics (void);
268 tree global_trees[TI_MAX];
269 tree integer_types[itk_none];
271 bool int_n_enabled_p[NUM_INT_N_ENTS];
272 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
274 bool tree_contains_struct[MAX_TREE_CODES][64];
276 /* Number of operands for each OpenMP clause. */
277 unsigned const char omp_clause_num_ops[] =
279 0, /* OMP_CLAUSE_ERROR */
280 1, /* OMP_CLAUSE_PRIVATE */
281 1, /* OMP_CLAUSE_SHARED */
282 1, /* OMP_CLAUSE_FIRSTPRIVATE */
283 2, /* OMP_CLAUSE_LASTPRIVATE */
284 5, /* OMP_CLAUSE_REDUCTION */
285 1, /* OMP_CLAUSE_COPYIN */
286 1, /* OMP_CLAUSE_COPYPRIVATE */
287 3, /* OMP_CLAUSE_LINEAR */
288 2, /* OMP_CLAUSE_ALIGNED */
289 1, /* OMP_CLAUSE_DEPEND */
290 1, /* OMP_CLAUSE_UNIFORM */
291 1, /* OMP_CLAUSE_TO_DECLARE */
292 1, /* OMP_CLAUSE_LINK */
293 2, /* OMP_CLAUSE_FROM */
294 2, /* OMP_CLAUSE_TO */
295 2, /* OMP_CLAUSE_MAP */
296 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
297 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
298 2, /* OMP_CLAUSE__CACHE_ */
299 2, /* OMP_CLAUSE_GANG */
300 1, /* OMP_CLAUSE_ASYNC */
301 1, /* OMP_CLAUSE_WAIT */
302 0, /* OMP_CLAUSE_AUTO */
303 0, /* OMP_CLAUSE_SEQ */
304 1, /* OMP_CLAUSE__LOOPTEMP_ */
305 1, /* OMP_CLAUSE_IF */
306 1, /* OMP_CLAUSE_NUM_THREADS */
307 1, /* OMP_CLAUSE_SCHEDULE */
308 0, /* OMP_CLAUSE_NOWAIT */
309 1, /* OMP_CLAUSE_ORDERED */
310 0, /* OMP_CLAUSE_DEFAULT */
311 3, /* OMP_CLAUSE_COLLAPSE */
312 0, /* OMP_CLAUSE_UNTIED */
313 1, /* OMP_CLAUSE_FINAL */
314 0, /* OMP_CLAUSE_MERGEABLE */
315 1, /* OMP_CLAUSE_DEVICE */
316 1, /* OMP_CLAUSE_DIST_SCHEDULE */
317 0, /* OMP_CLAUSE_INBRANCH */
318 0, /* OMP_CLAUSE_NOTINBRANCH */
319 1, /* OMP_CLAUSE_NUM_TEAMS */
320 1, /* OMP_CLAUSE_THREAD_LIMIT */
321 0, /* OMP_CLAUSE_PROC_BIND */
322 1, /* OMP_CLAUSE_SAFELEN */
323 1, /* OMP_CLAUSE_SIMDLEN */
324 0, /* OMP_CLAUSE_FOR */
325 0, /* OMP_CLAUSE_PARALLEL */
326 0, /* OMP_CLAUSE_SECTIONS */
327 0, /* OMP_CLAUSE_TASKGROUP */
328 1, /* OMP_CLAUSE_PRIORITY */
329 1, /* OMP_CLAUSE_GRAINSIZE */
330 1, /* OMP_CLAUSE_NUM_TASKS */
331 0, /* OMP_CLAUSE_NOGROUP */
332 0, /* OMP_CLAUSE_THREADS */
333 0, /* OMP_CLAUSE_SIMD */
334 1, /* OMP_CLAUSE_HINT */
335 0, /* OMP_CLAUSE_DEFAULTMAP */
336 1, /* OMP_CLAUSE__SIMDUID_ */
337 0, /* OMP_CLAUSE__SIMT_ */
338 0, /* OMP_CLAUSE_INDEPENDENT */
339 1, /* OMP_CLAUSE_WORKER */
340 1, /* OMP_CLAUSE_VECTOR */
341 1, /* OMP_CLAUSE_NUM_GANGS */
342 1, /* OMP_CLAUSE_NUM_WORKERS */
343 1, /* OMP_CLAUSE_VECTOR_LENGTH */
344 3, /* OMP_CLAUSE_TILE */
345 2, /* OMP_CLAUSE__GRIDDIM_ */
348 const char * const omp_clause_code_name[] =
350 "error_clause",
351 "private",
352 "shared",
353 "firstprivate",
354 "lastprivate",
355 "reduction",
356 "copyin",
357 "copyprivate",
358 "linear",
359 "aligned",
360 "depend",
361 "uniform",
362 "to",
363 "link",
364 "from",
365 "to",
366 "map",
367 "use_device_ptr",
368 "is_device_ptr",
369 "_cache_",
370 "gang",
371 "async",
372 "wait",
373 "auto",
374 "seq",
375 "_looptemp_",
376 "if",
377 "num_threads",
378 "schedule",
379 "nowait",
380 "ordered",
381 "default",
382 "collapse",
383 "untied",
384 "final",
385 "mergeable",
386 "device",
387 "dist_schedule",
388 "inbranch",
389 "notinbranch",
390 "num_teams",
391 "thread_limit",
392 "proc_bind",
393 "safelen",
394 "simdlen",
395 "for",
396 "parallel",
397 "sections",
398 "taskgroup",
399 "priority",
400 "grainsize",
401 "num_tasks",
402 "nogroup",
403 "threads",
404 "simd",
405 "hint",
406 "defaultmap",
407 "_simduid_",
408 "_simt_",
409 "independent",
410 "worker",
411 "vector",
412 "num_gangs",
413 "num_workers",
414 "vector_length",
415 "tile",
416 "_griddim_"
420 /* Return the tree node structure used by tree code CODE. */
422 static inline enum tree_node_structure_enum
423 tree_node_structure_for_code (enum tree_code code)
425 switch (TREE_CODE_CLASS (code))
427 case tcc_declaration:
429 switch (code)
431 case FIELD_DECL:
432 return TS_FIELD_DECL;
433 case PARM_DECL:
434 return TS_PARM_DECL;
435 case VAR_DECL:
436 return TS_VAR_DECL;
437 case LABEL_DECL:
438 return TS_LABEL_DECL;
439 case RESULT_DECL:
440 return TS_RESULT_DECL;
441 case DEBUG_EXPR_DECL:
442 return TS_DECL_WRTL;
443 case CONST_DECL:
444 return TS_CONST_DECL;
445 case TYPE_DECL:
446 return TS_TYPE_DECL;
447 case FUNCTION_DECL:
448 return TS_FUNCTION_DECL;
449 case TRANSLATION_UNIT_DECL:
450 return TS_TRANSLATION_UNIT_DECL;
451 default:
452 return TS_DECL_NON_COMMON;
455 case tcc_type:
456 return TS_TYPE_NON_COMMON;
457 case tcc_reference:
458 case tcc_comparison:
459 case tcc_unary:
460 case tcc_binary:
461 case tcc_expression:
462 case tcc_statement:
463 case tcc_vl_exp:
464 return TS_EXP;
465 default: /* tcc_constant and tcc_exceptional */
466 break;
468 switch (code)
470 /* tcc_constant cases. */
471 case VOID_CST: return TS_TYPED;
472 case INTEGER_CST: return TS_INT_CST;
473 case POLY_INT_CST: return TS_POLY_INT_CST;
474 case REAL_CST: return TS_REAL_CST;
475 case FIXED_CST: return TS_FIXED_CST;
476 case COMPLEX_CST: return TS_COMPLEX;
477 case VECTOR_CST: return TS_VECTOR;
478 case STRING_CST: return TS_STRING;
479 /* tcc_exceptional cases. */
480 case ERROR_MARK: return TS_COMMON;
481 case IDENTIFIER_NODE: return TS_IDENTIFIER;
482 case TREE_LIST: return TS_LIST;
483 case TREE_VEC: return TS_VEC;
484 case SSA_NAME: return TS_SSA_NAME;
485 case PLACEHOLDER_EXPR: return TS_COMMON;
486 case STATEMENT_LIST: return TS_STATEMENT_LIST;
487 case BLOCK: return TS_BLOCK;
488 case CONSTRUCTOR: return TS_CONSTRUCTOR;
489 case TREE_BINFO: return TS_BINFO;
490 case OMP_CLAUSE: return TS_OMP_CLAUSE;
491 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
492 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
494 default:
495 gcc_unreachable ();
500 /* Initialize tree_contains_struct to describe the hierarchy of tree
501 nodes. */
503 static void
504 initialize_tree_contains_struct (void)
506 unsigned i;
508 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
510 enum tree_code code;
511 enum tree_node_structure_enum ts_code;
513 code = (enum tree_code) i;
514 ts_code = tree_node_structure_for_code (code);
516 /* Mark the TS structure itself. */
517 tree_contains_struct[code][ts_code] = 1;
519 /* Mark all the structures that TS is derived from. */
520 switch (ts_code)
522 case TS_TYPED:
523 case TS_BLOCK:
524 case TS_OPTIMIZATION:
525 case TS_TARGET_OPTION:
526 MARK_TS_BASE (code);
527 break;
529 case TS_COMMON:
530 case TS_INT_CST:
531 case TS_POLY_INT_CST:
532 case TS_REAL_CST:
533 case TS_FIXED_CST:
534 case TS_VECTOR:
535 case TS_STRING:
536 case TS_COMPLEX:
537 case TS_SSA_NAME:
538 case TS_CONSTRUCTOR:
539 case TS_EXP:
540 case TS_STATEMENT_LIST:
541 MARK_TS_TYPED (code);
542 break;
544 case TS_IDENTIFIER:
545 case TS_DECL_MINIMAL:
546 case TS_TYPE_COMMON:
547 case TS_LIST:
548 case TS_VEC:
549 case TS_BINFO:
550 case TS_OMP_CLAUSE:
551 MARK_TS_COMMON (code);
552 break;
554 case TS_TYPE_WITH_LANG_SPECIFIC:
555 MARK_TS_TYPE_COMMON (code);
556 break;
558 case TS_TYPE_NON_COMMON:
559 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
560 break;
562 case TS_DECL_COMMON:
563 MARK_TS_DECL_MINIMAL (code);
564 break;
566 case TS_DECL_WRTL:
567 case TS_CONST_DECL:
568 MARK_TS_DECL_COMMON (code);
569 break;
571 case TS_DECL_NON_COMMON:
572 MARK_TS_DECL_WITH_VIS (code);
573 break;
575 case TS_DECL_WITH_VIS:
576 case TS_PARM_DECL:
577 case TS_LABEL_DECL:
578 case TS_RESULT_DECL:
579 MARK_TS_DECL_WRTL (code);
580 break;
582 case TS_FIELD_DECL:
583 MARK_TS_DECL_COMMON (code);
584 break;
586 case TS_VAR_DECL:
587 MARK_TS_DECL_WITH_VIS (code);
588 break;
590 case TS_TYPE_DECL:
591 case TS_FUNCTION_DECL:
592 MARK_TS_DECL_NON_COMMON (code);
593 break;
595 case TS_TRANSLATION_UNIT_DECL:
596 MARK_TS_DECL_COMMON (code);
597 break;
599 default:
600 gcc_unreachable ();
604 /* Basic consistency checks for attributes used in fold. */
605 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
606 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
607 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
608 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
609 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
610 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
611 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
612 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
613 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
614 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
615 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
616 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
617 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
618 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
619 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
620 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
621 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
622 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
623 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
624 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
625 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
626 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
627 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
628 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
629 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
630 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
631 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
632 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
633 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
634 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
635 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
636 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
637 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
638 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
639 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
640 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
641 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
642 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
643 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
644 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
648 /* Init tree.c. */
650 void
651 init_ttree (void)
653 /* Initialize the hash table of types. */
654 type_hash_table
655 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
657 debug_expr_for_decl
658 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
660 value_expr_for_decl
661 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
663 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
665 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
667 int_cst_node = make_int_cst (1, 1);
669 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
671 cl_optimization_node = make_node (OPTIMIZATION_NODE);
672 cl_target_option_node = make_node (TARGET_OPTION_NODE);
674 /* Initialize the tree_contains_struct array. */
675 initialize_tree_contains_struct ();
676 lang_hooks.init_ts ();
680 /* The name of the object as the assembler will see it (but before any
681 translations made by ASM_OUTPUT_LABELREF). Often this is the same
682 as DECL_NAME. It is an IDENTIFIER_NODE. */
683 tree
684 decl_assembler_name (tree decl)
686 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
687 lang_hooks.set_decl_assembler_name (decl);
688 return DECL_ASSEMBLER_NAME_RAW (decl);
691 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
692 (either of which may be NULL). Inform the FE if this changes the
693 name. */
695 void
696 overwrite_decl_assembler_name (tree decl, tree name)
698 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
699 lang_hooks.overwrite_decl_assembler_name (decl, name);
702 /* When the target supports COMDAT groups, this indicates which group the
703 DECL is associated with. This can be either an IDENTIFIER_NODE or a
704 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
705 tree
706 decl_comdat_group (const_tree node)
708 struct symtab_node *snode = symtab_node::get (node);
709 if (!snode)
710 return NULL;
711 return snode->get_comdat_group ();
714 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
715 tree
716 decl_comdat_group_id (const_tree node)
718 struct symtab_node *snode = symtab_node::get (node);
719 if (!snode)
720 return NULL;
721 return snode->get_comdat_group_id ();
724 /* When the target supports named sections, return the section name of
725 NODE as a string, or NULL if it is in no section. */
726 const char *
727 decl_section_name (const_tree node)
729 struct symtab_node *snode = symtab_node::get (node);
730 if (!snode)
731 return NULL;
732 return snode->get_section ();
735 /* Set the section name of NODE to the string VALUE, or clear the
736 section when VALUE is NULL. */
737 void
738 set_decl_section_name (tree node, const char *value)
740 struct symtab_node *snode;
742 if (value == NULL)
744 snode = symtab_node::get (node);
745 if (!snode)
746 return;
748 else if (VAR_P (node))
749 snode = varpool_node::get_create (node);
750 else
751 snode = cgraph_node::get_create (node);
752 snode->set_section (value);
755 /* Return TLS model of a variable NODE. */
756 enum tls_model
757 decl_tls_model (const_tree node)
759 struct varpool_node *snode = varpool_node::get (node);
760 if (!snode)
761 return TLS_MODEL_NONE;
762 return snode->tls_model;
765 /* Set TLS model of variable NODE to MODEL. */
766 void
767 set_decl_tls_model (tree node, enum tls_model model)
769 struct varpool_node *vnode;
771 if (model == TLS_MODEL_NONE)
773 vnode = varpool_node::get (node);
774 if (!vnode)
775 return;
777 else
778 vnode = varpool_node::get_create (node);
779 vnode->tls_model = model;
782 /* Compute the number of bytes occupied by a tree with code CODE.
783 This function cannot be used for nodes that have variable sizes,
784 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
785 size_t
786 tree_code_size (enum tree_code code)
788 switch (TREE_CODE_CLASS (code))
790 case tcc_declaration: /* A decl node */
791 switch (code)
793 case FIELD_DECL: return sizeof (tree_field_decl);
794 case PARM_DECL: return sizeof (tree_parm_decl);
795 case VAR_DECL: return sizeof (tree_var_decl);
796 case LABEL_DECL: return sizeof (tree_label_decl);
797 case RESULT_DECL: return sizeof (tree_result_decl);
798 case CONST_DECL: return sizeof (tree_const_decl);
799 case TYPE_DECL: return sizeof (tree_type_decl);
800 case FUNCTION_DECL: return sizeof (tree_function_decl);
801 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
802 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
803 case NAMESPACE_DECL:
804 case IMPORTED_DECL:
805 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
806 default:
807 gcc_checking_assert (code >= NUM_TREE_CODES);
808 return lang_hooks.tree_size (code);
811 case tcc_type: /* a type node */
812 switch (code)
814 case OFFSET_TYPE:
815 case ENUMERAL_TYPE:
816 case BOOLEAN_TYPE:
817 case INTEGER_TYPE:
818 case REAL_TYPE:
819 case POINTER_TYPE:
820 case REFERENCE_TYPE:
821 case NULLPTR_TYPE:
822 case FIXED_POINT_TYPE:
823 case COMPLEX_TYPE:
824 case VECTOR_TYPE:
825 case ARRAY_TYPE:
826 case RECORD_TYPE:
827 case UNION_TYPE:
828 case QUAL_UNION_TYPE:
829 case VOID_TYPE:
830 case POINTER_BOUNDS_TYPE:
831 case FUNCTION_TYPE:
832 case METHOD_TYPE:
833 case LANG_TYPE: return sizeof (tree_type_non_common);
834 default:
835 gcc_checking_assert (code >= NUM_TREE_CODES);
836 return lang_hooks.tree_size (code);
839 case tcc_reference: /* a reference */
840 case tcc_expression: /* an expression */
841 case tcc_statement: /* an expression with side effects */
842 case tcc_comparison: /* a comparison expression */
843 case tcc_unary: /* a unary arithmetic expression */
844 case tcc_binary: /* a binary arithmetic expression */
845 return (sizeof (struct tree_exp)
846 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
848 case tcc_constant: /* a constant */
849 switch (code)
851 case VOID_CST: return sizeof (tree_typed);
852 case INTEGER_CST: gcc_unreachable ();
853 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
854 case REAL_CST: return sizeof (tree_real_cst);
855 case FIXED_CST: return sizeof (tree_fixed_cst);
856 case COMPLEX_CST: return sizeof (tree_complex);
857 case VECTOR_CST: gcc_unreachable ();
858 case STRING_CST: gcc_unreachable ();
859 default:
860 gcc_checking_assert (code >= NUM_TREE_CODES);
861 return lang_hooks.tree_size (code);
864 case tcc_exceptional: /* something random, like an identifier. */
865 switch (code)
867 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
868 case TREE_LIST: return sizeof (tree_list);
870 case ERROR_MARK:
871 case PLACEHOLDER_EXPR: return sizeof (tree_common);
873 case TREE_VEC: gcc_unreachable ();
874 case OMP_CLAUSE: gcc_unreachable ();
876 case SSA_NAME: return sizeof (tree_ssa_name);
878 case STATEMENT_LIST: return sizeof (tree_statement_list);
879 case BLOCK: return sizeof (struct tree_block);
880 case CONSTRUCTOR: return sizeof (tree_constructor);
881 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
882 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
884 default:
885 gcc_checking_assert (code >= NUM_TREE_CODES);
886 return lang_hooks.tree_size (code);
889 default:
890 gcc_unreachable ();
894 /* Compute the number of bytes occupied by NODE. This routine only
895 looks at TREE_CODE, except for those nodes that have variable sizes. */
896 size_t
897 tree_size (const_tree node)
899 const enum tree_code code = TREE_CODE (node);
900 switch (code)
902 case INTEGER_CST:
903 return (sizeof (struct tree_int_cst)
904 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
906 case TREE_BINFO:
907 return (offsetof (struct tree_binfo, base_binfos)
908 + vec<tree, va_gc>
909 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
911 case TREE_VEC:
912 return (sizeof (struct tree_vec)
913 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
915 case VECTOR_CST:
916 return (sizeof (struct tree_vector)
917 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
919 case STRING_CST:
920 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
922 case OMP_CLAUSE:
923 return (sizeof (struct tree_omp_clause)
924 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
925 * sizeof (tree));
927 default:
928 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
929 return (sizeof (struct tree_exp)
930 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
931 else
932 return tree_code_size (code);
936 /* Record interesting allocation statistics for a tree node with CODE
937 and LENGTH. */
939 static void
940 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
941 size_t length ATTRIBUTE_UNUSED)
943 enum tree_code_class type = TREE_CODE_CLASS (code);
944 tree_node_kind kind;
946 if (!GATHER_STATISTICS)
947 return;
949 switch (type)
951 case tcc_declaration: /* A decl node */
952 kind = d_kind;
953 break;
955 case tcc_type: /* a type node */
956 kind = t_kind;
957 break;
959 case tcc_statement: /* an expression with side effects */
960 kind = s_kind;
961 break;
963 case tcc_reference: /* a reference */
964 kind = r_kind;
965 break;
967 case tcc_expression: /* an expression */
968 case tcc_comparison: /* a comparison expression */
969 case tcc_unary: /* a unary arithmetic expression */
970 case tcc_binary: /* a binary arithmetic expression */
971 kind = e_kind;
972 break;
974 case tcc_constant: /* a constant */
975 kind = c_kind;
976 break;
978 case tcc_exceptional: /* something random, like an identifier. */
979 switch (code)
981 case IDENTIFIER_NODE:
982 kind = id_kind;
983 break;
985 case TREE_VEC:
986 kind = vec_kind;
987 break;
989 case TREE_BINFO:
990 kind = binfo_kind;
991 break;
993 case SSA_NAME:
994 kind = ssa_name_kind;
995 break;
997 case BLOCK:
998 kind = b_kind;
999 break;
1001 case CONSTRUCTOR:
1002 kind = constr_kind;
1003 break;
1005 case OMP_CLAUSE:
1006 kind = omp_clause_kind;
1007 break;
1009 default:
1010 kind = x_kind;
1011 break;
1013 break;
1015 case tcc_vl_exp:
1016 kind = e_kind;
1017 break;
1019 default:
1020 gcc_unreachable ();
1023 tree_code_counts[(int) code]++;
1024 tree_node_counts[(int) kind]++;
1025 tree_node_sizes[(int) kind] += length;
1028 /* Allocate and return a new UID from the DECL_UID namespace. */
1031 allocate_decl_uid (void)
1033 return next_decl_uid++;
1036 /* Return a newly allocated node of code CODE. For decl and type
1037 nodes, some other fields are initialized. The rest of the node is
1038 initialized to zero. This function cannot be used for TREE_VEC,
1039 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1040 tree_code_size.
1042 Achoo! I got a code in the node. */
1044 tree
1045 make_node (enum tree_code code MEM_STAT_DECL)
1047 tree t;
1048 enum tree_code_class type = TREE_CODE_CLASS (code);
1049 size_t length = tree_code_size (code);
1051 record_node_allocation_statistics (code, length);
1053 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1054 TREE_SET_CODE (t, code);
1056 switch (type)
1058 case tcc_statement:
1059 if (code != DEBUG_BEGIN_STMT)
1060 TREE_SIDE_EFFECTS (t) = 1;
1061 break;
1063 case tcc_declaration:
1064 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1066 if (code == FUNCTION_DECL)
1068 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1069 SET_DECL_MODE (t, FUNCTION_MODE);
1071 else
1072 SET_DECL_ALIGN (t, 1);
1074 DECL_SOURCE_LOCATION (t) = input_location;
1075 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1076 DECL_UID (t) = --next_debug_decl_uid;
1077 else
1079 DECL_UID (t) = allocate_decl_uid ();
1080 SET_DECL_PT_UID (t, -1);
1082 if (TREE_CODE (t) == LABEL_DECL)
1083 LABEL_DECL_UID (t) = -1;
1085 break;
1087 case tcc_type:
1088 TYPE_UID (t) = next_type_uid++;
1089 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1090 TYPE_USER_ALIGN (t) = 0;
1091 TYPE_MAIN_VARIANT (t) = t;
1092 TYPE_CANONICAL (t) = t;
1094 /* Default to no attributes for type, but let target change that. */
1095 TYPE_ATTRIBUTES (t) = NULL_TREE;
1096 targetm.set_default_type_attributes (t);
1098 /* We have not yet computed the alias set for this type. */
1099 TYPE_ALIAS_SET (t) = -1;
1100 break;
1102 case tcc_constant:
1103 TREE_CONSTANT (t) = 1;
1104 break;
1106 case tcc_expression:
1107 switch (code)
1109 case INIT_EXPR:
1110 case MODIFY_EXPR:
1111 case VA_ARG_EXPR:
1112 case PREDECREMENT_EXPR:
1113 case PREINCREMENT_EXPR:
1114 case POSTDECREMENT_EXPR:
1115 case POSTINCREMENT_EXPR:
1116 /* All of these have side-effects, no matter what their
1117 operands are. */
1118 TREE_SIDE_EFFECTS (t) = 1;
1119 break;
1121 default:
1122 break;
1124 break;
1126 case tcc_exceptional:
1127 switch (code)
1129 case TARGET_OPTION_NODE:
1130 TREE_TARGET_OPTION(t)
1131 = ggc_cleared_alloc<struct cl_target_option> ();
1132 break;
1134 case OPTIMIZATION_NODE:
1135 TREE_OPTIMIZATION (t)
1136 = ggc_cleared_alloc<struct cl_optimization> ();
1137 break;
1139 default:
1140 break;
1142 break;
1144 default:
1145 /* Other classes need no special treatment. */
1146 break;
1149 return t;
1152 /* Free tree node. */
1154 void
1155 free_node (tree node)
1157 enum tree_code code = TREE_CODE (node);
1158 if (GATHER_STATISTICS)
1160 tree_code_counts[(int) TREE_CODE (node)]--;
1161 tree_node_counts[(int) t_kind]--;
1162 tree_node_sizes[(int) t_kind] -= tree_size (node);
1164 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1165 vec_free (CONSTRUCTOR_ELTS (node));
1166 else if (code == BLOCK)
1167 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1168 else if (code == TREE_BINFO)
1169 vec_free (BINFO_BASE_ACCESSES (node));
1170 ggc_free (node);
1173 /* Return a new node with the same contents as NODE except that its
1174 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1176 tree
1177 copy_node (tree node MEM_STAT_DECL)
1179 tree t;
1180 enum tree_code code = TREE_CODE (node);
1181 size_t length;
1183 gcc_assert (code != STATEMENT_LIST);
1185 length = tree_size (node);
1186 record_node_allocation_statistics (code, length);
1187 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1188 memcpy (t, node, length);
1190 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1191 TREE_CHAIN (t) = 0;
1192 TREE_ASM_WRITTEN (t) = 0;
1193 TREE_VISITED (t) = 0;
1195 if (TREE_CODE_CLASS (code) == tcc_declaration)
1197 if (code == DEBUG_EXPR_DECL)
1198 DECL_UID (t) = --next_debug_decl_uid;
1199 else
1201 DECL_UID (t) = allocate_decl_uid ();
1202 if (DECL_PT_UID_SET_P (node))
1203 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1205 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1206 && DECL_HAS_VALUE_EXPR_P (node))
1208 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1209 DECL_HAS_VALUE_EXPR_P (t) = 1;
1211 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1212 if (VAR_P (node))
1214 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1215 t->decl_with_vis.symtab_node = NULL;
1217 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1219 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1220 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1222 if (TREE_CODE (node) == FUNCTION_DECL)
1224 DECL_STRUCT_FUNCTION (t) = NULL;
1225 t->decl_with_vis.symtab_node = NULL;
1228 else if (TREE_CODE_CLASS (code) == tcc_type)
1230 TYPE_UID (t) = next_type_uid++;
1231 /* The following is so that the debug code for
1232 the copy is different from the original type.
1233 The two statements usually duplicate each other
1234 (because they clear fields of the same union),
1235 but the optimizer should catch that. */
1236 TYPE_SYMTAB_ADDRESS (t) = 0;
1237 TYPE_SYMTAB_DIE (t) = 0;
1239 /* Do not copy the values cache. */
1240 if (TYPE_CACHED_VALUES_P (t))
1242 TYPE_CACHED_VALUES_P (t) = 0;
1243 TYPE_CACHED_VALUES (t) = NULL_TREE;
1246 else if (code == TARGET_OPTION_NODE)
1248 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1249 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1250 sizeof (struct cl_target_option));
1252 else if (code == OPTIMIZATION_NODE)
1254 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1255 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1256 sizeof (struct cl_optimization));
1259 return t;
1262 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1263 For example, this can copy a list made of TREE_LIST nodes. */
1265 tree
1266 copy_list (tree list)
1268 tree head;
1269 tree prev, next;
1271 if (list == 0)
1272 return 0;
1274 head = prev = copy_node (list);
1275 next = TREE_CHAIN (list);
1276 while (next)
1278 TREE_CHAIN (prev) = copy_node (next);
1279 prev = TREE_CHAIN (prev);
1280 next = TREE_CHAIN (next);
1282 return head;
1286 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1287 INTEGER_CST with value CST and type TYPE. */
1289 static unsigned int
1290 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1292 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1293 /* We need extra HWIs if CST is an unsigned integer with its
1294 upper bit set. */
1295 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1296 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1297 return cst.get_len ();
1300 /* Return a new INTEGER_CST with value CST and type TYPE. */
1302 static tree
1303 build_new_int_cst (tree type, const wide_int &cst)
1305 unsigned int len = cst.get_len ();
1306 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1307 tree nt = make_int_cst (len, ext_len);
1309 if (len < ext_len)
1311 --ext_len;
1312 TREE_INT_CST_ELT (nt, ext_len)
1313 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1314 for (unsigned int i = len; i < ext_len; ++i)
1315 TREE_INT_CST_ELT (nt, i) = -1;
1317 else if (TYPE_UNSIGNED (type)
1318 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1320 len--;
1321 TREE_INT_CST_ELT (nt, len)
1322 = zext_hwi (cst.elt (len),
1323 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1326 for (unsigned int i = 0; i < len; i++)
1327 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1328 TREE_TYPE (nt) = type;
1329 return nt;
1332 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1334 static tree
1335 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS])
1337 size_t length = sizeof (struct tree_poly_int_cst);
1338 record_node_allocation_statistics (POLY_INT_CST, length);
1340 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1342 TREE_SET_CODE (t, POLY_INT_CST);
1343 TREE_CONSTANT (t) = 1;
1344 TREE_TYPE (t) = type;
1345 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1346 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1347 return t;
1350 /* Create a constant tree that contains CST sign-extended to TYPE. */
1352 tree
1353 build_int_cst (tree type, poly_int64 cst)
1355 /* Support legacy code. */
1356 if (!type)
1357 type = integer_type_node;
1359 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
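/* Illustrative sketch (not part of the original source): a typical call to
   build_int_cst above, assuming integer_type_node has been initialized as
   in a normal GCC front end:

     tree forty_two = build_int_cst (integer_type_node, 42);

   The value is sign-extended to the precision of the type and the
   resulting INTEGER_CST is shared via wide_int_to_tree.  */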
1362 /* Create a constant tree that contains CST zero-extended to TYPE. */
1364 tree
1365 build_int_cstu (tree type, poly_uint64 cst)
1367 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1370 /* Create a constant tree that contains CST sign-extended to TYPE. */
1372 tree
1373 build_int_cst_type (tree type, poly_int64 cst)
1375 gcc_assert (type);
1376 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1379 /* Constructs a tree of type TYPE with the value given by CST. Signedness
1380 of CST is assumed to be the same as the signedness of TYPE. */
1382 tree
1383 double_int_to_tree (tree type, double_int cst)
1385 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1388 /* We force the wide_int CST to the range of the type TYPE by sign or
1389 zero extending it. OVERFLOWABLE indicates whether we are interested in
1390 overflow of the value: when >0 we are only interested in signed
1391 overflow, when <0 we are interested in any overflow. OVERFLOWED
1392 indicates whether overflow has already occurred. We force
1393 the value to be within range of the type (by setting to 0 or 1 all
1394 the bits outside the type's range). We set TREE_OVERFLOW if
1395 OVERFLOWED is nonzero,
1396 or OVERFLOWABLE is >0 and signed overflow occurs,
1397 or OVERFLOWABLE is <0 and any overflow occurs.
1398 We return a new tree node for the extended wide_int. The node
1399 is shared if no overflow flags are set. */
1403 tree
1404 force_fit_type (tree type, const poly_wide_int_ref &cst,
1405 int overflowable, bool overflowed)
1407 signop sign = TYPE_SIGN (type);
1409 /* If we need to set overflow flags, return a new unshared node. */
1410 if (overflowed || !wi::fits_to_tree_p (cst, type))
1412 if (overflowed
1413 || overflowable < 0
1414 || (overflowable > 0 && sign == SIGNED))
1416 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1417 sign);
1418 tree t;
1419 if (tmp.is_constant ())
1420 t = build_new_int_cst (type, tmp.coeffs[0]);
1421 else
1423 tree coeffs[NUM_POLY_INT_COEFFS];
1424 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1426 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1427 TREE_OVERFLOW (coeffs[i]) = 1;
1429 t = build_new_poly_int_cst (type, coeffs);
1431 TREE_OVERFLOW (t) = 1;
1432 return t;
1436 /* Else build a shared node. */
1437 return wide_int_to_tree (type, cst);
1440 /* These are the hash table functions for the hash table of INTEGER_CST
1441 nodes of a sizetype. */
1443 /* Return the hash code X, an INTEGER_CST. */
1445 hashval_t
1446 int_cst_hasher::hash (tree x)
1448 const_tree const t = x;
1449 hashval_t code = TYPE_UID (TREE_TYPE (t));
1450 int i;
1452 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1453 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1455 return code;
1458 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1459 is the same as that given by *Y (also an INTEGER_CST tree node). */
1461 bool
1462 int_cst_hasher::equal (tree x, tree y)
1464 const_tree const xt = x;
1465 const_tree const yt = y;
1467 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1468 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1469 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1470 return false;
1472 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1473 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1474 return false;
1476 return true;
1479 /* Create an INTEGER_CST node of TYPE and value CST.
1480 The returned node is always shared. For small integers we use a
1481 per-type vector cache, for larger ones we use a single hash table.
1482 The value is extended from its precision according to the sign of
1483 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1484 the upper bits and ensures that hashing and value equality based
1485 upon the underlying HOST_WIDE_INTs works without masking. */
1487 static tree
1488 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1490 tree t;
1491 int ix = -1;
1492 int limit = 0;
1494 gcc_assert (type);
1495 unsigned int prec = TYPE_PRECISION (type);
1496 signop sgn = TYPE_SIGN (type);
1498 /* Verify that everything is canonical. */
1499 int l = pcst.get_len ();
1500 if (l > 1)
1502 if (pcst.elt (l - 1) == 0)
1503 gcc_checking_assert (pcst.elt (l - 2) < 0);
1504 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1505 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1508 wide_int cst = wide_int::from (pcst, prec, sgn);
1509 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1511 if (ext_len == 1)
1513 /* We just need to store a single HOST_WIDE_INT. */
1514 HOST_WIDE_INT hwi;
1515 if (TYPE_UNSIGNED (type))
1516 hwi = cst.to_uhwi ();
1517 else
1518 hwi = cst.to_shwi ();
1520 switch (TREE_CODE (type))
1522 case NULLPTR_TYPE:
1523 gcc_assert (hwi == 0);
1524 /* Fallthru. */
1526 case POINTER_TYPE:
1527 case REFERENCE_TYPE:
1528 case POINTER_BOUNDS_TYPE:
1529 /* Cache NULL pointer and zero bounds. */
1530 if (hwi == 0)
1532 limit = 1;
1533 ix = 0;
1535 break;
1537 case BOOLEAN_TYPE:
1538 /* Cache false or true. */
1539 limit = 2;
1540 if (IN_RANGE (hwi, 0, 1))
1541 ix = hwi;
1542 break;
1544 case INTEGER_TYPE:
1545 case OFFSET_TYPE:
1546 if (TYPE_SIGN (type) == UNSIGNED)
1548 /* Cache [0, N). */
1549 limit = INTEGER_SHARE_LIMIT;
1550 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1551 ix = hwi;
1553 else
1555 /* Cache [-1, N). */
1556 limit = INTEGER_SHARE_LIMIT + 1;
1557 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1558 ix = hwi + 1;
1560 break;
1562 case ENUMERAL_TYPE:
1563 break;
1565 default:
1566 gcc_unreachable ();
1569 if (ix >= 0)
1571 /* Look for it in the type's vector of small shared ints. */
1572 if (!TYPE_CACHED_VALUES_P (type))
1574 TYPE_CACHED_VALUES_P (type) = 1;
1575 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1578 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1579 if (t)
1580 /* Make sure no one is clobbering the shared constant. */
1581 gcc_checking_assert (TREE_TYPE (t) == type
1582 && TREE_INT_CST_NUNITS (t) == 1
1583 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1584 && TREE_INT_CST_EXT_NUNITS (t) == 1
1585 && TREE_INT_CST_ELT (t, 0) == hwi);
1586 else
1588 /* Create a new shared int. */
1589 t = build_new_int_cst (type, cst);
1590 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1593 else
1595 /* Use the cache of larger shared ints, using int_cst_node as
1596 a temporary. */
1598 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1599 TREE_TYPE (int_cst_node) = type;
1601 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1602 t = *slot;
1603 if (!t)
1605 /* Insert this one into the hash table. */
1606 t = int_cst_node;
1607 *slot = t;
1608 /* Make a new node for next time round. */
1609 int_cst_node = make_int_cst (1, 1);
1613 else
1615 /* The value either hashes properly or we drop it on the floor
1616 for the gc to take care of. There will not be enough of them
1617 to worry about. */
1619 tree nt = build_new_int_cst (type, cst);
1620 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1621 t = *slot;
1622 if (!t)
1624 /* Insert this one into the hash table. */
1625 t = nt;
1626 *slot = t;
1628 else
1629 ggc_free (nt);
1632 return t;
1635 hashval_t
1636 poly_int_cst_hasher::hash (tree t)
1638 inchash::hash hstate;
1640 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1641 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1642 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1644 return hstate.end ();
1647 bool
1648 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1650 if (TREE_TYPE (x) != y.first)
1651 return false;
1652 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1653 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1654 return false;
1655 return true;
1658 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1659 The elements must also have type TYPE. */
1661 tree
1662 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1664 unsigned int prec = TYPE_PRECISION (type);
1665 gcc_assert (prec <= values.coeffs[0].get_precision ());
1666 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1668 inchash::hash h;
1669 h.add_int (TYPE_UID (type));
1670 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1671 h.add_wide_int (c.coeffs[i]);
1672 poly_int_cst_hasher::compare_type comp (type, &c);
1673 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1674 INSERT);
1675 if (*slot == NULL_TREE)
1677 tree coeffs[NUM_POLY_INT_COEFFS];
1678 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1679 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1680 *slot = build_new_poly_int_cst (type, coeffs);
1682 return *slot;
1685 /* Create a constant tree with value VALUE in type TYPE. */
1687 tree
1688 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1690 if (value.is_constant ())
1691 return wide_int_to_tree_1 (type, value.coeffs[0]);
1692 return build_poly_int_cst (type, value);
1695 void
1696 cache_integer_cst (tree t)
1698 tree type = TREE_TYPE (t);
1699 int ix = -1;
1700 int limit = 0;
1701 int prec = TYPE_PRECISION (type);
1703 gcc_assert (!TREE_OVERFLOW (t));
1705 switch (TREE_CODE (type))
1707 case NULLPTR_TYPE:
1708 gcc_assert (integer_zerop (t));
1709 /* Fallthru. */
1711 case POINTER_TYPE:
1712 case REFERENCE_TYPE:
1713 /* Cache NULL pointer. */
1714 if (integer_zerop (t))
1716 limit = 1;
1717 ix = 0;
1719 break;
1721 case BOOLEAN_TYPE:
1722 /* Cache false or true. */
1723 limit = 2;
1724 if (wi::ltu_p (wi::to_wide (t), 2))
1725 ix = TREE_INT_CST_ELT (t, 0);
1726 break;
1728 case INTEGER_TYPE:
1729 case OFFSET_TYPE:
1730 if (TYPE_UNSIGNED (type))
1732 /* Cache 0..N */
1733 limit = INTEGER_SHARE_LIMIT;
1735 /* This is a little hokey, but if the prec is smaller than
1736 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1737 obvious test will not get the correct answer. */
1738 if (prec < HOST_BITS_PER_WIDE_INT)
1740 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1741 ix = tree_to_uhwi (t);
1743 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1744 ix = tree_to_uhwi (t);
1746 else
1748 /* Cache -1..N */
1749 limit = INTEGER_SHARE_LIMIT + 1;
1751 if (integer_minus_onep (t))
1752 ix = 0;
1753 else if (!wi::neg_p (wi::to_wide (t)))
1755 if (prec < HOST_BITS_PER_WIDE_INT)
1757 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1758 ix = tree_to_shwi (t) + 1;
1760 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1761 ix = tree_to_shwi (t) + 1;
1764 break;
1766 case ENUMERAL_TYPE:
1767 break;
1769 default:
1770 gcc_unreachable ();
1773 if (ix >= 0)
1775 /* Look for it in the type's vector of small shared ints. */
1776 if (!TYPE_CACHED_VALUES_P (type))
1778 TYPE_CACHED_VALUES_P (type) = 1;
1779 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1782 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1783 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1785 else
1787 /* Use the cache of larger shared ints. */
1788 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1789 /* If there is already an entry for the number verify it's the
1790 same. */
1791 if (*slot)
1792 gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
1793 else
1794 /* Otherwise insert this one into the hash table. */
1795 *slot = t;
1800 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1801 and the rest are zeros. */
1803 tree
1804 build_low_bits_mask (tree type, unsigned bits)
1806 gcc_assert (bits <= TYPE_PRECISION (type));
1808 return wide_int_to_tree (type, wi::mask (bits, false,
1809 TYPE_PRECISION (type)));
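/* For illustration (not part of the original source): with a 32-bit
   unsigned_type_node, build_low_bits_mask (unsigned_type_node, 4) produces
   the INTEGER_CST 0xf, i.e. the value of wi::mask (4, false, 32).  */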
1812 /* Checks that X is an integer constant that can be expressed in an (unsigned)
1813 HOST_WIDE_INT without loss of precision. */
1815 bool
1816 cst_and_fits_in_hwi (const_tree x)
1818 return (TREE_CODE (x) == INTEGER_CST
1819 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1822 /* Build a newly constructed VECTOR_CST with the given values of
1823 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1825 tree
1826 make_vector (unsigned log2_npatterns,
1827 unsigned int nelts_per_pattern MEM_STAT_DECL)
1829 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1830 tree t;
1831 unsigned npatterns = 1 << log2_npatterns;
1832 unsigned encoded_nelts = npatterns * nelts_per_pattern;
1833 unsigned length = (sizeof (struct tree_vector)
1834 + (encoded_nelts - 1) * sizeof (tree));
1836 record_node_allocation_statistics (VECTOR_CST, length);
1838 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1840 TREE_SET_CODE (t, VECTOR_CST);
1841 TREE_CONSTANT (t) = 1;
1842 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1843 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1845 return t;
1848 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1849 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1851 tree
1852 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1854 unsigned HOST_WIDE_INT idx, nelts;
1855 tree value;
1857 /* We can't construct a VECTOR_CST for a variable number of elements. */
1858 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
1859 tree_vector_builder vec (type, nelts, 1);
1860 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1862 if (TREE_CODE (value) == VECTOR_CST)
1864 /* If NELTS is constant then this must be too. */
1865 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
1866 for (unsigned i = 0; i < sub_nelts; ++i)
1867 vec.quick_push (VECTOR_CST_ELT (value, i));
1869 else
1870 vec.quick_push (value);
1872 while (vec.length () < nelts)
1873 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1875 return vec.build ();
1878 /* Build a vector of type VECTYPE where all the elements are SCs. */
1879 tree
1880 build_vector_from_val (tree vectype, tree sc)
1882 unsigned HOST_WIDE_INT i, nunits;
1884 if (sc == error_mark_node)
1885 return sc;
1887 /* Verify that the vector type is suitable for SC. Note that there
1888 is some inconsistency in the type-system with respect to restrict
1889 qualifications of pointers. Vector types always have a main-variant
1890 element type and the qualification is applied to the vector-type.
1891 So TREE_TYPE (vector-type) does not return a properly qualified
1892 vector element-type. */
1893 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1894 TREE_TYPE (vectype)));
1896 if (CONSTANT_CLASS_P (sc))
1898 tree_vector_builder v (vectype, 1, 1);
1899 v.quick_push (sc);
1900 return v.build ();
1902 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
1903 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
1904 else
1906 vec<constructor_elt, va_gc> *v;
1907 vec_alloc (v, nunits);
1908 for (i = 0; i < nunits; ++i)
1909 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1910 return build_constructor (vectype, v);
1914 /* Build a vector series of type TYPE in which element I has the value
1915 BASE + I * STEP. The result is a constant if BASE and STEP are constant
1916 and a VEC_SERIES_EXPR otherwise. */
1918 tree
1919 build_vec_series (tree type, tree base, tree step)
1921 if (integer_zerop (step))
1922 return build_vector_from_val (type, base);
1923 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
1925 tree_vector_builder builder (type, 1, 3);
1926 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
1927 wi::to_wide (base) + wi::to_wide (step));
1928 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
1929 wi::to_wide (elt1) + wi::to_wide (step));
1930 builder.quick_push (base);
1931 builder.quick_push (elt1);
1932 builder.quick_push (elt2);
1933 return builder.build ();
1935 return build2 (VEC_SERIES_EXPR, type, base, step);
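/* Illustrative sketch (not part of the original source): assuming
   vec_type is a vector type whose element type is elt_type (both
   hypothetical names here), constant operands such as

     tree base = build_int_cst (elt_type, 1);
     tree step = build_int_cst (elt_type, 2);
     tree series = build_vec_series (vec_type, base, step);

   yield a VECTOR_CST encoded as a single pattern of three elements
   { 1, 3, 5 }; non-constant operands yield a VEC_SERIES_EXPR.  */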
1938 /* Return a vector with the same number of units and number of bits
1939 as VEC_TYPE, but in which the elements are a linear series of unsigned
1940 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
1942 tree
1943 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
1945 tree index_vec_type = vec_type;
1946 tree index_elt_type = TREE_TYPE (vec_type);
1947 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
1948 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
1950 index_elt_type = build_nonstandard_integer_type
1951 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
1952 index_vec_type = build_vector_type (index_elt_type, nunits);
1955 tree_vector_builder v (index_vec_type, 1, 3);
1956 for (unsigned int i = 0; i < 3; ++i)
1957 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
1958 return v.build ();
1961 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1962 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1964 void
1965 recompute_constructor_flags (tree c)
1967 unsigned int i;
1968 tree val;
1969 bool constant_p = true;
1970 bool side_effects_p = false;
1971 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1973 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1975 /* Mostly ctors will have elts that don't have side-effects, so
1976 the usual case is to scan all the elements. Hence a single
1977 loop for both const and side effects, rather than one loop
1978 each (with early outs). */
1979 if (!TREE_CONSTANT (val))
1980 constant_p = false;
1981 if (TREE_SIDE_EFFECTS (val))
1982 side_effects_p = true;
1985 TREE_SIDE_EFFECTS (c) = side_effects_p;
1986 TREE_CONSTANT (c) = constant_p;
1989 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1990 CONSTRUCTOR C. */
1992 void
1993 verify_constructor_flags (tree c)
1995 unsigned int i;
1996 tree val;
1997 bool constant_p = TREE_CONSTANT (c);
1998 bool side_effects_p = TREE_SIDE_EFFECTS (c);
1999 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2001 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2003 if (constant_p && !TREE_CONSTANT (val))
2004 internal_error ("non-constant element in constant CONSTRUCTOR");
2005 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2006 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2010 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2011 are in the vec pointed to by VALS. */
2012 tree
2013 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
2015 tree c = make_node (CONSTRUCTOR);
2017 TREE_TYPE (c) = type;
2018 CONSTRUCTOR_ELTS (c) = vals;
2020 recompute_constructor_flags (c);
2022 return c;
2025 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2026 INDEX and VALUE. */
2027 tree
2028 build_constructor_single (tree type, tree index, tree value)
2030 vec<constructor_elt, va_gc> *v;
2031 constructor_elt elt = {index, value};
2033 vec_alloc (v, 1);
2034 v->quick_push (elt);
2036 return build_constructor (type, v);
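/* For illustration only (hypothetical array_type, idx and val trees):

     tree ctor = build_constructor_single (array_type, idx, val);

   is equivalent to allocating a one-element constructor_elt vector,
   pushing {idx, val} and calling build_constructor.  */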
2040 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2041 are in a list pointed to by VALS. */
2042 tree
2043 build_constructor_from_list (tree type, tree vals)
2045 tree t;
2046 vec<constructor_elt, va_gc> *v = NULL;
2048 if (vals)
2050 vec_alloc (v, list_length (vals));
2051 for (t = vals; t; t = TREE_CHAIN (t))
2052 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2055 return build_constructor (type, v);
2058 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2059 of elements, provided as index/value pairs. */
2061 tree
2062 build_constructor_va (tree type, int nelts, ...)
2064 vec<constructor_elt, va_gc> *v = NULL;
2065 va_list p;
2067 va_start (p, nelts);
2068 vec_alloc (v, nelts);
2069 while (nelts--)
2071 tree index = va_arg (p, tree);
2072 tree value = va_arg (p, tree);
2073 CONSTRUCTOR_APPEND_ELT (v, index, value);
2075 va_end (p);
2076 return build_constructor (type, v);
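/* Illustrative use (not from the original file): the varargs come in
   index/value pairs, e.g. building a two-element initializer from
   hypothetical field0/field1 and val0/val1 trees:

     tree ctor = build_constructor_va (type, 2,
                                       field0, val0,
                                       field1, val1);  */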
2079 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2081 tree
2082 build_fixed (tree type, FIXED_VALUE_TYPE f)
2084 tree v;
2085 FIXED_VALUE_TYPE *fp;
2087 v = make_node (FIXED_CST);
2088 fp = ggc_alloc<fixed_value> ();
2089 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2091 TREE_TYPE (v) = type;
2092 TREE_FIXED_CST_PTR (v) = fp;
2093 return v;
2096 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2098 tree
2099 build_real (tree type, REAL_VALUE_TYPE d)
2101 tree v;
2102 REAL_VALUE_TYPE *dp;
2103 int overflow = 0;
2105 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2106 Consider doing it via real_convert now. */
2108 v = make_node (REAL_CST);
2109 dp = ggc_alloc<real_value> ();
2110 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2112 TREE_TYPE (v) = type;
2113 TREE_REAL_CST_PTR (v) = dp;
2114 TREE_OVERFLOW (v) = overflow;
2115 return v;
2118 /* Like build_real, but first truncate D to the type. */
2120 tree
2121 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2123 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2126 /* Return a new REAL_CST node whose type is TYPE
2127 and whose value is the integer value of the INTEGER_CST node I. */
2129 REAL_VALUE_TYPE
2130 real_value_from_int_cst (const_tree type, const_tree i)
2132 REAL_VALUE_TYPE d;
2134 /* Clear all bits of the real value type so that we can later do
2135 bitwise comparisons to see if two values are the same. */
2136 memset (&d, 0, sizeof d);
2138 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2139 TYPE_SIGN (TREE_TYPE (i)));
2140 return d;
2143 /* Given a tree representing an integer constant I, return a tree
2144 representing the same value as a floating-point constant of type TYPE. */
2146 tree
2147 build_real_from_int_cst (tree type, const_tree i)
2149 tree v;
2150 int overflow = TREE_OVERFLOW (i);
2152 v = build_real (type, real_value_from_int_cst (type, i));
2154 TREE_OVERFLOW (v) |= overflow;
2155 return v;
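/* Usage sketch (illustrative only, hypothetical helper): the common
   ways to obtain a REAL_CST, using the cached dconst1 and
   real_from_string from real.h.  */

static tree
example_real_constants (void)
{
  REAL_VALUE_TYPE pi;
  real_from_string (&pi, "3.14159265358979323846");
  tree one = build_real (double_type_node, dconst1);
  tree pi_cst = build_real (double_type_node, pi);
  /* Folds to a single REAL_CST, roughly 4.14159.  */
  return fold_build2 (PLUS_EXPR, double_type_node, one, pi_cst);
}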
2158 /* Return a newly constructed STRING_CST node whose value is
2159 the LEN characters at STR.
2160 Note that for a C string literal, LEN should include the trailing NUL.
2161 The TREE_TYPE is not initialized. */
2163 tree
2164 build_string (int len, const char *str)
2166 tree s;
2167 size_t length;
2169 /* Do not waste bytes provided by padding of struct tree_string. */
2170 length = len + offsetof (struct tree_string, str) + 1;
2172 record_node_allocation_statistics (STRING_CST, length);
2174 s = (tree) ggc_internal_alloc (length);
2176 memset (s, 0, sizeof (struct tree_typed));
2177 TREE_SET_CODE (s, STRING_CST);
2178 TREE_CONSTANT (s) = 1;
2179 TREE_STRING_LENGTH (s) = len;
2180 memcpy (s->string.str, str, len);
2181 s->string.str[len] = '\0';
2183 return s;
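/* Usage sketch (illustrative only, hypothetical helper): build_string
   leaves TREE_TYPE unset, so callers normally attach an array type
   themselves, as done here for the literal "hello".  */

static tree
example_string_cst (void)
{
  tree str = build_string (6, "hello");	/* 6 bytes include the NUL.  */
  tree domain = build_index_type (size_int (5));	/* char[6] => 0..5.  */
  TREE_TYPE (str) = build_array_type (char_type_node, domain);
  return str;
}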
2186 /* Return a newly constructed COMPLEX_CST node whose value is
2187 specified by the real and imaginary parts REAL and IMAG.
2188 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2189 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2191 tree
2192 build_complex (tree type, tree real, tree imag)
2194 tree t = make_node (COMPLEX_CST);
2196 TREE_REALPART (t) = real;
2197 TREE_IMAGPART (t) = imag;
2198 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2199 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2200 return t;
2203 /* Build a complex (inf +- 0i), such as for the result of cproj.
2204 TYPE is the complex tree type of the result. If NEG is true, the
2205 imaginary zero is negative. */
2207 tree
2208 build_complex_inf (tree type, bool neg)
2210 REAL_VALUE_TYPE rinf, rzero = dconst0;
2212 real_inf (&rinf);
2213 rzero.sign = neg;
2214 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2215 build_real (TREE_TYPE (type), rzero));
2218 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2219 element is set to 1. In particular, this is 1 + i for complex types. */
2221 tree
2222 build_each_one_cst (tree type)
2224 if (TREE_CODE (type) == COMPLEX_TYPE)
2226 tree scalar = build_one_cst (TREE_TYPE (type));
2227 return build_complex (type, scalar, scalar);
2229 else
2230 return build_one_cst (type);
2233 /* Return a constant of arithmetic type TYPE which is the
2234 multiplicative identity of the set TYPE. */
2236 tree
2237 build_one_cst (tree type)
2239 switch (TREE_CODE (type))
2241 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2242 case POINTER_TYPE: case REFERENCE_TYPE:
2243 case OFFSET_TYPE:
2244 return build_int_cst (type, 1);
2246 case REAL_TYPE:
2247 return build_real (type, dconst1);
2249 case FIXED_POINT_TYPE:
2250 /* We can only generate 1 for accum types. */
2251 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2252 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2254 case VECTOR_TYPE:
2256 tree scalar = build_one_cst (TREE_TYPE (type));
2258 return build_vector_from_val (type, scalar);
2261 case COMPLEX_TYPE:
2262 return build_complex (type,
2263 build_one_cst (TREE_TYPE (type)),
2264 build_zero_cst (TREE_TYPE (type)));
2266 default:
2267 gcc_unreachable ();
2271 /* Return an integer of type TYPE containing all 1's in as much precision as
2272 it contains, or a complex or vector whose subparts are such integers. */
2274 tree
2275 build_all_ones_cst (tree type)
2277 if (TREE_CODE (type) == COMPLEX_TYPE)
2279 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2280 return build_complex (type, scalar, scalar);
2282 else
2283 return build_minus_one_cst (type);
2286 /* Return a constant of arithmetic type TYPE which is the
2287 opposite of the multiplicative identity of the set TYPE. */
2289 tree
2290 build_minus_one_cst (tree type)
2292 switch (TREE_CODE (type))
2294 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2295 case POINTER_TYPE: case REFERENCE_TYPE:
2296 case OFFSET_TYPE:
2297 return build_int_cst (type, -1);
2299 case REAL_TYPE:
2300 return build_real (type, dconstm1);
2302 case FIXED_POINT_TYPE:
2303 /* We can only generate -1 for accum types. */
2304 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2305 return build_fixed (type,
2306 fixed_from_double_int (double_int_minus_one,
2307 SCALAR_TYPE_MODE (type)));
2309 case VECTOR_TYPE:
2311 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2313 return build_vector_from_val (type, scalar);
2316 case COMPLEX_TYPE:
2317 return build_complex (type,
2318 build_minus_one_cst (TREE_TYPE (type)),
2319 build_zero_cst (TREE_TYPE (type)));
2321 default:
2322 gcc_unreachable ();
2326 /* Build 0 constant of type TYPE. This is used by constructor folding
2327 and thus the constant should be represented in memory by
2328 zero(es). */
2330 tree
2331 build_zero_cst (tree type)
2333 switch (TREE_CODE (type))
2335 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2336 case POINTER_TYPE: case REFERENCE_TYPE:
2337 case OFFSET_TYPE: case NULLPTR_TYPE:
2338 return build_int_cst (type, 0);
2340 case REAL_TYPE:
2341 return build_real (type, dconst0);
2343 case FIXED_POINT_TYPE:
2344 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2346 case VECTOR_TYPE:
2348 tree scalar = build_zero_cst (TREE_TYPE (type));
2350 return build_vector_from_val (type, scalar);
2353 case COMPLEX_TYPE:
2355 tree zero = build_zero_cst (TREE_TYPE (type));
2357 return build_complex (type, zero, zero);
2360 default:
2361 if (!AGGREGATE_TYPE_P (type))
2362 return fold_convert (type, integer_zero_node);
2363 return build_constructor (type, NULL);
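/* Usage sketch (illustrative only): the generic identity-constant
   builders above work uniformly across scalar, vector and complex
   types; the checks use predicates defined later in this file.  */

static void
example_identity_constants (void)
{
  tree v4si = build_vector_type (intSI_type_node, 4);
  tree ones = build_one_cst (v4si);		/* {1, 1, 1, 1} */
  tree dzero = build_zero_cst (double_type_node);	/* 0.0 */
  tree cm1 = build_minus_one_cst (build_complex_type (double_type_node));
  gcc_checking_assert (integer_onep (VECTOR_CST_ELT (ones, 0)));
  gcc_checking_assert (real_zerop (dzero));
  gcc_checking_assert (real_minus_onep (TREE_REALPART (cm1)));	/* -1 + 0i */
}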
2368 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2370 tree
2371 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2373 tree t;
2374 size_t length = (offsetof (struct tree_binfo, base_binfos)
2375 + vec<tree, va_gc>::embedded_size (base_binfos));
2377 record_node_allocation_statistics (TREE_BINFO, length);
2379 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2381 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2383 TREE_SET_CODE (t, TREE_BINFO);
2385 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2387 return t;
2390 /* Create a CASE_LABEL_EXPR tree node and return it. */
2392 tree
2393 build_case_label (tree low_value, tree high_value, tree label_decl)
2395 tree t = make_node (CASE_LABEL_EXPR);
2397 TREE_TYPE (t) = void_type_node;
2398 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2400 CASE_LOW (t) = low_value;
2401 CASE_HIGH (t) = high_value;
2402 CASE_LABEL (t) = label_decl;
2403 CASE_CHAIN (t) = NULL_TREE;
2405 return t;
2408 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2409 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2410 The latter determines the length of the HOST_WIDE_INT vector. */
2412 tree
2413 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2415 tree t;
2416 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2417 + sizeof (struct tree_int_cst));
2419 gcc_assert (len);
2420 record_node_allocation_statistics (INTEGER_CST, length);
2422 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2424 TREE_SET_CODE (t, INTEGER_CST);
2425 TREE_INT_CST_NUNITS (t) = len;
2426 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2427 /* to_offset can only be applied to trees that are offset_int-sized
2428 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2429 must be exactly the precision of offset_int and so LEN is correct. */
2430 if (ext_len <= OFFSET_INT_ELTS)
2431 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2432 else
2433 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2435 TREE_CONSTANT (t) = 1;
2437 return t;
2440 /* Build a newly constructed TREE_VEC node of length LEN. */
2442 tree
2443 make_tree_vec (int len MEM_STAT_DECL)
2445 tree t;
2446 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2448 record_node_allocation_statistics (TREE_VEC, length);
2450 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2452 TREE_SET_CODE (t, TREE_VEC);
2453 TREE_VEC_LENGTH (t) = len;
2455 return t;
2458 /* Grow a TREE_VEC node to new length LEN. */
2460 tree
2461 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2463 gcc_assert (TREE_CODE (v) == TREE_VEC);
2465 int oldlen = TREE_VEC_LENGTH (v);
2466 gcc_assert (len > oldlen);
2468 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2469 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2471 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2473 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2475 TREE_VEC_LENGTH (v) = len;
2477 return v;
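/* Usage sketch (illustrative only, hypothetical helper): a TREE_VEC
   holding the integer constants 0, 1, 2; grow_tree_vec could later
   extend it in place.  */

static tree
example_tree_vec (void)
{
  tree v = make_tree_vec (3);
  for (int i = 0; i < 3; i++)
    TREE_VEC_ELT (v, i) = build_int_cst (integer_type_node, i);
  return v;
}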
2480 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2481 fixed, and scalar, complex or vector. */
2484 zerop (const_tree expr)
2486 return (integer_zerop (expr)
2487 || real_zerop (expr)
2488 || fixed_zerop (expr));
2491 /* Return 1 if EXPR is the integer constant zero or a complex constant
2492 of zero. */
2495 integer_zerop (const_tree expr)
2497 switch (TREE_CODE (expr))
2499 case INTEGER_CST:
2500 return wi::to_wide (expr) == 0;
2501 case COMPLEX_CST:
2502 return (integer_zerop (TREE_REALPART (expr))
2503 && integer_zerop (TREE_IMAGPART (expr)));
2504 case VECTOR_CST:
2505 return (VECTOR_CST_NPATTERNS (expr) == 1
2506 && VECTOR_CST_DUPLICATE_P (expr)
2507 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2508 default:
2509 return false;
2513 /* Return 1 if EXPR is the integer constant one or the corresponding
2514 complex constant. */
2517 integer_onep (const_tree expr)
2519 switch (TREE_CODE (expr))
2521 case INTEGER_CST:
2522 return wi::eq_p (wi::to_widest (expr), 1);
2523 case COMPLEX_CST:
2524 return (integer_onep (TREE_REALPART (expr))
2525 && integer_zerop (TREE_IMAGPART (expr)));
2526 case VECTOR_CST:
2527 return (VECTOR_CST_NPATTERNS (expr) == 1
2528 && VECTOR_CST_DUPLICATE_P (expr)
2529 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2530 default:
2531 return false;
2535 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2536 return 1 if every piece is the integer constant one. */
2539 integer_each_onep (const_tree expr)
2541 if (TREE_CODE (expr) == COMPLEX_CST)
2542 return (integer_onep (TREE_REALPART (expr))
2543 && integer_onep (TREE_IMAGPART (expr)));
2544 else
2545 return integer_onep (expr);
2548 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2549 it contains, or a complex or vector whose subparts are such integers. */
2552 integer_all_onesp (const_tree expr)
2554 if (TREE_CODE (expr) == COMPLEX_CST
2555 && integer_all_onesp (TREE_REALPART (expr))
2556 && integer_all_onesp (TREE_IMAGPART (expr)))
2557 return 1;
2559 else if (TREE_CODE (expr) == VECTOR_CST)
2560 return (VECTOR_CST_NPATTERNS (expr) == 1
2561 && VECTOR_CST_DUPLICATE_P (expr)
2562 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2564 else if (TREE_CODE (expr) != INTEGER_CST)
2565 return 0;
2567 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2568 == wi::to_wide (expr));
2571 /* Return 1 if EXPR is the integer constant minus one. */
2574 integer_minus_onep (const_tree expr)
2576 if (TREE_CODE (expr) == COMPLEX_CST)
2577 return (integer_all_onesp (TREE_REALPART (expr))
2578 && integer_zerop (TREE_IMAGPART (expr)));
2579 else
2580 return integer_all_onesp (expr);
2583 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2584 one bit on). */
2587 integer_pow2p (const_tree expr)
2589 if (TREE_CODE (expr) == COMPLEX_CST
2590 && integer_pow2p (TREE_REALPART (expr))
2591 && integer_zerop (TREE_IMAGPART (expr)))
2592 return 1;
2594 if (TREE_CODE (expr) != INTEGER_CST)
2595 return 0;
2597 return wi::popcount (wi::to_wide (expr)) == 1;
2600 /* Return 1 if EXPR is an integer constant other than zero or a
2601 complex constant other than zero. */
2604 integer_nonzerop (const_tree expr)
2606 return ((TREE_CODE (expr) == INTEGER_CST
2607 && wi::to_wide (expr) != 0)
2608 || (TREE_CODE (expr) == COMPLEX_CST
2609 && (integer_nonzerop (TREE_REALPART (expr))
2610 || integer_nonzerop (TREE_IMAGPART (expr)))));
2613 /* Return 1 if EXPR is the integer constant one. For vector,
2614 return 1 if every piece is the integer constant minus one
2615 (representing the value TRUE). */
2618 integer_truep (const_tree expr)
2620 if (TREE_CODE (expr) == VECTOR_CST)
2621 return integer_all_onesp (expr);
2622 return integer_onep (expr);
2625 /* Return 1 if EXPR is the fixed-point constant zero. */
2628 fixed_zerop (const_tree expr)
2630 return (TREE_CODE (expr) == FIXED_CST
2631 && TREE_FIXED_CST (expr).data.is_zero ());
2634 /* Return the power of two represented by a tree node known to be a
2635 power of two. */
2638 tree_log2 (const_tree expr)
2640 if (TREE_CODE (expr) == COMPLEX_CST)
2641 return tree_log2 (TREE_REALPART (expr));
2643 return wi::exact_log2 (wi::to_wide (expr));
2646 /* Similar, but return the largest integer Y such that 2 ** Y is less
2647 than or equal to EXPR. */
2650 tree_floor_log2 (const_tree expr)
2652 if (TREE_CODE (expr) == COMPLEX_CST)
2653 return tree_log2 (TREE_REALPART (expr));
2655 return wi::floor_log2 (wi::to_wide (expr));
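/* Usage sketch (illustrative only): how the constant predicates and
   logarithm helpers above behave on a few freshly built constants.  */

static void
example_constant_predicates (void)
{
  tree four = build_int_cst (integer_type_node, 4);
  tree zero = build_zero_cst (integer_type_node);
  tree allones = build_all_ones_cst (unsigned_type_node);
  gcc_checking_assert (integer_pow2p (four) && tree_log2 (four) == 2);
  gcc_checking_assert (integer_zerop (zero) && !integer_nonzerop (zero));
  gcc_checking_assert (integer_all_onesp (allones));
}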
2658 /* Return number of known trailing zero bits in EXPR, or, if the value of
2659 EXPR is known to be zero, the precision of its type. */
2661 unsigned int
2662 tree_ctz (const_tree expr)
2664 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2665 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2666 return 0;
2668 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2669 switch (TREE_CODE (expr))
2671 case INTEGER_CST:
2672 ret1 = wi::ctz (wi::to_wide (expr));
2673 return MIN (ret1, prec);
2674 case SSA_NAME:
2675 ret1 = wi::ctz (get_nonzero_bits (expr));
2676 return MIN (ret1, prec);
2677 case PLUS_EXPR:
2678 case MINUS_EXPR:
2679 case BIT_IOR_EXPR:
2680 case BIT_XOR_EXPR:
2681 case MIN_EXPR:
2682 case MAX_EXPR:
2683 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2684 if (ret1 == 0)
2685 return ret1;
2686 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2687 return MIN (ret1, ret2);
2688 case POINTER_PLUS_EXPR:
2689 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2690 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2691 /* Second operand is sizetype, which could be in theory
2692 wider than pointer's precision. Make sure we never
2693 return more than prec. */
2694 ret2 = MIN (ret2, prec);
2695 return MIN (ret1, ret2);
2696 case BIT_AND_EXPR:
2697 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2698 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2699 return MAX (ret1, ret2);
2700 case MULT_EXPR:
2701 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2702 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2703 return MIN (ret1 + ret2, prec);
2704 case LSHIFT_EXPR:
2705 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2706 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2707 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2709 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2710 return MIN (ret1 + ret2, prec);
2712 return ret1;
2713 case RSHIFT_EXPR:
2714 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2715 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2717 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2718 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2719 if (ret1 > ret2)
2720 return ret1 - ret2;
2722 return 0;
2723 case TRUNC_DIV_EXPR:
2724 case CEIL_DIV_EXPR:
2725 case FLOOR_DIV_EXPR:
2726 case ROUND_DIV_EXPR:
2727 case EXACT_DIV_EXPR:
2728 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2729 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2731 int l = tree_log2 (TREE_OPERAND (expr, 1));
2732 if (l >= 0)
2734 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2735 ret2 = l;
2736 if (ret1 > ret2)
2737 return ret1 - ret2;
2740 return 0;
2741 CASE_CONVERT:
2742 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2743 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2744 ret1 = prec;
2745 return MIN (ret1, prec);
2746 case SAVE_EXPR:
2747 return tree_ctz (TREE_OPERAND (expr, 0));
2748 case COND_EXPR:
2749 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2750 if (ret1 == 0)
2751 return 0;
2752 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2753 return MIN (ret1, ret2);
2754 case COMPOUND_EXPR:
2755 return tree_ctz (TREE_OPERAND (expr, 1));
2756 case ADDR_EXPR:
2757 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2758 if (ret1 > BITS_PER_UNIT)
2760 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2761 return MIN (ret1, prec);
2763 return 0;
2764 default:
2765 return 0;
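/* Usage sketch (illustrative only, hypothetical helper): tree_ctz is
   how alignment facts are derived from expressions.  N is assumed to
   be some expression of type sizetype, e.g. a PARM_DECL, about which
   nothing is known.  */

static unsigned int
example_known_trailing_zeros (tree n)
{
  /* N * 8 is a multiple of 8 whatever N is, so at least three
     trailing zero bits are known (the MULT_EXPR case above).  */
  tree scaled = build2 (MULT_EXPR, sizetype, n, size_int (8));
  return tree_ctz (scaled);
}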
2769 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2770 decimal float constants, so don't return 1 for them. */
2773 real_zerop (const_tree expr)
2775 switch (TREE_CODE (expr))
2777 case REAL_CST:
2778 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2779 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2780 case COMPLEX_CST:
2781 return real_zerop (TREE_REALPART (expr))
2782 && real_zerop (TREE_IMAGPART (expr));
2783 case VECTOR_CST:
2785 /* Don't simply check for a duplicate because the predicate
2786 accepts both +0.0 and -0.0. */
2787 unsigned count = vector_cst_encoded_nelts (expr);
2788 for (unsigned int i = 0; i < count; ++i)
2789 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
2790 return false;
2791 return true;
2793 default:
2794 return false;
2798 /* Return 1 if EXPR is the real constant one in real or complex form.
2799 Trailing zeroes matter for decimal float constants, so don't return
2800 1 for them. */
2803 real_onep (const_tree expr)
2805 switch (TREE_CODE (expr))
2807 case REAL_CST:
2808 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2809 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2810 case COMPLEX_CST:
2811 return real_onep (TREE_REALPART (expr))
2812 && real_zerop (TREE_IMAGPART (expr));
2813 case VECTOR_CST:
2814 return (VECTOR_CST_NPATTERNS (expr) == 1
2815 && VECTOR_CST_DUPLICATE_P (expr)
2816 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2817 default:
2818 return false;
2822 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2823 matter for decimal float constants, so don't return 1 for them. */
2826 real_minus_onep (const_tree expr)
2828 switch (TREE_CODE (expr))
2830 case REAL_CST:
2831 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2832 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2833 case COMPLEX_CST:
2834 return real_minus_onep (TREE_REALPART (expr))
2835 && real_zerop (TREE_IMAGPART (expr));
2836 case VECTOR_CST:
2837 return (VECTOR_CST_NPATTERNS (expr) == 1
2838 && VECTOR_CST_DUPLICATE_P (expr)
2839 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2840 default:
2841 return false;
2845 /* Nonzero if EXP is a constant or a cast of a constant. */
2848 really_constant_p (const_tree exp)
2850 /* This is not quite the same as STRIP_NOPS. It does more. */
2851 while (CONVERT_EXPR_P (exp)
2852 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2853 exp = TREE_OPERAND (exp, 0);
2854 return TREE_CONSTANT (exp);
2857 /* Return true if T holds a polynomial pointer difference, storing it in
2858 *VALUE if so. A true return means that T's precision is no greater
2859 than 64 bits, which is the largest address space we support, so *VALUE
2860 never loses precision. However, the signedness of the result does
2861 not necessarily match the signedness of T: sometimes an unsigned type
2862 like sizetype is used to encode a value that is actually negative. */
2864 bool
2865 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
2867 if (!t)
2868 return false;
2869 if (TREE_CODE (t) == INTEGER_CST)
2871 if (!cst_and_fits_in_hwi (t))
2872 return false;
2873 *value = int_cst_value (t);
2874 return true;
2876 if (POLY_INT_CST_P (t))
2878 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2879 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
2880 return false;
2881 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
2882 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
2883 return true;
2885 return false;
2888 poly_int64
2889 tree_to_poly_int64 (const_tree t)
2891 gcc_assert (tree_fits_poly_int64_p (t));
2892 if (POLY_INT_CST_P (t))
2893 return poly_int_cst_value (t).force_shwi ();
2894 return TREE_INT_CST_LOW (t);
2897 poly_uint64
2898 tree_to_poly_uint64 (const_tree t)
2900 gcc_assert (tree_fits_poly_uint64_p (t));
2901 if (POLY_INT_CST_P (t))
2902 return poly_int_cst_value (t).force_uhwi ();
2903 return TREE_INT_CST_LOW (t);
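/* Usage sketch (illustrative only, hypothetical helper): sizes of
   complete, reasonably sized types can be read directly with
   tree_to_poly_int64, while a byte offset such as one returned by
   get_inner_reference may be symbolic and therefore goes through
   ptrdiff_tree_p.  */

static bool
example_poly_size_and_offset (tree type, tree byte_off,
			      poly_int64 *size_out, poly_int64 *off_out)
{
  *size_out = tree_to_poly_int64 (TYPE_SIZE_UNIT (type));
  return ptrdiff_tree_p (byte_off, off_out);
}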
2906 /* Return first list element whose TREE_VALUE is ELEM.
2907 Return 0 if ELEM is not in LIST. */
2909 tree
2910 value_member (tree elem, tree list)
2912 while (list)
2914 if (elem == TREE_VALUE (list))
2915 return list;
2916 list = TREE_CHAIN (list);
2918 return NULL_TREE;
2921 /* Return first list element whose TREE_PURPOSE is ELEM.
2922 Return 0 if ELEM is not in LIST. */
2924 tree
2925 purpose_member (const_tree elem, tree list)
2927 while (list)
2929 if (elem == TREE_PURPOSE (list))
2930 return list;
2931 list = TREE_CHAIN (list);
2933 return NULL_TREE;
2936 /* Return true if ELEM is in V. */
2938 bool
2939 vec_member (const_tree elem, vec<tree, va_gc> *v)
2941 unsigned ix;
2942 tree t;
2943 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2944 if (elem == t)
2945 return true;
2946 return false;
2949 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2950 NULL_TREE. */
2952 tree
2953 chain_index (int idx, tree chain)
2955 for (; chain && idx > 0; --idx)
2956 chain = TREE_CHAIN (chain);
2957 return chain;
2960 /* Return nonzero if ELEM is part of the chain CHAIN. */
2963 chain_member (const_tree elem, const_tree chain)
2965 while (chain)
2967 if (elem == chain)
2968 return 1;
2969 chain = DECL_CHAIN (chain);
2972 return 0;
2975 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2976 We expect a null pointer to mark the end of the chain.
2977 This is the Lisp primitive `length'. */
2980 list_length (const_tree t)
2982 const_tree p = t;
2983 #ifdef ENABLE_TREE_CHECKING
2984 const_tree q = t;
2985 #endif
2986 int len = 0;
2988 while (p)
2990 p = TREE_CHAIN (p);
2991 #ifdef ENABLE_TREE_CHECKING
2992 if (len % 2)
2993 q = TREE_CHAIN (q);
2994 gcc_assert (p != q);
2995 #endif
2996 len++;
2999 return len;
3002 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3003 UNION_TYPE TYPE, or NULL_TREE if none. */
3005 tree
3006 first_field (const_tree type)
3008 tree t = TYPE_FIELDS (type);
3009 while (t && TREE_CODE (t) != FIELD_DECL)
3010 t = TREE_CHAIN (t);
3011 return t;
3014 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3015 by modifying the last node in chain 1 to point to chain 2.
3016 This is the Lisp primitive `nconc'. */
3018 tree
3019 chainon (tree op1, tree op2)
3021 tree t1;
3023 if (!op1)
3024 return op2;
3025 if (!op2)
3026 return op1;
3028 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3029 continue;
3030 TREE_CHAIN (t1) = op2;
3032 #ifdef ENABLE_TREE_CHECKING
3034 tree t2;
3035 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3036 gcc_assert (t2 != t1);
3038 #endif
3040 return op1;
3043 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3045 tree
3046 tree_last (tree chain)
3048 tree next;
3049 if (chain)
3050 while ((next = TREE_CHAIN (chain)))
3051 chain = next;
3052 return chain;
3055 /* Reverse the order of elements in the chain T,
3056 and return the new head of the chain (old last element). */
3058 tree
3059 nreverse (tree t)
3061 tree prev = 0, decl, next;
3062 for (decl = t; decl; decl = next)
3064 /* We shouldn't be using this function to reverse BLOCK chains; we
3065 have blocks_nreverse for that. */
3066 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3067 next = TREE_CHAIN (decl);
3068 TREE_CHAIN (decl) = prev;
3069 prev = decl;
3071 return prev;
3074 /* Return a newly created TREE_LIST node whose
3075 purpose and value fields are PARM and VALUE. */
3077 tree
3078 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3080 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3081 TREE_PURPOSE (t) = parm;
3082 TREE_VALUE (t) = value;
3083 return t;
3086 /* Build a chain of TREE_LIST nodes from a vector. */
3088 tree
3089 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3091 tree ret = NULL_TREE;
3092 tree *pp = &ret;
3093 unsigned int i;
3094 tree t;
3095 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3097 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3098 pp = &TREE_CHAIN (*pp);
3100 return ret;
3103 /* Return a newly created TREE_LIST node whose
3104 purpose and value fields are PURPOSE and VALUE
3105 and whose TREE_CHAIN is CHAIN. */
3107 tree
3108 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3110 tree node;
3112 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3113 memset (node, 0, sizeof (struct tree_common));
3115 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3117 TREE_SET_CODE (node, TREE_LIST);
3118 TREE_CHAIN (node) = chain;
3119 TREE_PURPOSE (node) = purpose;
3120 TREE_VALUE (node) = value;
3121 return node;
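/* Usage sketch (illustrative only, hypothetical helper): building a
   TREE_LIST the way front ends typically do while parsing, pushing in
   reverse and restoring source order with nreverse.  */

static tree
example_identifier_list (void)
{
  tree list = NULL_TREE;
  list = tree_cons (NULL_TREE, get_identifier ("first"), list);
  list = tree_cons (NULL_TREE, get_identifier ("second"), list);
  list = nreverse (list);
  gcc_checking_assert (list_length (list) == 2
		       && value_member (get_identifier ("first"), list));
  return list;
}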
3124 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3125 trees. */
3127 vec<tree, va_gc> *
3128 ctor_to_vec (tree ctor)
3130 vec<tree, va_gc> *vec;
3131 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3132 unsigned int ix;
3133 tree val;
3135 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3136 vec->quick_push (val);
3138 return vec;
3141 /* Return the size nominally occupied by an object of type TYPE
3142 when it resides in memory. The value is measured in units of bytes,
3143 and its data type is that normally used for type sizes
3144 (which is the first type created by make_signed_type or
3145 make_unsigned_type). */
3147 tree
3148 size_in_bytes_loc (location_t loc, const_tree type)
3150 tree t;
3152 if (type == error_mark_node)
3153 return integer_zero_node;
3155 type = TYPE_MAIN_VARIANT (type);
3156 t = TYPE_SIZE_UNIT (type);
3158 if (t == 0)
3160 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3161 return size_zero_node;
3164 return t;
3167 /* Return the size of TYPE (in bytes) as a wide integer
3168 or return -1 if the size can vary or is larger than an integer. */
3170 HOST_WIDE_INT
3171 int_size_in_bytes (const_tree type)
3173 tree t;
3175 if (type == error_mark_node)
3176 return 0;
3178 type = TYPE_MAIN_VARIANT (type);
3179 t = TYPE_SIZE_UNIT (type);
3181 if (t && tree_fits_uhwi_p (t))
3182 return TREE_INT_CST_LOW (t);
3183 else
3184 return -1;
3187 /* Return the maximum size of TYPE (in bytes) as a wide integer
3188 or return -1 if the size can vary or is larger than an integer. */
3190 HOST_WIDE_INT
3191 max_int_size_in_bytes (const_tree type)
3193 HOST_WIDE_INT size = -1;
3194 tree size_tree;
3196 /* If this is an array type, check for a possible MAX_SIZE attached. */
3198 if (TREE_CODE (type) == ARRAY_TYPE)
3200 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3202 if (size_tree && tree_fits_uhwi_p (size_tree))
3203 size = tree_to_uhwi (size_tree);
3206 /* If we still haven't been able to get a size, see if the language
3207 can compute a maximum size. */
3209 if (size == -1)
3211 size_tree = lang_hooks.types.max_size (type);
3213 if (size_tree && tree_fits_uhwi_p (size_tree))
3214 size = tree_to_uhwi (size_tree);
3217 return size;
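/* Usage sketch (illustrative only, hypothetical helper): reporting a
   compile-time-constant size in bytes, declining for variable-sized
   or oversized types, for which int_size_in_bytes returns -1.  */

static bool
example_constant_size_p (tree type, unsigned HOST_WIDE_INT *size_out)
{
  HOST_WIDE_INT isize = int_size_in_bytes (type);
  if (isize < 0)
    return false;
  *size_out = (unsigned HOST_WIDE_INT) isize;
  return true;
}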
3220 /* Return the bit position of FIELD, in bits from the start of the record.
3221 This is a tree of type bitsizetype. */
3223 tree
3224 bit_position (const_tree field)
3226 return bit_from_pos (DECL_FIELD_OFFSET (field),
3227 DECL_FIELD_BIT_OFFSET (field));
3230 /* Return the byte position of FIELD, in bytes from the start of the record.
3231 This is a tree of type sizetype. */
3233 tree
3234 byte_position (const_tree field)
3236 return byte_from_pos (DECL_FIELD_OFFSET (field),
3237 DECL_FIELD_BIT_OFFSET (field));
3240 /* Likewise, but return as an integer. It must be representable in
3241 that way (since it could be a signed value, we don't have the
3242 option of returning -1 like int_size_in_bytes can). */
3244 HOST_WIDE_INT
3245 int_byte_position (const_tree field)
3247 return tree_to_shwi (byte_position (field));
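/* Usage sketch (illustrative only, hypothetical helper): the byte
   offset of the first field of an already laid-out RECORD_TYPE,
   combining first_field above with int_byte_position.  */

static HOST_WIDE_INT
example_first_field_offset (tree record_type)
{
  tree f = first_field (record_type);
  return f ? int_byte_position (f) : -1;
}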
3250 /* Return the strictest alignment, in bits, that T is known to have. */
3252 unsigned int
3253 expr_align (const_tree t)
3255 unsigned int align0, align1;
3257 switch (TREE_CODE (t))
3259 CASE_CONVERT: case NON_LVALUE_EXPR:
3260 /* If we have conversions, we know that the alignment of the
3261 object must meet each of the alignments of the types. */
3262 align0 = expr_align (TREE_OPERAND (t, 0));
3263 align1 = TYPE_ALIGN (TREE_TYPE (t));
3264 return MAX (align0, align1);
3266 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3267 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3268 case CLEANUP_POINT_EXPR:
3269 /* These don't change the alignment of an object. */
3270 return expr_align (TREE_OPERAND (t, 0));
3272 case COND_EXPR:
3273 /* The best we can do is say that the alignment is the least aligned
3274 of the two arms. */
3275 align0 = expr_align (TREE_OPERAND (t, 1));
3276 align1 = expr_align (TREE_OPERAND (t, 2));
3277 return MIN (align0, align1);
3279 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3280 meaningfully, it's always 1. */
3281 case LABEL_DECL: case CONST_DECL:
3282 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3283 case FUNCTION_DECL:
3284 gcc_assert (DECL_ALIGN (t) != 0);
3285 return DECL_ALIGN (t);
3287 default:
3288 break;
3291 /* Otherwise take the alignment from that of the type. */
3292 return TYPE_ALIGN (TREE_TYPE (t));
3295 /* Return, as a tree node, the number of elements for TYPE (which is an
3296 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3298 tree
3299 array_type_nelts (const_tree type)
3301 tree index_type, min, max;
3303 /* If they did it with unspecified bounds, then we should have already
3304 given an error about it before we got here. */
3305 if (! TYPE_DOMAIN (type))
3306 return error_mark_node;
3308 index_type = TYPE_DOMAIN (type);
3309 min = TYPE_MIN_VALUE (index_type);
3310 max = TYPE_MAX_VALUE (index_type);
3312 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3313 if (!max)
3314 return error_mark_node;
3316 return (integer_zerop (min)
3317 ? max
3318 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3321 /* If arg is static -- a reference to an object in static storage -- then
3322 return the object. This is not the same as the C meaning of `static'.
3323 If arg isn't static, return NULL. */
3325 tree
3326 staticp (tree arg)
3328 switch (TREE_CODE (arg))
3330 case FUNCTION_DECL:
3331 /* Nested functions are static, even though taking their address will
3332 involve a trampoline as we unnest the nested function and create
3333 the trampoline on the tree level. */
3334 return arg;
3336 case VAR_DECL:
3337 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3338 && ! DECL_THREAD_LOCAL_P (arg)
3339 && ! DECL_DLLIMPORT_P (arg)
3340 ? arg : NULL);
3342 case CONST_DECL:
3343 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3344 ? arg : NULL);
3346 case CONSTRUCTOR:
3347 return TREE_STATIC (arg) ? arg : NULL;
3349 case LABEL_DECL:
3350 case STRING_CST:
3351 return arg;
3353 case COMPONENT_REF:
3354 /* If the thing being referenced is not a field, then it is
3355 something language specific. */
3356 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3358 /* If we are referencing a bitfield, we can't evaluate an
3359 ADDR_EXPR at compile time and so it isn't a constant. */
3360 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3361 return NULL;
3363 return staticp (TREE_OPERAND (arg, 0));
3365 case BIT_FIELD_REF:
3366 return NULL;
3368 case INDIRECT_REF:
3369 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3371 case ARRAY_REF:
3372 case ARRAY_RANGE_REF:
3373 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3374 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3375 return staticp (TREE_OPERAND (arg, 0));
3376 else
3377 return NULL;
3379 case COMPOUND_LITERAL_EXPR:
3380 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3382 default:
3383 return NULL;
3390 /* Return whether OP is a DECL whose address is function-invariant. */
3392 bool
3393 decl_address_invariant_p (const_tree op)
3395 /* The conditions below are slightly less strict than the one in
3396 staticp. */
3398 switch (TREE_CODE (op))
3400 case PARM_DECL:
3401 case RESULT_DECL:
3402 case LABEL_DECL:
3403 case FUNCTION_DECL:
3404 return true;
3406 case VAR_DECL:
3407 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3408 || DECL_THREAD_LOCAL_P (op)
3409 || DECL_CONTEXT (op) == current_function_decl
3410 || decl_function_context (op) == current_function_decl)
3411 return true;
3412 break;
3414 case CONST_DECL:
3415 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3416 || decl_function_context (op) == current_function_decl)
3417 return true;
3418 break;
3420 default:
3421 break;
3424 return false;
3427 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3429 bool
3430 decl_address_ip_invariant_p (const_tree op)
3432 /* The conditions below are slightly less strict than the one in
3433 staticp. */
3435 switch (TREE_CODE (op))
3437 case LABEL_DECL:
3438 case FUNCTION_DECL:
3439 case STRING_CST:
3440 return true;
3442 case VAR_DECL:
3443 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3444 && !DECL_DLLIMPORT_P (op))
3445 || DECL_THREAD_LOCAL_P (op))
3446 return true;
3447 break;
3449 case CONST_DECL:
3450 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3451 return true;
3452 break;
3454 default:
3455 break;
3458 return false;
3462 /* Return true if T is function-invariant (internal function, does
3463 not handle arithmetic; that's handled in skip_simple_arithmetic and
3464 tree_invariant_p). */
3466 static bool
3467 tree_invariant_p_1 (tree t)
3469 tree op;
3471 if (TREE_CONSTANT (t)
3472 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3473 return true;
3475 switch (TREE_CODE (t))
3477 case SAVE_EXPR:
3478 return true;
3480 case ADDR_EXPR:
3481 op = TREE_OPERAND (t, 0);
3482 while (handled_component_p (op))
3484 switch (TREE_CODE (op))
3486 case ARRAY_REF:
3487 case ARRAY_RANGE_REF:
3488 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3489 || TREE_OPERAND (op, 2) != NULL_TREE
3490 || TREE_OPERAND (op, 3) != NULL_TREE)
3491 return false;
3492 break;
3494 case COMPONENT_REF:
3495 if (TREE_OPERAND (op, 2) != NULL_TREE)
3496 return false;
3497 break;
3499 default:;
3501 op = TREE_OPERAND (op, 0);
3504 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3506 default:
3507 break;
3510 return false;
3513 /* Return true if T is function-invariant. */
3515 bool
3516 tree_invariant_p (tree t)
3518 tree inner = skip_simple_arithmetic (t);
3519 return tree_invariant_p_1 (inner);
3522 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3523 Do this to any expression which may be used in more than one place,
3524 but must be evaluated only once.
3526 Normally, expand_expr would reevaluate the expression each time.
3527 Calling save_expr produces something that is evaluated and recorded
3528 the first time expand_expr is called on it. Subsequent calls to
3529 expand_expr just reuse the recorded value.
3531 The call to expand_expr that generates code that actually computes
3532 the value is the first call *at compile time*. Subsequent calls
3533 *at compile time* generate code to use the saved value.
3534 This produces correct result provided that *at run time* control
3535 always flows through the insns made by the first expand_expr
3536 before reaching the other places where the save_expr was evaluated.
3537 You, the caller of save_expr, must make sure this is so.
3539 Constants, and certain read-only nodes, are returned with no
3540 SAVE_EXPR because that is safe. Expressions containing placeholders
3541 are not touched; see tree.def for an explanation of what these
3542 are used for. */
3544 tree
3545 save_expr (tree expr)
3547 tree inner;
3549 /* If the tree evaluates to a constant, then we don't want to hide that
3550 fact (i.e. this allows further folding, and direct checks for constants).
3551 However, a read-only object that has side effects cannot be bypassed.
3552 Since it is no problem to reevaluate literals, we just return the
3553 literal node. */
3554 inner = skip_simple_arithmetic (expr);
3555 if (TREE_CODE (inner) == ERROR_MARK)
3556 return inner;
3558 if (tree_invariant_p_1 (inner))
3559 return expr;
3561 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3562 it means that the size or offset of some field of an object depends on
3563 the value within another field.
3565 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3566 and some variable since it would then need to be both evaluated once and
3567 evaluated more than once. Front-ends must ensure this case cannot
3568 happen by surrounding any such subexpressions in their own SAVE_EXPR
3569 and forcing evaluation at the proper time. */
3570 if (contains_placeholder_p (inner))
3571 return expr;
3573 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3575 /* This expression might be placed ahead of a jump to ensure that the
3576 value was computed on both sides of the jump. So make sure it isn't
3577 eliminated as dead. */
3578 TREE_SIDE_EFFECTS (expr) = 1;
3579 return expr;
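/* Usage sketch (illustrative only, hypothetical helper): building an
   absolute-value expression X > 0 ? X : -X where X may have side
   effects; save_expr guarantees X is evaluated only once even though
   the result is referenced twice.  */

static tree
example_abs_expr (tree x)
{
  tree type = TREE_TYPE (x);
  x = save_expr (x);
  tree cond = build2 (GT_EXPR, boolean_type_node, x, build_zero_cst (type));
  return build3 (COND_EXPR, type, cond, x, build1 (NEGATE_EXPR, type, x));
}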
3582 /* Look inside EXPR into any simple arithmetic operations. Return the
3583 outermost non-arithmetic or non-invariant node. */
3585 tree
3586 skip_simple_arithmetic (tree expr)
3588 /* We don't care about whether this can be used as an lvalue in this
3589 context. */
3590 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3591 expr = TREE_OPERAND (expr, 0);
3593 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3594 a constant, it will be more efficient to not make another SAVE_EXPR since
3595 it will allow better simplification and GCSE will be able to merge the
3596 computations if they actually occur. */
3597 while (true)
3599 if (UNARY_CLASS_P (expr))
3600 expr = TREE_OPERAND (expr, 0);
3601 else if (BINARY_CLASS_P (expr))
3603 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3604 expr = TREE_OPERAND (expr, 0);
3605 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3606 expr = TREE_OPERAND (expr, 1);
3607 else
3608 break;
3610 else
3611 break;
3614 return expr;
3617 /* Look inside EXPR into simple arithmetic operations involving constants.
3618 Return the outermost non-arithmetic or non-constant node. */
3620 tree
3621 skip_simple_constant_arithmetic (tree expr)
3623 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3624 expr = TREE_OPERAND (expr, 0);
3626 while (true)
3628 if (UNARY_CLASS_P (expr))
3629 expr = TREE_OPERAND (expr, 0);
3630 else if (BINARY_CLASS_P (expr))
3632 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3633 expr = TREE_OPERAND (expr, 0);
3634 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3635 expr = TREE_OPERAND (expr, 1);
3636 else
3637 break;
3639 else
3640 break;
3643 return expr;
3646 /* Return which tree structure is used by T. */
3648 enum tree_node_structure_enum
3649 tree_node_structure (const_tree t)
3651 const enum tree_code code = TREE_CODE (t);
3652 return tree_node_structure_for_code (code);
3655 /* Set various status flags when building a CALL_EXPR object T. */
3657 static void
3658 process_call_operands (tree t)
3660 bool side_effects = TREE_SIDE_EFFECTS (t);
3661 bool read_only = false;
3662 int i = call_expr_flags (t);
3664 /* Calls have side-effects, except those to const or pure functions. */
3665 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3666 side_effects = true;
3667 /* Propagate TREE_READONLY of arguments for const functions. */
3668 if (i & ECF_CONST)
3669 read_only = true;
3671 if (!side_effects || read_only)
3672 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3674 tree op = TREE_OPERAND (t, i);
3675 if (op && TREE_SIDE_EFFECTS (op))
3676 side_effects = true;
3677 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3678 read_only = false;
3681 TREE_SIDE_EFFECTS (t) = side_effects;
3682 TREE_READONLY (t) = read_only;
3685 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3686 size or offset that depends on a field within a record. */
3688 bool
3689 contains_placeholder_p (const_tree exp)
3691 enum tree_code code;
3693 if (!exp)
3694 return 0;
3696 code = TREE_CODE (exp);
3697 if (code == PLACEHOLDER_EXPR)
3698 return 1;
3700 switch (TREE_CODE_CLASS (code))
3702 case tcc_reference:
3703 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3704 position computations since they will be converted into a
3705 WITH_RECORD_EXPR involving the reference, which we assume
3706 here will be valid. */
3707 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3709 case tcc_exceptional:
3710 if (code == TREE_LIST)
3711 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3712 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3713 break;
3715 case tcc_unary:
3716 case tcc_binary:
3717 case tcc_comparison:
3718 case tcc_expression:
3719 switch (code)
3721 case COMPOUND_EXPR:
3722 /* Ignoring the first operand isn't quite right, but works best. */
3723 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3725 case COND_EXPR:
3726 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3727 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3728 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3730 case SAVE_EXPR:
3731 /* The save_expr function never wraps anything containing
3732 a PLACEHOLDER_EXPR. */
3733 return 0;
3735 default:
3736 break;
3739 switch (TREE_CODE_LENGTH (code))
3741 case 1:
3742 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3743 case 2:
3744 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3745 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3746 default:
3747 return 0;
3750 case tcc_vl_exp:
3751 switch (code)
3753 case CALL_EXPR:
3755 const_tree arg;
3756 const_call_expr_arg_iterator iter;
3757 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3758 if (CONTAINS_PLACEHOLDER_P (arg))
3759 return 1;
3760 return 0;
3762 default:
3763 return 0;
3766 default:
3767 return 0;
3769 return 0;
3772 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3773 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3774 field positions. */
3776 static bool
3777 type_contains_placeholder_1 (const_tree type)
3779 /* If the size contains a placeholder or the parent type (component type in
3780 the case of arrays) type involves a placeholder, this type does. */
3781 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3782 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3783 || (!POINTER_TYPE_P (type)
3784 && TREE_TYPE (type)
3785 && type_contains_placeholder_p (TREE_TYPE (type))))
3786 return true;
3788 /* Now do type-specific checks. Note that the last part of the check above
3789 greatly limits what we have to do below. */
3790 switch (TREE_CODE (type))
3792 case VOID_TYPE:
3793 case POINTER_BOUNDS_TYPE:
3794 case COMPLEX_TYPE:
3795 case ENUMERAL_TYPE:
3796 case BOOLEAN_TYPE:
3797 case POINTER_TYPE:
3798 case OFFSET_TYPE:
3799 case REFERENCE_TYPE:
3800 case METHOD_TYPE:
3801 case FUNCTION_TYPE:
3802 case VECTOR_TYPE:
3803 case NULLPTR_TYPE:
3804 return false;
3806 case INTEGER_TYPE:
3807 case REAL_TYPE:
3808 case FIXED_POINT_TYPE:
3809 /* Here we just check the bounds. */
3810 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3811 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3813 case ARRAY_TYPE:
3814 /* We have already checked the component type above, so just check
3815 the domain type. Flexible array members have a null domain. */
3816 return TYPE_DOMAIN (type) ?
3817 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3819 case RECORD_TYPE:
3820 case UNION_TYPE:
3821 case QUAL_UNION_TYPE:
3823 tree field;
3825 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3826 if (TREE_CODE (field) == FIELD_DECL
3827 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3828 || (TREE_CODE (type) == QUAL_UNION_TYPE
3829 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3830 || type_contains_placeholder_p (TREE_TYPE (field))))
3831 return true;
3833 return false;
3836 default:
3837 gcc_unreachable ();
3841 /* Wrapper around above function used to cache its result. */
3843 bool
3844 type_contains_placeholder_p (tree type)
3846 bool result;
3848 /* If the contains_placeholder_bits field has been initialized,
3849 then we know the answer. */
3850 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3851 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3853 /* Indicate that we've seen this type node, and the answer is false.
3854 This is what we want to return if we run into recursion via fields. */
3855 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3857 /* Compute the real value. */
3858 result = type_contains_placeholder_1 (type);
3860 /* Store the real value. */
3861 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3863 return result;
3866 /* Push tree EXP onto vector QUEUE if it is not already present. */
3868 static void
3869 push_without_duplicates (tree exp, vec<tree> *queue)
3871 unsigned int i;
3872 tree iter;
3874 FOR_EACH_VEC_ELT (*queue, i, iter)
3875 if (simple_cst_equal (iter, exp) == 1)
3876 break;
3878 if (!iter)
3879 queue->safe_push (exp);
3882 /* Given a tree EXP, find all occurrences of references to fields
3883 in a PLACEHOLDER_EXPR and place them in vector REFS without
3884 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3885 we assume here that EXP contains only arithmetic expressions
3886 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3887 argument list. */
3889 void
3890 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3892 enum tree_code code = TREE_CODE (exp);
3893 tree inner;
3894 int i;
3896 /* We handle TREE_LIST and COMPONENT_REF separately. */
3897 if (code == TREE_LIST)
3899 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3900 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3902 else if (code == COMPONENT_REF)
3904 for (inner = TREE_OPERAND (exp, 0);
3905 REFERENCE_CLASS_P (inner);
3906 inner = TREE_OPERAND (inner, 0))
3909 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3910 push_without_duplicates (exp, refs);
3911 else
3912 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3914 else
3915 switch (TREE_CODE_CLASS (code))
3917 case tcc_constant:
3918 break;
3920 case tcc_declaration:
3921 /* Variables allocated to static storage can stay. */
3922 if (!TREE_STATIC (exp))
3923 push_without_duplicates (exp, refs);
3924 break;
3926 case tcc_expression:
3927 /* This is the pattern built in ada/make_aligning_type. */
3928 if (code == ADDR_EXPR
3929 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3931 push_without_duplicates (exp, refs);
3932 break;
3935 /* Fall through. */
3937 case tcc_exceptional:
3938 case tcc_unary:
3939 case tcc_binary:
3940 case tcc_comparison:
3941 case tcc_reference:
3942 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3943 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3944 break;
3946 case tcc_vl_exp:
3947 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3948 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3949 break;
3951 default:
3952 gcc_unreachable ();
3956 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3957 return a tree with all occurrences of references to F in a
3958 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3959 CONST_DECLs. Note that we assume here that EXP contains only
3960 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3961 occurring only in their argument list. */
3963 tree
3964 substitute_in_expr (tree exp, tree f, tree r)
3966 enum tree_code code = TREE_CODE (exp);
3967 tree op0, op1, op2, op3;
3968 tree new_tree;
3970 /* We handle TREE_LIST and COMPONENT_REF separately. */
3971 if (code == TREE_LIST)
3973 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3974 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3975 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3976 return exp;
3978 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3980 else if (code == COMPONENT_REF)
3982 tree inner;
3984 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3985 and it is the right field, replace it with R. */
3986 for (inner = TREE_OPERAND (exp, 0);
3987 REFERENCE_CLASS_P (inner);
3988 inner = TREE_OPERAND (inner, 0))
3991 /* The field. */
3992 op1 = TREE_OPERAND (exp, 1);
3994 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3995 return r;
3997 /* If this expression hasn't been completed yet, leave it alone. */
3998 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3999 return exp;
4001 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4002 if (op0 == TREE_OPERAND (exp, 0))
4003 return exp;
4005 new_tree
4006 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4008 else
4009 switch (TREE_CODE_CLASS (code))
4011 case tcc_constant:
4012 return exp;
4014 case tcc_declaration:
4015 if (exp == f)
4016 return r;
4017 else
4018 return exp;
4020 case tcc_expression:
4021 if (exp == f)
4022 return r;
4024 /* Fall through. */
4026 case tcc_exceptional:
4027 case tcc_unary:
4028 case tcc_binary:
4029 case tcc_comparison:
4030 case tcc_reference:
4031 switch (TREE_CODE_LENGTH (code))
4033 case 0:
4034 return exp;
4036 case 1:
4037 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4038 if (op0 == TREE_OPERAND (exp, 0))
4039 return exp;
4041 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4042 break;
4044 case 2:
4045 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4046 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4048 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4049 return exp;
4051 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4052 break;
4054 case 3:
4055 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4056 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4057 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4059 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4060 && op2 == TREE_OPERAND (exp, 2))
4061 return exp;
4063 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4064 break;
4066 case 4:
4067 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4068 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4069 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4070 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4072 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4073 && op2 == TREE_OPERAND (exp, 2)
4074 && op3 == TREE_OPERAND (exp, 3))
4075 return exp;
4077 new_tree
4078 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4079 break;
4081 default:
4082 gcc_unreachable ();
4084 break;
4086 case tcc_vl_exp:
4088 int i;
4090 new_tree = NULL_TREE;
4092 /* If we are trying to replace F with a constant or with another
4093 instance of one of the arguments of the call, inline back
4094 functions which do nothing else than computing a value from
4095 the arguments they are passed. This makes it possible to
4096 fold partially or entirely the replacement expression. */
4097 if (code == CALL_EXPR)
4099 bool maybe_inline = false;
4100 if (CONSTANT_CLASS_P (r))
4101 maybe_inline = true;
4102 else
4103 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4104 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4106 maybe_inline = true;
4107 break;
4109 if (maybe_inline)
4111 tree t = maybe_inline_call_in_expr (exp);
4112 if (t)
4113 return SUBSTITUTE_IN_EXPR (t, f, r);
4117 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4119 tree op = TREE_OPERAND (exp, i);
4120 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4121 if (new_op != op)
4123 if (!new_tree)
4124 new_tree = copy_node (exp);
4125 TREE_OPERAND (new_tree, i) = new_op;
4129 if (new_tree)
4131 new_tree = fold (new_tree);
4132 if (TREE_CODE (new_tree) == CALL_EXPR)
4133 process_call_operands (new_tree);
4135 else
4136 return exp;
4138 break;
4140 default:
4141 gcc_unreachable ();
4144 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4146 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4147 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4149 return new_tree;
4152 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4153 for it within OBJ, a tree that is an object or a chain of references. */
4155 tree
4156 substitute_placeholder_in_expr (tree exp, tree obj)
4158 enum tree_code code = TREE_CODE (exp);
4159 tree op0, op1, op2, op3;
4160 tree new_tree;
4162 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4163 in the chain of OBJ. */
4164 if (code == PLACEHOLDER_EXPR)
4166 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4167 tree elt;
4169 for (elt = obj; elt != 0;
4170 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4171 || TREE_CODE (elt) == COND_EXPR)
4172 ? TREE_OPERAND (elt, 1)
4173 : (REFERENCE_CLASS_P (elt)
4174 || UNARY_CLASS_P (elt)
4175 || BINARY_CLASS_P (elt)
4176 || VL_EXP_CLASS_P (elt)
4177 || EXPRESSION_CLASS_P (elt))
4178 ? TREE_OPERAND (elt, 0) : 0))
4179 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4180 return elt;
4182 for (elt = obj; elt != 0;
4183 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4184 || TREE_CODE (elt) == COND_EXPR)
4185 ? TREE_OPERAND (elt, 1)
4186 : (REFERENCE_CLASS_P (elt)
4187 || UNARY_CLASS_P (elt)
4188 || BINARY_CLASS_P (elt)
4189 || VL_EXP_CLASS_P (elt)
4190 || EXPRESSION_CLASS_P (elt))
4191 ? TREE_OPERAND (elt, 0) : 0))
4192 if (POINTER_TYPE_P (TREE_TYPE (elt))
4193 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4194 == need_type))
4195 return fold_build1 (INDIRECT_REF, need_type, elt);
4197 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4198 survives until RTL generation, there will be an error. */
4199 return exp;
4202 /* TREE_LIST is special because we need to look at TREE_VALUE
4203 and TREE_CHAIN, not TREE_OPERANDS. */
4204 else if (code == TREE_LIST)
4206 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4207 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4208 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4209 return exp;
4211 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4213 else
4214 switch (TREE_CODE_CLASS (code))
4216 case tcc_constant:
4217 case tcc_declaration:
4218 return exp;
4220 case tcc_exceptional:
4221 case tcc_unary:
4222 case tcc_binary:
4223 case tcc_comparison:
4224 case tcc_expression:
4225 case tcc_reference:
4226 case tcc_statement:
4227 switch (TREE_CODE_LENGTH (code))
4229 case 0:
4230 return exp;
4232 case 1:
4233 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4234 if (op0 == TREE_OPERAND (exp, 0))
4235 return exp;
4237 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4238 break;
4240 case 2:
4241 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4242 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4244 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4245 return exp;
4247 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4248 break;
4250 case 3:
4251 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4252 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4253 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4255 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4256 && op2 == TREE_OPERAND (exp, 2))
4257 return exp;
4259 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4260 break;
4262 case 4:
4263 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4264 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4265 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4266 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4268 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4269 && op2 == TREE_OPERAND (exp, 2)
4270 && op3 == TREE_OPERAND (exp, 3))
4271 return exp;
4273 new_tree
4274 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4275 break;
4277 default:
4278 gcc_unreachable ();
4280 break;
4282 case tcc_vl_exp:
4284 int i;
4286 new_tree = NULL_TREE;
4288 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4290 tree op = TREE_OPERAND (exp, i);
4291 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4292 if (new_op != op)
4294 if (!new_tree)
4295 new_tree = copy_node (exp);
4296 TREE_OPERAND (new_tree, i) = new_op;
4300 if (new_tree)
4302 new_tree = fold (new_tree);
4303 if (TREE_CODE (new_tree) == CALL_EXPR)
4304 process_call_operands (new_tree);
4306 else
4307 return exp;
4309 break;
4311 default:
4312 gcc_unreachable ();
4315 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4317 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4318 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4320 return new_tree;
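/* Editorial sketch (not part of the original source): callers normally go
   through the SUBSTITUTE_PLACEHOLDER_IN_EXPR macro from tree.h, which
   short-circuits when EXP is null or constant.  Assuming OBJ is some tree
   for an object of a self-referential (e.g. Ada-style) type:

	tree size = TYPE_SIZE (TREE_TYPE (obj));
	tree concrete_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, obj);

   SIZE may contain a PLACEHOLDER_EXPR standing for "the object at hand";
   the result expresses the size in terms of OBJ itself.  */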
4324 /* Subroutine of stabilize_reference; this is called for subtrees of
4325 references. Any expression with side-effects must be put in a SAVE_EXPR
4326 to ensure that it is only evaluated once.
4328 We don't put SAVE_EXPR nodes around everything, because assigning very
4329 simple expressions to temporaries causes us to miss good opportunities
4330 for optimizations. Among other things, the opportunity to fold in the
4331 addition of a constant into an addressing mode often gets lost, e.g.
4332 "y[i+1] += x;". In general, we take the approach that we should not make
4333 an assignment unless we are forced into it - i.e., that any non-side effect
4334 operator should be allowed, and that cse should take care of coalescing
4335 multiple utterances of the same expression should that prove fruitful. */
4337 static tree
4338 stabilize_reference_1 (tree e)
4340 tree result;
4341 enum tree_code code = TREE_CODE (e);
4343 /* We cannot ignore const expressions because the expression might be a
4344 reference to a const array whose index contains side-effects. But we can
4345 ignore things that are actual constants or that have already been
4346 handled by this function. */
4348 if (tree_invariant_p (e))
4349 return e;
4351 switch (TREE_CODE_CLASS (code))
4353 case tcc_exceptional:
4354 case tcc_type:
4355 case tcc_declaration:
4356 case tcc_comparison:
4357 case tcc_statement:
4358 case tcc_expression:
4359 case tcc_reference:
4360 case tcc_vl_exp:
4361 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4362 so that it will only be evaluated once. */
4363 /* The reference (r) and comparison (<) classes could be handled as
4364 below, but it is generally faster to only evaluate them once. */
4365 if (TREE_SIDE_EFFECTS (e))
4366 return save_expr (e);
4367 return e;
4369 case tcc_constant:
4370 /* Constants need no processing. In fact, we should never reach
4371 here. */
4372 return e;
4374 case tcc_binary:
4375 /* Division is slow and tends to be compiled with jumps,
4376 especially the division by powers of 2 that is often
4377 found inside of an array reference. So do it just once. */
4378 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4379 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4380 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4381 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4382 return save_expr (e);
4383 /* Recursively stabilize each operand. */
4384 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4385 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4386 break;
4388 case tcc_unary:
4389 /* Recursively stabilize each operand. */
4390 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4391 break;
4393 default:
4394 gcc_unreachable ();
4397 TREE_TYPE (result) = TREE_TYPE (e);
4398 TREE_READONLY (result) = TREE_READONLY (e);
4399 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4400 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4402 return result;
4405 /* Stabilize a reference so that we can use it any number of times
4406 without causing its operands to be evaluated more than once.
4407 Returns the stabilized reference. This works by means of save_expr,
4408 so see the caveats in the comments about save_expr.
4410 Also allows conversion expressions whose operands are references.
4411 Any other kind of expression is returned unchanged. */
4413 tree
4414 stabilize_reference (tree ref)
4416 tree result;
4417 enum tree_code code = TREE_CODE (ref);
4419 switch (code)
4421 case VAR_DECL:
4422 case PARM_DECL:
4423 case RESULT_DECL:
4424 /* No action is needed in this case. */
4425 return ref;
4427 CASE_CONVERT:
4428 case FLOAT_EXPR:
4429 case FIX_TRUNC_EXPR:
4430 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4431 break;
4433 case INDIRECT_REF:
4434 result = build_nt (INDIRECT_REF,
4435 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4436 break;
4438 case COMPONENT_REF:
4439 result = build_nt (COMPONENT_REF,
4440 stabilize_reference (TREE_OPERAND (ref, 0)),
4441 TREE_OPERAND (ref, 1), NULL_TREE);
4442 break;
4444 case BIT_FIELD_REF:
4445 result = build_nt (BIT_FIELD_REF,
4446 stabilize_reference (TREE_OPERAND (ref, 0)),
4447 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4448 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4449 break;
4451 case ARRAY_REF:
4452 result = build_nt (ARRAY_REF,
4453 stabilize_reference (TREE_OPERAND (ref, 0)),
4454 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4455 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4456 break;
4458 case ARRAY_RANGE_REF:
4459 result = build_nt (ARRAY_RANGE_REF,
4460 stabilize_reference (TREE_OPERAND (ref, 0)),
4461 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4462 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4463 break;
4465 case COMPOUND_EXPR:
4466 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4467 it wouldn't be ignored. This matters when dealing with
4468 volatiles. */
4469 return stabilize_reference_1 (ref);
4471 /* If arg isn't a kind of lvalue we recognize, make no change.
4472 Caller should recognize the error for an invalid lvalue. */
4473 default:
4474 return ref;
4476 case ERROR_MARK:
4477 return error_mark_node;
4480 TREE_TYPE (result) = TREE_TYPE (ref);
4481 TREE_READONLY (result) = TREE_READONLY (ref);
4482 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4483 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4485 return result;
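/* Editorial sketch (not part of the original source): a front end lowering
   a compound assignment such as "a[i++] += x" must evaluate the left-hand
   reference exactly once.  Assuming LHS and RHS are already-built operand
   trees, it could do roughly:

	tree ref = stabilize_reference (lhs);
	tree sum = build2 (PLUS_EXPR, TREE_TYPE (ref), ref, rhs);
	tree asg = build2 (MODIFY_EXPR, TREE_TYPE (ref), ref, sum);

   The SAVE_EXPRs introduced by stabilize_reference_1 ensure the index
   side effect happens only once even though REF appears twice.  */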
4488 /* Low-level constructors for expressions. */
4490 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4491 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4493 void
4494 recompute_tree_invariant_for_addr_expr (tree t)
4496 tree node;
4497 bool tc = true, se = false;
4499 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4501 /* We started out assuming this address is both invariant and constant, but
4502 does not have side effects. Now go down any handled components and see if
4503 any of them involve offsets that are either non-constant or non-invariant.
4504 Also check for side-effects.
4506 ??? Note that this code makes no attempt to deal with the case where
4507 taking the address of something causes a copy due to misalignment. */
4509 #define UPDATE_FLAGS(NODE) \
4510 do { tree _node = (NODE); \
4511 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4512 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4514 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4515 node = TREE_OPERAND (node, 0))
4517 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4518 array reference (probably made temporarily by the G++ front end),
4519 so ignore all the operands. */
4520 if ((TREE_CODE (node) == ARRAY_REF
4521 || TREE_CODE (node) == ARRAY_RANGE_REF)
4522 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4524 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4525 if (TREE_OPERAND (node, 2))
4526 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4527 if (TREE_OPERAND (node, 3))
4528 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4530 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4531 FIELD_DECL, apparently. The G++ front end can put something else
4532 there, at least temporarily. */
4533 else if (TREE_CODE (node) == COMPONENT_REF
4534 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4536 if (TREE_OPERAND (node, 2))
4537 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4541 node = lang_hooks.expr_to_decl (node, &tc, &se);
4543 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4544 the address, since &(*a)->b is a form of addition. If it's a constant, the
4545 address is constant too. If it's a decl, its address is constant if the
4546 decl is static. Everything else is not constant and, furthermore,
4547 taking the address of a volatile variable is not volatile. */
4548 if (TREE_CODE (node) == INDIRECT_REF
4549 || TREE_CODE (node) == MEM_REF)
4550 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4551 else if (CONSTANT_CLASS_P (node))
4553 else if (DECL_P (node))
4554 tc &= (staticp (node) != NULL_TREE);
4555 else
4557 tc = false;
4558 se |= TREE_SIDE_EFFECTS (node);
4562 TREE_CONSTANT (t) = tc;
4563 TREE_SIDE_EFFECTS (t) = se;
4564 #undef UPDATE_FLAGS
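/* Editorial sketch (not part of the original source): build1 below already
   invokes this routine for freshly built ADDR_EXPRs, so an explicit call is
   mainly needed after an ADDR_EXPR's operand is changed in place:

	TREE_OPERAND (addr, 0) = new_base;   /* ADDR is some ADDR_EXPR.  */
	recompute_tree_invariant_for_addr_expr (addr);

   This re-derives TREE_CONSTANT and TREE_SIDE_EFFECTS from the new operand
   instead of leaving stale flags on the node.  */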
4567 /* Build an expression of code CODE, data type TYPE, and operands as
4568 specified. Expressions and reference nodes can be created this way.
4569 Constants, decls, types and misc nodes cannot be.
4571 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4572 enough for all extant tree codes. */
4574 tree
4575 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4577 tree t;
4579 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4581 t = make_node (code PASS_MEM_STAT);
4582 TREE_TYPE (t) = tt;
4584 return t;
4587 tree
4588 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4590 int length = sizeof (struct tree_exp);
4591 tree t;
4593 record_node_allocation_statistics (code, length);
4595 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4597 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4599 memset (t, 0, sizeof (struct tree_common));
4601 TREE_SET_CODE (t, code);
4603 TREE_TYPE (t) = type;
4604 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4605 TREE_OPERAND (t, 0) = node;
4606 if (node && !TYPE_P (node))
4608 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4609 TREE_READONLY (t) = TREE_READONLY (node);
4612 if (TREE_CODE_CLASS (code) == tcc_statement)
4614 if (code != DEBUG_BEGIN_STMT)
4615 TREE_SIDE_EFFECTS (t) = 1;
4617 else switch (code)
4619 case VA_ARG_EXPR:
4620 /* All of these have side-effects, no matter what their
4621 operands are. */
4622 TREE_SIDE_EFFECTS (t) = 1;
4623 TREE_READONLY (t) = 0;
4624 break;
4626 case INDIRECT_REF:
4627 /* Whether a dereference is readonly has nothing to do with whether
4628 its operand is readonly. */
4629 TREE_READONLY (t) = 0;
4630 break;
4632 case ADDR_EXPR:
4633 if (node)
4634 recompute_tree_invariant_for_addr_expr (t);
4635 break;
4637 default:
4638 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4639 && node && !TYPE_P (node)
4640 && TREE_CONSTANT (node))
4641 TREE_CONSTANT (t) = 1;
4642 if (TREE_CODE_CLASS (code) == tcc_reference
4643 && node && TREE_THIS_VOLATILE (node))
4644 TREE_THIS_VOLATILE (t) = 1;
4645 break;
4648 return t;
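/* Editorial sketch (not part of the original source): typical build1 usage
   for a unary expression, assuming ARG is an existing integer-typed tree:

	tree neg = build1 (NEGATE_EXPR, integer_type_node, arg);

   TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS on the result are
   derived from ARG as described above.  */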
4651 #define PROCESS_ARG(N) \
4652 do { \
4653 TREE_OPERAND (t, N) = arg##N; \
4654 if (arg##N &&!TYPE_P (arg##N)) \
4656 if (TREE_SIDE_EFFECTS (arg##N)) \
4657 side_effects = 1; \
4658 if (!TREE_READONLY (arg##N) \
4659 && !CONSTANT_CLASS_P (arg##N)) \
4660 (void) (read_only = 0); \
4661 if (!TREE_CONSTANT (arg##N)) \
4662 (void) (constant = 0); \
4664 } while (0)
4666 tree
4667 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4669 bool constant, read_only, side_effects, div_by_zero;
4670 tree t;
4672 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4674 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4675 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4676 /* When sizetype precision doesn't match that of pointers
4677 we need to be able to build explicit extensions or truncations
4678 of the offset argument. */
4679 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4680 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4681 && TREE_CODE (arg1) == INTEGER_CST);
4683 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4684 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4685 && ptrofftype_p (TREE_TYPE (arg1)));
4687 t = make_node (code PASS_MEM_STAT);
4688 TREE_TYPE (t) = tt;
4690 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4691 result based on those same flags for the arguments. But if the
4692 arguments aren't really even `tree' expressions, we shouldn't be trying
4693 to do this. */
4695 /* Expressions without side effects may be constant if their
4696 arguments are as well. */
4697 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4698 || TREE_CODE_CLASS (code) == tcc_binary);
4699 read_only = 1;
4700 side_effects = TREE_SIDE_EFFECTS (t);
4702 switch (code)
4704 case TRUNC_DIV_EXPR:
4705 case CEIL_DIV_EXPR:
4706 case FLOOR_DIV_EXPR:
4707 case ROUND_DIV_EXPR:
4708 case EXACT_DIV_EXPR:
4709 case CEIL_MOD_EXPR:
4710 case FLOOR_MOD_EXPR:
4711 case ROUND_MOD_EXPR:
4712 case TRUNC_MOD_EXPR:
4713 div_by_zero = integer_zerop (arg1);
4714 break;
4715 default:
4716 div_by_zero = false;
4719 PROCESS_ARG (0);
4720 PROCESS_ARG (1);
4722 TREE_SIDE_EFFECTS (t) = side_effects;
4723 if (code == MEM_REF)
4725 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4727 tree o = TREE_OPERAND (arg0, 0);
4728 TREE_READONLY (t) = TREE_READONLY (o);
4729 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4732 else
4734 TREE_READONLY (t) = read_only;
4735 /* Don't mark X / 0 as constant. */
4736 TREE_CONSTANT (t) = constant && !div_by_zero;
4737 TREE_THIS_VOLATILE (t)
4738 = (TREE_CODE_CLASS (code) == tcc_reference
4739 && arg0 && TREE_THIS_VOLATILE (arg0));
4742 return t;
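/* Editorial sketch (not part of the original source): two typical build2
   calls -- a plain binary arithmetic node, and a POINTER_PLUS_EXPR whose
   offset must have a pointer-offset type (see the assert above).  A, B and
   PTR are assumed to be existing trees of suitable types:

	tree sum  = build2 (PLUS_EXPR, integer_type_node, a, b);
	tree next = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
			    build_int_cst (sizetype, 4));  */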
4746 tree
4747 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
4748 tree arg2 MEM_STAT_DECL)
4750 bool constant, read_only, side_effects;
4751 tree t;
4753 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4754 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4756 t = make_node (code PASS_MEM_STAT);
4757 TREE_TYPE (t) = tt;
4759 read_only = 1;
4761 /* As a special exception, if COND_EXPR has NULL branches, we
4762 assume that it is a gimple statement and always consider
4763 it to have side effects. */
4764 if (code == COND_EXPR
4765 && tt == void_type_node
4766 && arg1 == NULL_TREE
4767 && arg2 == NULL_TREE)
4768 side_effects = true;
4769 else
4770 side_effects = TREE_SIDE_EFFECTS (t);
4772 PROCESS_ARG (0);
4773 PROCESS_ARG (1);
4774 PROCESS_ARG (2);
4776 if (code == COND_EXPR)
4777 TREE_READONLY (t) = read_only;
4779 TREE_SIDE_EFFECTS (t) = side_effects;
4780 TREE_THIS_VOLATILE (t)
4781 = (TREE_CODE_CLASS (code) == tcc_reference
4782 && arg0 && TREE_THIS_VOLATILE (arg0));
4784 return t;
4787 tree
4788 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
4789 tree arg2, tree arg3 MEM_STAT_DECL)
4791 bool constant, read_only, side_effects;
4792 tree t;
4794 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4796 t = make_node (code PASS_MEM_STAT);
4797 TREE_TYPE (t) = tt;
4799 side_effects = TREE_SIDE_EFFECTS (t);
4801 PROCESS_ARG (0);
4802 PROCESS_ARG (1);
4803 PROCESS_ARG (2);
4804 PROCESS_ARG (3);
4806 TREE_SIDE_EFFECTS (t) = side_effects;
4807 TREE_THIS_VOLATILE (t)
4808 = (TREE_CODE_CLASS (code) == tcc_reference
4809 && arg0 && TREE_THIS_VOLATILE (arg0));
4811 return t;
4814 tree
4815 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
4816 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4818 bool constant, read_only, side_effects;
4819 tree t;
4821 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4823 t = make_node (code PASS_MEM_STAT);
4824 TREE_TYPE (t) = tt;
4826 side_effects = TREE_SIDE_EFFECTS (t);
4828 PROCESS_ARG (0);
4829 PROCESS_ARG (1);
4830 PROCESS_ARG (2);
4831 PROCESS_ARG (3);
4832 PROCESS_ARG (4);
4834 TREE_SIDE_EFFECTS (t) = side_effects;
4835 if (code == TARGET_MEM_REF)
4837 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4839 tree o = TREE_OPERAND (arg0, 0);
4840 TREE_READONLY (t) = TREE_READONLY (o);
4841 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4844 else
4845 TREE_THIS_VOLATILE (t)
4846 = (TREE_CODE_CLASS (code) == tcc_reference
4847 && arg0 && TREE_THIS_VOLATILE (arg0));
4849 return t;
4852 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4853 on the pointer PTR. */
4855 tree
4856 build_simple_mem_ref_loc (location_t loc, tree ptr)
4858 poly_int64 offset = 0;
4859 tree ptype = TREE_TYPE (ptr);
4860 tree tem;
4861 /* For convenience allow addresses that collapse to a simple base
4862 and offset. */
4863 if (TREE_CODE (ptr) == ADDR_EXPR
4864 && (handled_component_p (TREE_OPERAND (ptr, 0))
4865 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4867 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4868 gcc_assert (ptr);
4869 if (TREE_CODE (ptr) == MEM_REF)
4871 offset += mem_ref_offset (ptr).force_shwi ();
4872 ptr = TREE_OPERAND (ptr, 0);
4874 else
4875 ptr = build_fold_addr_expr (ptr);
4876 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4878 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4879 ptr, build_int_cst (ptype, offset));
4880 SET_EXPR_LOCATION (tem, loc);
4881 return tem;
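/* Editorial sketch (not part of the original source): dereference a pointer
   PTR as if through a plain INDIRECT_REF:

	tree deref = build_simple_mem_ref_loc (input_location, ptr);

   tree.h also provides build_simple_mem_ref, which is the same call with
   UNKNOWN_LOCATION.  */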
4884 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4886 poly_offset_int
4887 mem_ref_offset (const_tree t)
4889 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
4890 SIGNED);
4893 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4894 offsetted by OFFSET units. */
4896 tree
4897 build_invariant_address (tree type, tree base, poly_int64 offset)
4899 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4900 build_fold_addr_expr (base),
4901 build_int_cst (ptr_type_node, offset));
4902 tree addr = build1 (ADDR_EXPR, type, ref);
4903 recompute_tree_invariant_for_addr_expr (addr);
4904 return addr;
4907 /* Similar except don't specify the TREE_TYPE
4908 and leave the TREE_SIDE_EFFECTS as 0.
4909 It is permissible for arguments to be null,
4910 or even garbage if their values do not matter. */
4912 tree
4913 build_nt (enum tree_code code, ...)
4915 tree t;
4916 int length;
4917 int i;
4918 va_list p;
4920 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4922 va_start (p, code);
4924 t = make_node (code);
4925 length = TREE_CODE_LENGTH (code);
4927 for (i = 0; i < length; i++)
4928 TREE_OPERAND (t, i) = va_arg (p, tree);
4930 va_end (p);
4931 return t;
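/* Editorial sketch (not part of the original source): build_nt is what
   stabilize_reference above uses to rebuild references without setting
   flags.  A caller doing the same by hand must fill in the type itself;
   OBJECT and FIELD stand for an existing aggregate reference and one of
   its FIELD_DECLs:

	tree r = build_nt (COMPONENT_REF, object, field, NULL_TREE);
	TREE_TYPE (r) = TREE_TYPE (field);  */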
4934 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4935 tree vec. */
4937 tree
4938 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4940 tree ret, t;
4941 unsigned int ix;
4943 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4944 CALL_EXPR_FN (ret) = fn;
4945 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4946 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4947 CALL_EXPR_ARG (ret, ix) = t;
4948 return ret;
4951 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4952 We do NOT enter this node in any sort of symbol table.
4954 LOC is the location of the decl.
4956 layout_decl is used to set up the decl's storage layout.
4957 Other slots are initialized to 0 or null pointers. */
4959 tree
4960 build_decl (location_t loc, enum tree_code code, tree name,
4961 tree type MEM_STAT_DECL)
4963 tree t;
4965 t = make_node (code PASS_MEM_STAT);
4966 DECL_SOURCE_LOCATION (t) = loc;
4968 /* if (type == error_mark_node)
4969 type = integer_type_node; */
4970 /* That is not done, deliberately, so that having error_mark_node
4971 as the type can suppress useless errors in the use of this variable. */
4973 DECL_NAME (t) = name;
4974 TREE_TYPE (t) = type;
4976 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4977 layout_decl (t, 0);
4979 return t;
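/* Editorial sketch (not part of the original source): creating and laying
   out a simple artificial variable; the identifier name is hypothetical:

	tree var = build_decl (input_location, VAR_DECL,
			       get_identifier ("__tmp"), integer_type_node);
	DECL_ARTIFICIAL (var) = 1;
	TREE_STATIC (var) = 1;

   For VAR_DECL, PARM_DECL and RESULT_DECL the storage layout is computed
   by the layout_decl call above.  */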
4982 /* Builds and returns function declaration with NAME and TYPE. */
4984 tree
4985 build_fn_decl (const char *name, tree type)
4987 tree id = get_identifier (name);
4988 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4990 DECL_EXTERNAL (decl) = 1;
4991 TREE_PUBLIC (decl) = 1;
4992 DECL_ARTIFICIAL (decl) = 1;
4993 TREE_NOTHROW (decl) = 1;
4995 return decl;
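/* Editorial sketch (not part of the original source): declaring an external
   helper function; the name "__helper" is hypothetical:

	tree fntype = build_function_type_list (void_type_node, NULL_TREE);
	tree fndecl = build_fn_decl ("__helper", fntype);

   The declaration comes back external, public, artificial and nothrow,
   matching the flags set just above.  */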
4998 vec<tree, va_gc> *all_translation_units;
5000 /* Builds a new translation-unit decl with name NAME, queues it in the
5001 global list of translation-unit decls and returns it. */
5003 tree
5004 build_translation_unit_decl (tree name)
5006 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5007 name, NULL_TREE);
5008 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5009 vec_safe_push (all_translation_units, tu);
5010 return tu;
5014 /* BLOCK nodes are used to represent the structure of binding contours
5015 and declarations, once those contours have been exited and their contents
5016 compiled. This information is used for outputting debugging info. */
5018 tree
5019 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5021 tree block = make_node (BLOCK);
5023 BLOCK_VARS (block) = vars;
5024 BLOCK_SUBBLOCKS (block) = subblocks;
5025 BLOCK_SUPERCONTEXT (block) = supercontext;
5026 BLOCK_CHAIN (block) = chain;
5027 return block;
5031 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5033 LOC is the location to use in tree T. */
5035 void
5036 protected_set_expr_location (tree t, location_t loc)
5038 if (CAN_HAVE_LOCATION_P (t))
5039 SET_EXPR_LOCATION (t, loc);
5042 /* Reset the expression *EXPR_P, a size or position.
5044 ??? We could reset all non-constant sizes or positions. But it's cheap
5045 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5047 We need to reset self-referential sizes or positions because they cannot
5048 be gimplified and thus can contain a CALL_EXPR after the gimplification
5049 is finished, which will run afoul of LTO streaming. And they need to be
5050 reset to something essentially dummy but not constant, so as to preserve
5051 the properties of the object they are attached to. */
5053 static inline void
5054 free_lang_data_in_one_sizepos (tree *expr_p)
5056 tree expr = *expr_p;
5057 if (CONTAINS_PLACEHOLDER_P (expr))
5058 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5062 /* Reset all the fields in a binfo node BINFO. We only keep
5063 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5065 static void
5066 free_lang_data_in_binfo (tree binfo)
5068 unsigned i;
5069 tree t;
5071 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5073 BINFO_VIRTUALS (binfo) = NULL_TREE;
5074 BINFO_BASE_ACCESSES (binfo) = NULL;
5075 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5076 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5078 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5079 free_lang_data_in_binfo (t);
5083 /* Reset all language specific information still present in TYPE. */
5085 static void
5086 free_lang_data_in_type (tree type)
5088 gcc_assert (TYPE_P (type));
5090 /* Give the FE a chance to remove its own data first. */
5091 lang_hooks.free_lang_data (type);
5093 TREE_LANG_FLAG_0 (type) = 0;
5094 TREE_LANG_FLAG_1 (type) = 0;
5095 TREE_LANG_FLAG_2 (type) = 0;
5096 TREE_LANG_FLAG_3 (type) = 0;
5097 TREE_LANG_FLAG_4 (type) = 0;
5098 TREE_LANG_FLAG_5 (type) = 0;
5099 TREE_LANG_FLAG_6 (type) = 0;
5101 if (TREE_CODE (type) == FUNCTION_TYPE)
5103 /* Remove the const and volatile qualifiers from arguments. The
5104 C++ front end removes them, but the C front end does not,
5105 leading to false ODR violation errors when merging two
5106 instances of the same function signature compiled by
5107 different front ends. */
5108 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5110 tree arg_type = TREE_VALUE (p);
5112 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5114 int quals = TYPE_QUALS (arg_type)
5115 & ~TYPE_QUAL_CONST
5116 & ~TYPE_QUAL_VOLATILE;
5117 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5118 free_lang_data_in_type (TREE_VALUE (p));
5120 /* C++ FE uses TREE_PURPOSE to store initial values. */
5121 TREE_PURPOSE (p) = NULL;
5124 else if (TREE_CODE (type) == METHOD_TYPE)
5125 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5126 /* C++ FE uses TREE_PURPOSE to store initial values. */
5127 TREE_PURPOSE (p) = NULL;
5128 else if (RECORD_OR_UNION_TYPE_P (type))
5130 /* Remove members that are not FIELD_DECLs (and maybe
5131 TYPE_DECLs) from the field list of an aggregate. These occur
5132 in C++. */
5133 for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
5134 if (TREE_CODE (member) == FIELD_DECL
5135 || (TREE_CODE (member) == TYPE_DECL
5136 && !DECL_IGNORED_P (member)
5137 && debug_info_level > DINFO_LEVEL_TERSE
5138 && !is_redundant_typedef (member)))
5139 prev = &DECL_CHAIN (member);
5140 else
5141 *prev = DECL_CHAIN (member);
5143 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5144 and dangles the pointer from time to time. */
5145 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5146 TYPE_VFIELD (type) = NULL_TREE;
5148 if (TYPE_BINFO (type))
5150 free_lang_data_in_binfo (TYPE_BINFO (type));
5151 /* We need to preserve link to bases and virtual table for all
5152 polymorphic types to make devirtualization machinery working.
5153 Debug output cares only about bases, but output also
5154 virtual table pointers so merging of -fdevirtualize and
5155 -fno-devirtualize units is easier. */
5156 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5157 || !flag_devirtualize)
5158 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5159 && !BINFO_VTABLE (TYPE_BINFO (type)))
5160 || debug_info_level != DINFO_LEVEL_NONE))
5161 TYPE_BINFO (type) = NULL;
5164 else if (INTEGRAL_TYPE_P (type)
5165 || SCALAR_FLOAT_TYPE_P (type)
5166 || FIXED_POINT_TYPE_P (type))
5168 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5169 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5172 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5174 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5175 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5177 if (TYPE_CONTEXT (type)
5178 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5180 tree ctx = TYPE_CONTEXT (type);
5183 ctx = BLOCK_SUPERCONTEXT (ctx);
5185 while (ctx && TREE_CODE (ctx) == BLOCK);
5186 TYPE_CONTEXT (type) = ctx;
5191 /* Return true if DECL may need an assembler name to be set. */
5193 static inline bool
5194 need_assembler_name_p (tree decl)
5196 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5197 Rule merging. This makes type_odr_p return true on those types during
5198 LTO and, by comparing the mangled names, we can tell which types are intended
5199 to be equivalent across compilation units.
5201 We do not store names of type_in_anonymous_namespace_p.
5203 Record, union and enumeration types have linkage that allows us
5204 to check type_in_anonymous_namespace_p. We do not mangle compound types
5205 that can always be compared structurally.
5207 Similarly for builtin types, we compare properties of their main variant.
5208 A special case is integer types, where mangling does make a difference
5209 between char/signed char/unsigned char etc. Storing names for these lets
5210 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5211 See cp/mangle.c:write_builtin_type for details. */
5213 if (flag_lto_odr_type_mering
5214 && TREE_CODE (decl) == TYPE_DECL
5215 && DECL_NAME (decl)
5216 && decl == TYPE_NAME (TREE_TYPE (decl))
5217 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5218 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5219 && (type_with_linkage_p (TREE_TYPE (decl))
5220 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5221 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5222 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5223 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5224 if (!VAR_OR_FUNCTION_DECL_P (decl))
5225 return false;
5227 /* If DECL already has its assembler name set, it does not need a
5228 new one. */
5229 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5230 || DECL_ASSEMBLER_NAME_SET_P (decl))
5231 return false;
5233 /* Abstract decls do not need an assembler name. */
5234 if (DECL_ABSTRACT_P (decl))
5235 return false;
5237 /* For VAR_DECLs, only static, public and external symbols need an
5238 assembler name. */
5239 if (VAR_P (decl)
5240 && !TREE_STATIC (decl)
5241 && !TREE_PUBLIC (decl)
5242 && !DECL_EXTERNAL (decl))
5243 return false;
5245 if (TREE_CODE (decl) == FUNCTION_DECL)
5247 /* Do not set assembler name on builtins. Allow RTL expansion to
5248 decide whether to expand inline or via a regular call. */
5249 if (DECL_BUILT_IN (decl)
5250 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5251 return false;
5253 /* Functions represented in the callgraph need an assembler name. */
5254 if (cgraph_node::get (decl) != NULL)
5255 return true;
5257 /* Unused and not public functions don't need an assembler name. */
5258 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5259 return false;
5262 return true;
5266 /* Reset all language specific information still present in symbol
5267 DECL. */
5269 static void
5270 free_lang_data_in_decl (tree decl)
5272 gcc_assert (DECL_P (decl));
5274 /* Give the FE a chance to remove its own data first. */
5275 lang_hooks.free_lang_data (decl);
5277 TREE_LANG_FLAG_0 (decl) = 0;
5278 TREE_LANG_FLAG_1 (decl) = 0;
5279 TREE_LANG_FLAG_2 (decl) = 0;
5280 TREE_LANG_FLAG_3 (decl) = 0;
5281 TREE_LANG_FLAG_4 (decl) = 0;
5282 TREE_LANG_FLAG_5 (decl) = 0;
5283 TREE_LANG_FLAG_6 (decl) = 0;
5285 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5286 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5287 if (TREE_CODE (decl) == FIELD_DECL)
5289 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5290 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5291 DECL_QUALIFIER (decl) = NULL_TREE;
5294 if (TREE_CODE (decl) == FUNCTION_DECL)
5296 struct cgraph_node *node;
5297 if (!(node = cgraph_node::get (decl))
5298 || (!node->definition && !node->clones))
5300 if (node)
5301 node->release_body ();
5302 else
5304 release_function_body (decl);
5305 DECL_ARGUMENTS (decl) = NULL;
5306 DECL_RESULT (decl) = NULL;
5307 DECL_INITIAL (decl) = error_mark_node;
5310 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5312 tree t;
5314 /* If DECL has a gimple body, then the context for its
5315 arguments must be DECL. Otherwise, it doesn't really
5316 matter, as we will not be emitting any code for DECL. In
5317 general, there may be other instances of DECL created by
5318 the front end and since PARM_DECLs are generally shared,
5319 their DECL_CONTEXT changes as the replicas of DECL are
5320 created. The only time where DECL_CONTEXT is important
5321 is for the FUNCTION_DECLs that have a gimple body (since
5322 the PARM_DECL will be used in the function's body). */
5323 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5324 DECL_CONTEXT (t) = decl;
5325 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5326 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5327 = target_option_default_node;
5328 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5329 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5330 = optimization_default_node;
5333 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5334 At this point, it is not needed anymore. */
5335 DECL_SAVED_TREE (decl) = NULL_TREE;
5337 /* Clear the abstract origin if it refers to a method.
5338 Otherwise dwarf2out.c will ICE as we splice functions out of
5339 TYPE_FIELDS and thus the origin will not be output
5340 correctly. */
5341 if (DECL_ABSTRACT_ORIGIN (decl)
5342 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5343 && RECORD_OR_UNION_TYPE_P
5344 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5345 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5347 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5348 DECL_VINDEX referring to itself into a vtable slot number as it
5349 should. Happens with functions that are copied and then forgotten
5350 about. Just clear it, it won't matter anymore. */
5351 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5352 DECL_VINDEX (decl) = NULL_TREE;
5354 else if (VAR_P (decl))
5356 if ((DECL_EXTERNAL (decl)
5357 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5358 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5359 DECL_INITIAL (decl) = NULL_TREE;
5361 else if (TREE_CODE (decl) == TYPE_DECL)
5363 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5364 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5365 DECL_INITIAL (decl) = NULL_TREE;
5367 else if (TREE_CODE (decl) == FIELD_DECL)
5368 DECL_INITIAL (decl) = NULL_TREE;
5369 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5370 && DECL_INITIAL (decl)
5371 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5373 /* Strip builtins from the translation-unit BLOCK. We still have targets
5374 without builtin_decl_explicit support and also builtins are shared
5375 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5376 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5377 while (*nextp)
5379 tree var = *nextp;
5380 if (TREE_CODE (var) == FUNCTION_DECL
5381 && DECL_BUILT_IN (var))
5382 *nextp = TREE_CHAIN (var);
5383 else
5384 nextp = &TREE_CHAIN (var);
5390 /* Data used when collecting DECLs and TYPEs for language data removal. */
5392 struct free_lang_data_d
5394 free_lang_data_d () : decls (100), types (100) {}
5396 /* Worklist to avoid excessive recursion. */
5397 auto_vec<tree> worklist;
5399 /* Set of traversed objects. Used to avoid duplicate visits. */
5400 hash_set<tree> pset;
5402 /* Array of symbols to process with free_lang_data_in_decl. */
5403 auto_vec<tree> decls;
5405 /* Array of types to process with free_lang_data_in_type. */
5406 auto_vec<tree> types;
5410 /* Save all language fields needed to generate proper debug information
5411 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5413 static void
5414 save_debug_info_for_decl (tree t)
5416 /*struct saved_debug_info_d *sdi;*/
5418 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5420 /* FIXME. Partial implementation for saving debug info removed. */
5424 /* Save all language fields needed to generate proper debug information
5425 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5427 static void
5428 save_debug_info_for_type (tree t)
5430 /*struct saved_debug_info_d *sdi;*/
5432 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5434 /* FIXME. Partial implementation for saving debug info removed. */
5438 /* Add type or decl T to one of the list of tree nodes that need their
5439 language data removed. The lists are held inside FLD. */
5441 static void
5442 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5444 if (DECL_P (t))
5446 fld->decls.safe_push (t);
5447 if (debug_info_level > DINFO_LEVEL_TERSE)
5448 save_debug_info_for_decl (t);
5450 else if (TYPE_P (t))
5452 fld->types.safe_push (t);
5453 if (debug_info_level > DINFO_LEVEL_TERSE)
5454 save_debug_info_for_type (t);
5456 else
5457 gcc_unreachable ();
5460 /* Push tree node T into FLD->WORKLIST. */
5462 static inline void
5463 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5465 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5466 fld->worklist.safe_push ((t));
5470 /* Operand callback helper for free_lang_data_in_node. *TP is the
5471 subtree operand being considered. */
5473 static tree
5474 find_decls_types_r (tree *tp, int *ws, void *data)
5476 tree t = *tp;
5477 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5479 if (TREE_CODE (t) == TREE_LIST)
5480 return NULL_TREE;
5482 /* Language specific nodes will be removed, so there is no need
5483 to gather anything under them. */
5484 if (is_lang_specific (t))
5486 *ws = 0;
5487 return NULL_TREE;
5490 if (DECL_P (t))
5492 /* Note that walk_tree does not traverse every possible field in
5493 decls, so we have to do our own traversals here. */
5494 add_tree_to_fld_list (t, fld);
5496 fld_worklist_push (DECL_NAME (t), fld);
5497 fld_worklist_push (DECL_CONTEXT (t), fld);
5498 fld_worklist_push (DECL_SIZE (t), fld);
5499 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5501 /* We are going to remove everything under DECL_INITIAL for
5502 TYPE_DECLs. No point walking them. */
5503 if (TREE_CODE (t) != TYPE_DECL)
5504 fld_worklist_push (DECL_INITIAL (t), fld);
5506 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5507 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5509 if (TREE_CODE (t) == FUNCTION_DECL)
5511 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5512 fld_worklist_push (DECL_RESULT (t), fld);
5514 else if (TREE_CODE (t) == TYPE_DECL)
5516 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5518 else if (TREE_CODE (t) == FIELD_DECL)
5520 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5521 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5522 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5523 fld_worklist_push (DECL_FCONTEXT (t), fld);
5526 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
5527 && DECL_HAS_VALUE_EXPR_P (t))
5528 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5530 if (TREE_CODE (t) != FIELD_DECL
5531 && TREE_CODE (t) != TYPE_DECL)
5532 fld_worklist_push (TREE_CHAIN (t), fld);
5533 *ws = 0;
5535 else if (TYPE_P (t))
5537 /* Note that walk_tree does not traverse every possible field in
5538 types, so we have to do our own traversals here. */
5539 add_tree_to_fld_list (t, fld);
5541 if (!RECORD_OR_UNION_TYPE_P (t))
5542 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5543 fld_worklist_push (TYPE_SIZE (t), fld);
5544 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5545 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5546 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5547 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5548 fld_worklist_push (TYPE_NAME (t), fld);
5549 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5550 them and thus do not (and do not want to) reach unused pointer types
5551 this way. */
5552 if (!POINTER_TYPE_P (t))
5553 fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
5554 /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types. */
5555 if (!RECORD_OR_UNION_TYPE_P (t))
5556 fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
5557 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5558 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5559 do not (and do not want to) reach unused variants this way. */
5560 if (TYPE_CONTEXT (t))
5562 tree ctx = TYPE_CONTEXT (t);
5563 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5564 So push that instead. */
5565 while (ctx && TREE_CODE (ctx) == BLOCK)
5566 ctx = BLOCK_SUPERCONTEXT (ctx);
5567 fld_worklist_push (ctx, fld);
5569 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5570 (and do not want to) reach unused types this way. */
5572 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5574 unsigned i;
5575 tree tem;
5576 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5577 fld_worklist_push (TREE_TYPE (tem), fld);
5578 fld_worklist_push (BINFO_VIRTUALS (TYPE_BINFO (t)), fld);
5580 if (RECORD_OR_UNION_TYPE_P (t))
5582 tree tem;
5583 /* Push all TYPE_FIELDS - there can be interleaving interesting
5584 and non-interesting things. */
5585 tem = TYPE_FIELDS (t);
5586 while (tem)
5588 if (TREE_CODE (tem) == FIELD_DECL
5589 || (TREE_CODE (tem) == TYPE_DECL
5590 && !DECL_IGNORED_P (tem)
5591 && debug_info_level > DINFO_LEVEL_TERSE
5592 && !is_redundant_typedef (tem)))
5593 fld_worklist_push (tem, fld);
5594 tem = TREE_CHAIN (tem);
5598 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5599 *ws = 0;
5601 else if (TREE_CODE (t) == BLOCK)
5603 tree tem;
5604 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5605 fld_worklist_push (tem, fld);
5606 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5607 fld_worklist_push (tem, fld);
5608 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5611 if (TREE_CODE (t) != IDENTIFIER_NODE
5612 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5613 fld_worklist_push (TREE_TYPE (t), fld);
5615 return NULL_TREE;
5619 /* Find decls and types in T. */
5621 static void
5622 find_decls_types (tree t, struct free_lang_data_d *fld)
5624 while (1)
5626 if (!fld->pset.contains (t))
5627 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
5628 if (fld->worklist.is_empty ())
5629 break;
5630 t = fld->worklist.pop ();
5634 /* Translate all the types in LIST with the corresponding runtime
5635 types. */
5637 static tree
5638 get_eh_types_for_runtime (tree list)
5640 tree head, prev;
5642 if (list == NULL_TREE)
5643 return NULL_TREE;
5645 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5646 prev = head;
5647 list = TREE_CHAIN (list);
5648 while (list)
5650 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5651 TREE_CHAIN (prev) = n;
5652 prev = TREE_CHAIN (prev);
5653 list = TREE_CHAIN (list);
5656 return head;
5660 /* Find decls and types referenced in EH region R and store them in
5661 FLD->DECLS and FLD->TYPES. */
5663 static void
5664 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5666 switch (r->type)
5668 case ERT_CLEANUP:
5669 break;
5671 case ERT_TRY:
5673 eh_catch c;
5675 /* The types referenced in each catch must first be changed to the
5676 EH types used at runtime. This removes references to FE types
5677 in the region. */
5678 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5680 c->type_list = get_eh_types_for_runtime (c->type_list);
5681 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
5684 break;
5686 case ERT_ALLOWED_EXCEPTIONS:
5687 r->u.allowed.type_list
5688 = get_eh_types_for_runtime (r->u.allowed.type_list);
5689 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
5690 break;
5692 case ERT_MUST_NOT_THROW:
5693 walk_tree (&r->u.must_not_throw.failure_decl,
5694 find_decls_types_r, fld, &fld->pset);
5695 break;
5700 /* Find decls and types referenced in cgraph node N and store them in
5701 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5702 look for *every* kind of DECL and TYPE node reachable from N,
5703 including those embedded inside types and decls (i.e., TYPE_DECLs,
5704 NAMESPACE_DECLs, etc). */
5706 static void
5707 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5709 basic_block bb;
5710 struct function *fn;
5711 unsigned ix;
5712 tree t;
5714 find_decls_types (n->decl, fld);
5716 if (!gimple_has_body_p (n->decl))
5717 return;
5719 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5721 fn = DECL_STRUCT_FUNCTION (n->decl);
5723 /* Traverse locals. */
5724 FOR_EACH_LOCAL_DECL (fn, ix, t)
5725 find_decls_types (t, fld);
5727 /* Traverse EH regions in FN. */
5729 eh_region r;
5730 FOR_ALL_EH_REGION_FN (r, fn)
5731 find_decls_types_in_eh_region (r, fld);
5734 /* Traverse every statement in FN. */
5735 FOR_EACH_BB_FN (bb, fn)
5737 gphi_iterator psi;
5738 gimple_stmt_iterator si;
5739 unsigned i;
5741 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5743 gphi *phi = psi.phi ();
5745 for (i = 0; i < gimple_phi_num_args (phi); i++)
5747 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5748 find_decls_types (*arg_p, fld);
5752 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5754 gimple *stmt = gsi_stmt (si);
5756 if (is_gimple_call (stmt))
5757 find_decls_types (gimple_call_fntype (stmt), fld);
5759 for (i = 0; i < gimple_num_ops (stmt); i++)
5761 tree arg = gimple_op (stmt, i);
5762 find_decls_types (arg, fld);
5769 /* Find decls and types referenced in varpool node N and store them in
5770 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5771 look for *every* kind of DECL and TYPE node reachable from N,
5772 including those embedded inside types and decls (i.e., TYPE_DECLs,
5773 NAMESPACE_DECLs, etc). */
5775 static void
5776 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5778 find_decls_types (v->decl, fld);
5781 /* If T needs an assembler name, have one created for it. */
5783 void
5784 assign_assembler_name_if_needed (tree t)
5786 if (need_assembler_name_p (t))
5788 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5789 diagnostics that use input_location to show locus
5790 information. The problem here is that, at this point,
5791 input_location is generally anchored to the end of the file
5792 (since the parser is long gone), so we don't have a good
5793 position to pin it to.
5795 To alleviate this problem, this uses the location of T's
5796 declaration. Examples of this are
5797 testsuite/g++.dg/template/cond2.C and
5798 testsuite/g++.dg/template/pr35240.C. */
5799 location_t saved_location = input_location;
5800 input_location = DECL_SOURCE_LOCATION (t);
5802 decl_assembler_name (t);
5804 input_location = saved_location;
5809 /* Free language specific information for every operand and expression
5810 in every node of the call graph. This process operates in three stages:
5812 1- Every callgraph node and varpool node is traversed looking for
5813 decls and types embedded in them. This is a more exhaustive
5814 search than that done by find_referenced_vars, because it will
5815 also collect individual fields, decls embedded in types, etc.
5817 2- All the decls found are sent to free_lang_data_in_decl.
5819 3- All the types found are sent to free_lang_data_in_type.
5821 The ordering between decls and types is important because
5822 free_lang_data_in_decl sets assembler names, which includes
5823 mangling. So types cannot be freed up until assembler names have
5824 been set up. */
5826 static void
5827 free_lang_data_in_cgraph (void)
5829 struct cgraph_node *n;
5830 varpool_node *v;
5831 struct free_lang_data_d fld;
5832 tree t;
5833 unsigned i;
5834 alias_pair *p;
5836 /* Find decls and types in the body of every function in the callgraph. */
5837 FOR_EACH_FUNCTION (n)
5838 find_decls_types_in_node (n, &fld);
5840 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5841 find_decls_types (p->decl, &fld);
5843 /* Find decls and types in every varpool symbol. */
5844 FOR_EACH_VARIABLE (v)
5845 find_decls_types_in_var (v, &fld);
5847 /* Set the assembler name on every decl found. We need to do this
5848 now because free_lang_data_in_decl will invalidate data needed
5849 for mangling. This breaks mangling on interdependent decls. */
5850 FOR_EACH_VEC_ELT (fld.decls, i, t)
5851 assign_assembler_name_if_needed (t);
5853 /* Traverse every decl found freeing its language data. */
5854 FOR_EACH_VEC_ELT (fld.decls, i, t)
5855 free_lang_data_in_decl (t);
5857 /* Traverse every type found freeing its language data. */
5858 FOR_EACH_VEC_ELT (fld.types, i, t)
5859 free_lang_data_in_type (t);
5860 if (flag_checking)
5862 FOR_EACH_VEC_ELT (fld.types, i, t)
5863 verify_type (t);
5868 /* Free resources that are used by FE but are not needed once they are done. */
5870 static unsigned
5871 free_lang_data (void)
5873 unsigned i;
5875 /* If we are the LTO frontend we have freed lang-specific data already. */
5876 if (in_lto_p
5877 || (!flag_generate_lto && !flag_generate_offload))
5878 return 0;
5880 /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one. */
5881 if (vec_safe_is_empty (all_translation_units))
5882 build_translation_unit_decl (NULL_TREE);
5884 /* Allocate and assign alias sets to the standard integer types
5885 while the slots are still in the way the frontends generated them. */
5886 for (i = 0; i < itk_none; ++i)
5887 if (integer_types[i])
5888 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5890 /* Traverse the IL resetting language specific information for
5891 operands, expressions, etc. */
5892 free_lang_data_in_cgraph ();
5894 /* Create gimple variants for common types. */
5895 for (unsigned i = 0;
5896 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
5897 ++i)
5898 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
5900 /* Reset some langhooks. Do not reset types_compatible_p, it may
5901 still be used indirectly via the get_alias_set langhook. */
5902 lang_hooks.dwarf_name = lhd_dwarf_name;
5903 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5904 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5906 /* We do not want the default decl_assembler_name implementation,
5907 rather if we have fixed everything we want a wrapper around it
5908 asserting that all non-local symbols already got their assembler
5909 name and only produce assembler names for local symbols. Or rather
5910 make sure we never call decl_assembler_name on local symbols and
5911 devise a separate, middle-end private scheme for it. */
5913 /* Reset diagnostic machinery. */
5914 tree_diagnostics_defaults (global_dc);
5916 return 0;
5920 namespace {
5922 const pass_data pass_data_ipa_free_lang_data =
5924 SIMPLE_IPA_PASS, /* type */
5925 "*free_lang_data", /* name */
5926 OPTGROUP_NONE, /* optinfo_flags */
5927 TV_IPA_FREE_LANG_DATA, /* tv_id */
5928 0, /* properties_required */
5929 0, /* properties_provided */
5930 0, /* properties_destroyed */
5931 0, /* todo_flags_start */
5932 0, /* todo_flags_finish */
5935 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5937 public:
5938 pass_ipa_free_lang_data (gcc::context *ctxt)
5939 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5942 /* opt_pass methods: */
5943 virtual unsigned int execute (function *) { return free_lang_data (); }
5945 }; // class pass_ipa_free_lang_data
5947 } // anon namespace
5949 simple_ipa_opt_pass *
5950 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5952 return new pass_ipa_free_lang_data (ctxt);
5955 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5956 of the various TYPE_QUAL values. */
5958 static void
5959 set_type_quals (tree type, int type_quals)
5961 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5962 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5963 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5964 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5965 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5968 /* Returns true iff CAND and BASE have equivalent language-specific
5969 qualifiers. */
5971 bool
5972 check_lang_type (const_tree cand, const_tree base)
5974 if (lang_hooks.types.type_hash_eq == NULL)
5975 return true;
5976 /* type_hash_eq currently only applies to these types. */
5977 if (TREE_CODE (cand) != FUNCTION_TYPE
5978 && TREE_CODE (cand) != METHOD_TYPE)
5979 return true;
5980 return lang_hooks.types.type_hash_eq (cand, base);
5983 /* Returns true iff unqualified CAND and BASE are equivalent. */
5985 bool
5986 check_base_type (const_tree cand, const_tree base)
5988 return (TYPE_NAME (cand) == TYPE_NAME (base)
5989 /* Apparently this is needed for Objective-C. */
5990 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5991 /* Check alignment. */
5992 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5993 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5994 TYPE_ATTRIBUTES (base)));
5997 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5999 bool
6000 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6002 return (TYPE_QUALS (cand) == type_quals
6003 && check_base_type (cand, base)
6004 && check_lang_type (cand, base));
6007 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6009 static bool
6010 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6012 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6013 && TYPE_NAME (cand) == TYPE_NAME (base)
6014 /* Apparently this is needed for Objective-C. */
6015 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6016 /* Check alignment. */
6017 && TYPE_ALIGN (cand) == align
6018 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6019 TYPE_ATTRIBUTES (base))
6020 && check_lang_type (cand, base));
6023 /* This function checks to see if TYPE matches the size one of the built-in
6024 atomic types, and returns that core atomic type. */
6026 static tree
6027 find_atomic_core_type (tree type)
6029 tree base_atomic_type;
6031 /* Only handle complete types. */
6032 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
6033 return NULL_TREE;
6035 switch (tree_to_uhwi (TYPE_SIZE (type)))
6037 case 8:
6038 base_atomic_type = atomicQI_type_node;
6039 break;
6041 case 16:
6042 base_atomic_type = atomicHI_type_node;
6043 break;
6045 case 32:
6046 base_atomic_type = atomicSI_type_node;
6047 break;
6049 case 64:
6050 base_atomic_type = atomicDI_type_node;
6051 break;
6053 case 128:
6054 base_atomic_type = atomicTI_type_node;
6055 break;
6057 default:
6058 base_atomic_type = NULL_TREE;
6061 return base_atomic_type;
6064 /* Return a version of the TYPE, qualified as indicated by the
6065 TYPE_QUALS, if one exists. If no qualified version exists yet,
6066 return NULL_TREE. */
6068 tree
6069 get_qualified_type (tree type, int type_quals)
6071 tree t;
6073 if (TYPE_QUALS (type) == type_quals)
6074 return type;
6076 /* Search the chain of variants to see if there is already one there just
6077 like the one we need to have. If so, use that existing one. We must
6078 preserve the TYPE_NAME, since there is code that depends on this. */
6079 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6080 if (check_qualified_type (t, type, type_quals))
6081 return t;
6083 return NULL_TREE;
6086 /* Like get_qualified_type, but creates the type if it does not
6087 exist. This function never returns NULL_TREE. */
6089 tree
6090 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
6092 tree t;
6094 /* See if we already have the appropriate qualified variant. */
6095 t = get_qualified_type (type, type_quals);
6097 /* If not, build it. */
6098 if (!t)
6100 t = build_variant_type_copy (type PASS_MEM_STAT);
6101 set_type_quals (t, type_quals);
6103 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6105 /* See if this object can map to a basic atomic type. */
6106 tree atomic_type = find_atomic_core_type (type);
6107 if (atomic_type)
6109 /* Ensure the alignment of this type is compatible with
6110 the required alignment of the atomic type. */
6111 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6112 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6116 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6117 /* Propagate structural equality. */
6118 SET_TYPE_STRUCTURAL_EQUALITY (t);
6119 else if (TYPE_CANONICAL (type) != type)
6120 /* Build the underlying canonical type, since it is different
6121 from TYPE. */
6123 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6124 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6126 else
6127 /* T is its own canonical type. */
6128 TYPE_CANONICAL (t) = t;
6132 return t;
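/* Editorial sketch (not part of the original source): adding qualifiers on
   top of whatever TYPE already carries, assuming TYPE is any complete type:

	tree cv = build_qualified_type (type,
					TYPE_QUALS (type)
					| TYPE_QUAL_CONST
					| TYPE_QUAL_VOLATILE);

   An existing variant on the main-variant chain is reused when one matches;
   otherwise a new variant node is created as above.  */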
6135 /* Create a variant of type T with alignment ALIGN. */
6137 tree
6138 build_aligned_type (tree type, unsigned int align)
6140 tree t;
6142 if (TYPE_PACKED (type)
6143 || TYPE_ALIGN (type) == align)
6144 return type;
6146 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6147 if (check_aligned_type (t, type, align))
6148 return t;
6150 t = build_variant_type_copy (type);
6151 SET_TYPE_ALIGN (t, align);
6152 TYPE_USER_ALIGN (t) = 1;
6154 return t;
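/* Editorial sketch (not part of the original source): requesting an
   over-aligned variant; the alignment argument is in bits, like TYPE_ALIGN:

	tree aligned = build_aligned_type (type, 128);

   Packed types and requests matching the current alignment return TYPE
   unchanged, per the early-out above.  */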
6157 /* Create a new distinct copy of TYPE. The new type is made its own
6158 MAIN_VARIANT. If TYPE requires structural equality checks, the
6159 resulting type requires structural equality checks; otherwise, its
6160 TYPE_CANONICAL points to itself. */
6162 tree
6163 build_distinct_type_copy (tree type MEM_STAT_DECL)
6165 tree t = copy_node (type PASS_MEM_STAT);
6167 TYPE_POINTER_TO (t) = 0;
6168 TYPE_REFERENCE_TO (t) = 0;
6170 /* Set the canonical type either to a new equivalence class, or
6171 propagate the need for structural equality checks. */
6172 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6173 SET_TYPE_STRUCTURAL_EQUALITY (t);
6174 else
6175 TYPE_CANONICAL (t) = t;
6177 /* Make it its own variant. */
6178 TYPE_MAIN_VARIANT (t) = t;
6179 TYPE_NEXT_VARIANT (t) = 0;
6181 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6182 whose TREE_TYPE is not t. This can also happen in the Ada
6183 frontend when using subtypes. */
6185 return t;
6188 /* Create a new variant of TYPE, equivalent but distinct. This is so
6189 the caller can modify it. TYPE_CANONICAL for the return type will
6190 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6191 are considered equal by the language itself (or that both types
6192 require structural equality checks). */
6194 tree
6195 build_variant_type_copy (tree type MEM_STAT_DECL)
6197 tree t, m = TYPE_MAIN_VARIANT (type);
6199 t = build_distinct_type_copy (type PASS_MEM_STAT);
6201 /* Since we're building a variant, assume that it is a non-semantic
6202 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6203 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6204 /* Type variants have no alias set defined. */
6205 TYPE_ALIAS_SET (t) = -1;
6207 /* Add the new type to the chain of variants of TYPE. */
6208 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6209 TYPE_NEXT_VARIANT (m) = t;
6210 TYPE_MAIN_VARIANT (t) = m;
6212 return t;
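/* Editorial illustration (not part of tree.c): the difference between the
   two copy primitives above.  A distinct copy becomes its own main variant
   (a new equivalence class), while a variant copy stays on TYPE's variant
   chain and shares its TYPE_CANONICAL.  The function name is hypothetical.  */
static void
example_copy_kinds (tree type)
{
  tree distinct = build_distinct_type_copy (type);
  tree variant = build_variant_type_copy (type);
  gcc_assert (TYPE_MAIN_VARIANT (distinct) == distinct);
  gcc_assert (TYPE_MAIN_VARIANT (variant) == TYPE_MAIN_VARIANT (type));
}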
6215 /* Return true if the from trees in both tree maps are equal. */
6218 tree_map_base_eq (const void *va, const void *vb)
6220 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6221 *const b = (const struct tree_map_base *) vb;
6222 return (a->from == b->from);
6225 /* Hash a from tree in a tree_map_base. */
6227 unsigned int
6228 tree_map_base_hash (const void *item)
6230 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6233 /* Return true if this tree map structure is marked for garbage collection
6234 purposes. We simply return true if the from tree is marked, so that this
6235 structure goes away when the from tree goes away. */
6238 tree_map_base_marked_p (const void *p)
6240 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6243 /* Hash a from tree in a tree_map. */
6245 unsigned int
6246 tree_map_hash (const void *item)
6248 return (((const struct tree_map *) item)->hash);
6251 /* Hash a from tree in a tree_decl_map. */
6253 unsigned int
6254 tree_decl_map_hash (const void *item)
6256 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6259 /* Return the initialization priority for DECL. */
6261 priority_type
6262 decl_init_priority_lookup (tree decl)
6264 symtab_node *snode = symtab_node::get (decl);
6266 if (!snode)
6267 return DEFAULT_INIT_PRIORITY;
6268 return
6269 snode->get_init_priority ();
6272 /* Return the finalization priority for DECL. */
6274 priority_type
6275 decl_fini_priority_lookup (tree decl)
6277 cgraph_node *node = cgraph_node::get (decl);
6279 if (!node)
6280 return DEFAULT_INIT_PRIORITY;
6281 return
6282 node->get_fini_priority ();
6285 /* Set the initialization priority for DECL to PRIORITY. */
6287 void
6288 decl_init_priority_insert (tree decl, priority_type priority)
6290 struct symtab_node *snode;
6292 if (priority == DEFAULT_INIT_PRIORITY)
6294 snode = symtab_node::get (decl);
6295 if (!snode)
6296 return;
6298 else if (VAR_P (decl))
6299 snode = varpool_node::get_create (decl);
6300 else
6301 snode = cgraph_node::get_create (decl);
6302 snode->set_init_priority (priority);
6305 /* Set the finalization priority for DECL to PRIORITY. */
6307 void
6308 decl_fini_priority_insert (tree decl, priority_type priority)
6310 struct cgraph_node *node;
6312 if (priority == DEFAULT_INIT_PRIORITY)
6314 node = cgraph_node::get (decl);
6315 if (!node)
6316 return;
6318 else
6319 node = cgraph_node::get_create (decl);
6320 node->set_fini_priority (priority);
6323 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6325 static void
6326 print_debug_expr_statistics (void)
6328 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6329 (long) debug_expr_for_decl->size (),
6330 (long) debug_expr_for_decl->elements (),
6331 debug_expr_for_decl->collisions ());
6334 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6336 static void
6337 print_value_expr_statistics (void)
6339 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6340 (long) value_expr_for_decl->size (),
6341 (long) value_expr_for_decl->elements (),
6342 value_expr_for_decl->collisions ());
6345 /* Lookup a debug expression for FROM, and return it if we find one. */
6347 tree
6348 decl_debug_expr_lookup (tree from)
6350 struct tree_decl_map *h, in;
6351 in.base.from = from;
6353 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6354 if (h)
6355 return h->to;
6356 return NULL_TREE;
6359 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6361 void
6362 decl_debug_expr_insert (tree from, tree to)
6364 struct tree_decl_map *h;
6366 h = ggc_alloc<tree_decl_map> ();
6367 h->base.from = from;
6368 h->to = to;
6369 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6372 /* Lookup a value expression for FROM, and return it if we find one. */
6374 tree
6375 decl_value_expr_lookup (tree from)
6377 struct tree_decl_map *h, in;
6378 in.base.from = from;
6380 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6381 if (h)
6382 return h->to;
6383 return NULL_TREE;
6386 /* Insert a mapping FROM->TO in the value expression hashtable. */
6388 void
6389 decl_value_expr_insert (tree from, tree to)
6391 struct tree_decl_map *h;
6393 h = ggc_alloc<tree_decl_map> ();
6394 h->base.from = from;
6395 h->to = to;
6396 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
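/* Editorial illustration (not part of tree.c): callers normally reach the
   lookup/insert routines above through the DECL_VALUE_EXPR accessors in
   tree.h, remembering to set DECL_HAS_VALUE_EXPR_P as well.  The function
   name is hypothetical.  */
static void
example_attach_value_expr (tree decl, tree expr)
{
  SET_DECL_VALUE_EXPR (decl, expr);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;
}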
6399 /* Lookup a vector of debug arguments for FROM, and return it if we
6400 find one. */
6402 vec<tree, va_gc> **
6403 decl_debug_args_lookup (tree from)
6405 struct tree_vec_map *h, in;
6407 if (!DECL_HAS_DEBUG_ARGS_P (from))
6408 return NULL;
6409 gcc_checking_assert (debug_args_for_decl != NULL);
6410 in.base.from = from;
6411 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6412 if (h)
6413 return &h->to;
6414 return NULL;
6417 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6418 arguments hashtable. */
6420 vec<tree, va_gc> **
6421 decl_debug_args_insert (tree from)
6423 struct tree_vec_map *h;
6424 tree_vec_map **loc;
6426 if (DECL_HAS_DEBUG_ARGS_P (from))
6427 return decl_debug_args_lookup (from);
6428 if (debug_args_for_decl == NULL)
6429 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6430 h = ggc_alloc<tree_vec_map> ();
6431 h->base.from = from;
6432 h->to = NULL;
6433 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6434 *loc = h;
6435 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6436 return &h->to;
6439 /* Hashing of types so that we don't make duplicates.
6440 The entry point is `type_hash_canon'. */
6442 /* Generate the default hash code for TYPE. This is designed for
6443 speed, rather than maximum entropy. */
6445 hashval_t
6446 type_hash_canon_hash (tree type)
6448 inchash::hash hstate;
6450 hstate.add_int (TREE_CODE (type));
6452 if (TREE_TYPE (type))
6453 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6455 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6456 /* Just the identifier is adequate to distinguish. */
6457 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6459 switch (TREE_CODE (type))
6461 case METHOD_TYPE:
6462 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6463 /* FALLTHROUGH. */
6464 case FUNCTION_TYPE:
6465 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6466 if (TREE_VALUE (t) != error_mark_node)
6467 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6468 break;
6470 case OFFSET_TYPE:
6471 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6472 break;
6474 case ARRAY_TYPE:
6476 if (TYPE_DOMAIN (type))
6477 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6478 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6480 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6481 hstate.add_object (typeless);
6484 break;
6486 case INTEGER_TYPE:
6488 tree t = TYPE_MAX_VALUE (type);
6489 if (!t)
6490 t = TYPE_MIN_VALUE (type);
6491 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6492 hstate.add_object (TREE_INT_CST_ELT (t, i));
6493 break;
6496 case REAL_TYPE:
6497 case FIXED_POINT_TYPE:
6499 unsigned prec = TYPE_PRECISION (type);
6500 hstate.add_object (prec);
6501 break;
6504 case VECTOR_TYPE:
6505 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6506 break;
6508 default:
6509 break;
6512 return hstate.end ();
6515 /* These are the Hashtable callback functions. */
6517 /* Returns true iff the types are equivalent. */
6519 bool
6520 type_cache_hasher::equal (type_hash *a, type_hash *b)
6522 /* First test the things that are the same for all types. */
6523 if (a->hash != b->hash
6524 || TREE_CODE (a->type) != TREE_CODE (b->type)
6525 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6526 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6527 TYPE_ATTRIBUTES (b->type))
6528 || (TREE_CODE (a->type) != COMPLEX_TYPE
6529 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6530 return 0;
6532 /* Be careful about comparing arrays before and after the element type
6533 has been completed; don't compare TYPE_ALIGN unless both types are
6534 complete. */
6535 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6536 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6537 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6538 return 0;
6540 switch (TREE_CODE (a->type))
6542 case VOID_TYPE:
6543 case COMPLEX_TYPE:
6544 case POINTER_TYPE:
6545 case REFERENCE_TYPE:
6546 case NULLPTR_TYPE:
6547 return 1;
6549 case VECTOR_TYPE:
6550 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6551 TYPE_VECTOR_SUBPARTS (b->type));
6553 case ENUMERAL_TYPE:
6554 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6555 && !(TYPE_VALUES (a->type)
6556 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6557 && TYPE_VALUES (b->type)
6558 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6559 && type_list_equal (TYPE_VALUES (a->type),
6560 TYPE_VALUES (b->type))))
6561 return 0;
6563 /* fall through */
6565 case INTEGER_TYPE:
6566 case REAL_TYPE:
6567 case BOOLEAN_TYPE:
6568 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6569 return false;
6570 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6571 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6572 TYPE_MAX_VALUE (b->type)))
6573 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6574 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6575 TYPE_MIN_VALUE (b->type))));
6577 case FIXED_POINT_TYPE:
6578 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6580 case OFFSET_TYPE:
6581 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6583 case METHOD_TYPE:
6584 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6585 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6586 || (TYPE_ARG_TYPES (a->type)
6587 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6588 && TYPE_ARG_TYPES (b->type)
6589 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6590 && type_list_equal (TYPE_ARG_TYPES (a->type),
6591 TYPE_ARG_TYPES (b->type)))))
6592 break;
6593 return 0;
6594 case ARRAY_TYPE:
6595 /* Don't compare the TYPE_TYPELESS_STORAGE flag on aggregates,
6596 where the flag should be inherited from the element type
6597 and can change after ARRAY_TYPEs are created; on non-aggregates
6598 compare and hash it, since scalars never have that flag set
6599 and we need to differentiate between arrays created by different
6600 front ends or by the middle end. */
6601 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6602 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6603 || (TYPE_TYPELESS_STORAGE (a->type)
6604 == TYPE_TYPELESS_STORAGE (b->type))));
6606 case RECORD_TYPE:
6607 case UNION_TYPE:
6608 case QUAL_UNION_TYPE:
6609 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6610 || (TYPE_FIELDS (a->type)
6611 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6612 && TYPE_FIELDS (b->type)
6613 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6614 && type_list_equal (TYPE_FIELDS (a->type),
6615 TYPE_FIELDS (b->type))));
6617 case FUNCTION_TYPE:
6618 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6619 || (TYPE_ARG_TYPES (a->type)
6620 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6621 && TYPE_ARG_TYPES (b->type)
6622 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6623 && type_list_equal (TYPE_ARG_TYPES (a->type),
6624 TYPE_ARG_TYPES (b->type))))
6625 break;
6626 return 0;
6628 default:
6629 return 0;
6632 if (lang_hooks.types.type_hash_eq != NULL)
6633 return lang_hooks.types.type_hash_eq (a->type, b->type);
6635 return 1;
6638 /* Given TYPE, and HASHCODE its hash code, return the canonical
6639 object for an identical type if one already exists.
6640 Otherwise, return TYPE, and record it as the canonical object.
6642 To use this function, first create a type of the sort you want.
6643 Then compute its hash code from the fields of the type that
6644 make it different from other similar types.
6645 Then call this function and use the value. */
6647 tree
6648 type_hash_canon (unsigned int hashcode, tree type)
6650 type_hash in;
6651 type_hash **loc;
6653 /* The hash table only contains main variants, so ensure that's what we're
6654 being passed. */
6655 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6657 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6658 must call that routine before comparing TYPE_ALIGNs. */
6659 layout_type (type);
6661 in.hash = hashcode;
6662 in.type = type;
6664 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6665 if (*loc)
6667 tree t1 = ((type_hash *) *loc)->type;
6668 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6669 if (TYPE_UID (type) + 1 == next_type_uid)
6670 --next_type_uid;
6671 /* Free also min/max values and the cache for integer
6672 types. This can't be done in free_node, as LTO frees
6673 those on its own. */
6674 if (TREE_CODE (type) == INTEGER_TYPE)
6676 if (TYPE_MIN_VALUE (type)
6677 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6679 /* Zero is always in TYPE_CACHED_VALUES. */
6680 if (! TYPE_UNSIGNED (type))
6681 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6682 ggc_free (TYPE_MIN_VALUE (type));
6684 if (TYPE_MAX_VALUE (type)
6685 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6687 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6688 ggc_free (TYPE_MAX_VALUE (type));
6690 if (TYPE_CACHED_VALUES_P (type))
6691 ggc_free (TYPE_CACHED_VALUES (type));
6693 free_node (type);
6694 return t1;
6696 else
6698 struct type_hash *h;
6700 h = ggc_alloc<type_hash> ();
6701 h->hash = hashcode;
6702 h->type = type;
6703 *loc = h;
6705 return type;
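/* Editorial illustration (not part of tree.c): the workflow described in the
   comment above, as used by the type constructors later in this file --
   build a candidate main variant, hash it, then let type_hash_canon either
   return an existing identical type (freeing the candidate) or register the
   candidate.  The function name is hypothetical.  */
static tree
example_canonicalize_candidate (tree candidate)
{
  hashval_t hash = type_hash_canon_hash (candidate);
  return type_hash_canon (hash, candidate);
}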
6709 static void
6710 print_type_hash_statistics (void)
6712 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6713 (long) type_hash_table->size (),
6714 (long) type_hash_table->elements (),
6715 type_hash_table->collisions ());
6718 /* Given two lists of types
6719 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6720 return 1 if the lists contain the same types in the same order.
6721 Also, the TREE_PURPOSEs must match. */
6724 type_list_equal (const_tree l1, const_tree l2)
6726 const_tree t1, t2;
6728 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6729 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6730 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6731 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6732 && (TREE_TYPE (TREE_PURPOSE (t1))
6733 == TREE_TYPE (TREE_PURPOSE (t2))))))
6734 return 0;
6736 return t1 == t2;
6739 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6740 given by TYPE. If the argument list accepts variable arguments,
6741 then this function counts only the ordinary arguments. */
6744 type_num_arguments (const_tree type)
6746 int i = 0;
6747 tree t;
6749 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6750 /* If the function does not take a variable number of arguments,
6751 the last element in the list will have type `void'. */
6752 if (VOID_TYPE_P (TREE_VALUE (t)))
6753 break;
6754 else
6755 ++i;
6757 return i;
6760 /* Nonzero if integer constants T1 and T2
6761 represent the same constant value. */
6764 tree_int_cst_equal (const_tree t1, const_tree t2)
6766 if (t1 == t2)
6767 return 1;
6769 if (t1 == 0 || t2 == 0)
6770 return 0;
6772 if (TREE_CODE (t1) == INTEGER_CST
6773 && TREE_CODE (t2) == INTEGER_CST
6774 && wi::to_widest (t1) == wi::to_widest (t2))
6775 return 1;
6777 return 0;
6780 /* Return true if T is an INTEGER_CST whose numerical value (extended
6781 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6783 bool
6784 tree_fits_shwi_p (const_tree t)
6786 return (t != NULL_TREE
6787 && TREE_CODE (t) == INTEGER_CST
6788 && wi::fits_shwi_p (wi::to_widest (t)));
6791 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6792 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6794 bool
6795 tree_fits_poly_int64_p (const_tree t)
6797 if (t == NULL_TREE)
6798 return false;
6799 if (POLY_INT_CST_P (t))
6801 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6802 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6803 return false;
6804 return true;
6806 return (TREE_CODE (t) == INTEGER_CST
6807 && wi::fits_shwi_p (wi::to_widest (t)));
6810 /* Return true if T is an INTEGER_CST whose numerical value (extended
6811 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6813 bool
6814 tree_fits_uhwi_p (const_tree t)
6816 return (t != NULL_TREE
6817 && TREE_CODE (t) == INTEGER_CST
6818 && wi::fits_uhwi_p (wi::to_widest (t)));
6821 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6822 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6824 bool
6825 tree_fits_poly_uint64_p (const_tree t)
6827 if (t == NULL_TREE)
6828 return false;
6829 if (POLY_INT_CST_P (t))
6831 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6832 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6833 return false;
6834 return true;
6836 return (TREE_CODE (t) == INTEGER_CST
6837 && wi::fits_uhwi_p (wi::to_widest (t)));
6840 /* T is an INTEGER_CST whose numerical value (extended according to
6841 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6842 HOST_WIDE_INT. */
6844 HOST_WIDE_INT
6845 tree_to_shwi (const_tree t)
6847 gcc_assert (tree_fits_shwi_p (t));
6848 return TREE_INT_CST_LOW (t);
6851 /* T is an INTEGER_CST whose numerical value (extended according to
6852 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6853 HOST_WIDE_INT. */
6855 unsigned HOST_WIDE_INT
6856 tree_to_uhwi (const_tree t)
6858 gcc_assert (tree_fits_uhwi_p (t));
6859 return TREE_INT_CST_LOW (t);
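/* Editorial illustration (not part of tree.c): the intended pairing of the
   predicates and accessors above -- tree_to_uhwi asserts tree_fits_uhwi_p,
   so callers are expected to test first.  The function name is
   hypothetical.  */
static bool
example_get_constant_size (tree size, unsigned HOST_WIDE_INT *out)
{
  if (!tree_fits_uhwi_p (size))
    return false;
  *out = tree_to_uhwi (size);
  return true;
}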
6862 /* Return the most significant (sign) bit of T. */
6865 tree_int_cst_sign_bit (const_tree t)
6867 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6869 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6872 /* Return an indication of the sign of the integer constant T.
6873 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6874 Note that -1 will never be returned if T's type is unsigned. */
6877 tree_int_cst_sgn (const_tree t)
6879 if (wi::to_wide (t) == 0)
6880 return 0;
6881 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6882 return 1;
6883 else if (wi::neg_p (wi::to_wide (t)))
6884 return -1;
6885 else
6886 return 1;
6889 /* Return the minimum number of bits needed to represent VALUE in a
6890 signed or unsigned type; SGN says which. */
6892 unsigned int
6893 tree_int_cst_min_precision (tree value, signop sgn)
6895 /* If the value is negative, compute its negative minus 1. The latter
6896 adjustment is because the absolute value of the largest negative value
6897 is one larger than the largest positive value. This is equivalent to
6898 a bit-wise negation, so use that operation instead. */
6900 if (tree_int_cst_sgn (value) < 0)
6901 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6903 /* Return the number of bits needed, taking into account the fact
6904 that we need one more bit for a signed than unsigned type.
6905 If value is 0 or -1, the minimum precision is 1 regardless
6906 of SGN. */
6908 if (integer_zerop (value))
6909 return 1;
6910 else
6911 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
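/* Editorial worked example (not part of tree.c): for VALUE == 5,
   tree_floor_log2 returns 2, so the result is 3 bits when SGN is UNSIGNED
   (101) and 4 bits when SGN is SIGNED (one extra sign bit); for VALUE == 0
   or VALUE == -1 the result is 1.  */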
6914 /* Return truthvalue of whether T1 is the same tree structure as T2.
6915 Return 1 if they are the same.
6916 Return 0 if they are understandably different.
6917 Return -1 if either contains tree structure not understood by
6918 this function. */
6921 simple_cst_equal (const_tree t1, const_tree t2)
6923 enum tree_code code1, code2;
6924 int cmp;
6925 int i;
6927 if (t1 == t2)
6928 return 1;
6929 if (t1 == 0 || t2 == 0)
6930 return 0;
6932 code1 = TREE_CODE (t1);
6933 code2 = TREE_CODE (t2);
6935 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6937 if (CONVERT_EXPR_CODE_P (code2)
6938 || code2 == NON_LVALUE_EXPR)
6939 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6940 else
6941 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6944 else if (CONVERT_EXPR_CODE_P (code2)
6945 || code2 == NON_LVALUE_EXPR)
6946 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6948 if (code1 != code2)
6949 return 0;
6951 switch (code1)
6953 case INTEGER_CST:
6954 return wi::to_widest (t1) == wi::to_widest (t2);
6956 case REAL_CST:
6957 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6959 case FIXED_CST:
6960 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6962 case STRING_CST:
6963 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6964 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6965 TREE_STRING_LENGTH (t1)));
6967 case CONSTRUCTOR:
6969 unsigned HOST_WIDE_INT idx;
6970 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6971 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6973 if (vec_safe_length (v1) != vec_safe_length (v2))
6974 return false;
6976 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6977 /* ??? Should we also handle fields here? */
6978 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6979 return false;
6980 return true;
6983 case SAVE_EXPR:
6984 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6986 case CALL_EXPR:
6987 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6988 if (cmp <= 0)
6989 return cmp;
6990 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6991 return 0;
6993 const_tree arg1, arg2;
6994 const_call_expr_arg_iterator iter1, iter2;
6995 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6996 arg2 = first_const_call_expr_arg (t2, &iter2);
6997 arg1 && arg2;
6998 arg1 = next_const_call_expr_arg (&iter1),
6999 arg2 = next_const_call_expr_arg (&iter2))
7001 cmp = simple_cst_equal (arg1, arg2);
7002 if (cmp <= 0)
7003 return cmp;
7005 return arg1 == arg2;
7008 case TARGET_EXPR:
7009 /* Special case: if either target is an unallocated VAR_DECL,
7010 it means that it's going to be unified with whatever the
7011 TARGET_EXPR is really supposed to initialize, so treat it
7012 as being equivalent to anything. */
7013 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7014 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7015 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7016 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7017 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7018 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7019 cmp = 1;
7020 else
7021 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7023 if (cmp <= 0)
7024 return cmp;
7026 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7028 case WITH_CLEANUP_EXPR:
7029 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7030 if (cmp <= 0)
7031 return cmp;
7033 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7035 case COMPONENT_REF:
7036 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7037 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7039 return 0;
7041 case VAR_DECL:
7042 case PARM_DECL:
7043 case CONST_DECL:
7044 case FUNCTION_DECL:
7045 return 0;
7047 default:
7048 if (POLY_INT_CST_P (t1))
7049 /* A false return means maybe_ne rather than known_ne. */
7050 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
7051 TYPE_SIGN (TREE_TYPE (t1))),
7052 poly_widest_int::from (poly_int_cst_value (t2),
7053 TYPE_SIGN (TREE_TYPE (t2))));
7054 break;
7057 /* This general rule works for most tree codes. All exceptions should be
7058 handled above. If this is a language-specific tree code, we can't
7059 trust what might be in the operand, so say we don't know
7060 the situation. */
7061 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7062 return -1;
7064 switch (TREE_CODE_CLASS (code1))
7066 case tcc_unary:
7067 case tcc_binary:
7068 case tcc_comparison:
7069 case tcc_expression:
7070 case tcc_reference:
7071 case tcc_statement:
7072 cmp = 1;
7073 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7075 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7076 if (cmp <= 0)
7077 return cmp;
7080 return cmp;
7082 default:
7083 return -1;
7087 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7088 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7089 than U, respectively. */
7092 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7094 if (tree_int_cst_sgn (t) < 0)
7095 return -1;
7096 else if (!tree_fits_uhwi_p (t))
7097 return 1;
7098 else if (TREE_INT_CST_LOW (t) == u)
7099 return 0;
7100 else if (TREE_INT_CST_LOW (t) < u)
7101 return -1;
7102 else
7103 return 1;
7106 /* Return true if SIZE represents a constant size that is in bounds of
7107 what the middle-end and the backend accept (covering not more than
7108 half of the address-space). */
7110 bool
7111 valid_constant_size_p (const_tree size)
7113 if (POLY_INT_CST_P (size))
7115 if (TREE_OVERFLOW (size))
7116 return false;
7117 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7118 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
7119 return false;
7120 return true;
7122 if (! tree_fits_uhwi_p (size)
7123 || TREE_OVERFLOW (size)
7124 || tree_int_cst_sign_bit (size) != 0)
7125 return false;
7126 return true;
7129 /* Return the precision of the type, or for a complex or vector type the
7130 precision of the type of its elements. */
7132 unsigned int
7133 element_precision (const_tree type)
7135 if (!TYPE_P (type))
7136 type = TREE_TYPE (type);
7137 enum tree_code code = TREE_CODE (type);
7138 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7139 type = TREE_TYPE (type);
7141 return TYPE_PRECISION (type);
7144 /* Return true if CODE represents an associative tree code. Otherwise
7145 return false. */
7146 bool
7147 associative_tree_code (enum tree_code code)
7149 switch (code)
7151 case BIT_IOR_EXPR:
7152 case BIT_AND_EXPR:
7153 case BIT_XOR_EXPR:
7154 case PLUS_EXPR:
7155 case MULT_EXPR:
7156 case MIN_EXPR:
7157 case MAX_EXPR:
7158 return true;
7160 default:
7161 break;
7163 return false;
7166 /* Return true if CODE represents a commutative tree code. Otherwise
7167 return false. */
7168 bool
7169 commutative_tree_code (enum tree_code code)
7171 switch (code)
7173 case PLUS_EXPR:
7174 case MULT_EXPR:
7175 case MULT_HIGHPART_EXPR:
7176 case MIN_EXPR:
7177 case MAX_EXPR:
7178 case BIT_IOR_EXPR:
7179 case BIT_XOR_EXPR:
7180 case BIT_AND_EXPR:
7181 case NE_EXPR:
7182 case EQ_EXPR:
7183 case UNORDERED_EXPR:
7184 case ORDERED_EXPR:
7185 case UNEQ_EXPR:
7186 case LTGT_EXPR:
7187 case TRUTH_AND_EXPR:
7188 case TRUTH_XOR_EXPR:
7189 case TRUTH_OR_EXPR:
7190 case WIDEN_MULT_EXPR:
7191 case VEC_WIDEN_MULT_HI_EXPR:
7192 case VEC_WIDEN_MULT_LO_EXPR:
7193 case VEC_WIDEN_MULT_EVEN_EXPR:
7194 case VEC_WIDEN_MULT_ODD_EXPR:
7195 return true;
7197 default:
7198 break;
7200 return false;
7203 /* Return true if CODE represents a ternary tree code for which the
7204 first two operands are commutative. Otherwise return false. */
7205 bool
7206 commutative_ternary_tree_code (enum tree_code code)
7208 switch (code)
7210 case WIDEN_MULT_PLUS_EXPR:
7211 case WIDEN_MULT_MINUS_EXPR:
7212 case DOT_PROD_EXPR:
7213 case FMA_EXPR:
7214 return true;
7216 default:
7217 break;
7219 return false;
7222 /* Returns true if CODE can overflow. */
7224 bool
7225 operation_can_overflow (enum tree_code code)
7227 switch (code)
7229 case PLUS_EXPR:
7230 case MINUS_EXPR:
7231 case MULT_EXPR:
7232 case LSHIFT_EXPR:
7233 /* Can overflow in various ways. */
7234 return true;
7235 case TRUNC_DIV_EXPR:
7236 case EXACT_DIV_EXPR:
7237 case FLOOR_DIV_EXPR:
7238 case CEIL_DIV_EXPR:
7239 /* For INT_MIN / -1. */
7240 return true;
7241 case NEGATE_EXPR:
7242 case ABS_EXPR:
7243 /* For -INT_MIN. */
7244 return true;
7245 default:
7246 /* These operators cannot overflow. */
7247 return false;
7251 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7252 -ftrapv doesn't generate trapping insns for CODE. */
7254 bool
7255 operation_no_trapping_overflow (tree type, enum tree_code code)
7257 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7259 /* We don't generate instructions that trap on overflow for complex or vector
7260 types. */
7261 if (!INTEGRAL_TYPE_P (type))
7262 return true;
7264 if (!TYPE_OVERFLOW_TRAPS (type))
7265 return true;
7267 switch (code)
7269 case PLUS_EXPR:
7270 case MINUS_EXPR:
7271 case MULT_EXPR:
7272 case NEGATE_EXPR:
7273 case ABS_EXPR:
7274 /* These operators can overflow, and -ftrapv generates trapping code for
7275 these. */
7276 return false;
7277 case TRUNC_DIV_EXPR:
7278 case EXACT_DIV_EXPR:
7279 case FLOOR_DIV_EXPR:
7280 case CEIL_DIV_EXPR:
7281 case LSHIFT_EXPR:
7282 /* These operators can overflow, but -ftrapv does not generate trapping
7283 code for these. */
7284 return true;
7285 default:
7286 /* These operators cannot overflow. */
7287 return true;
7291 namespace inchash
7294 /* Generate a hash value for an expression. This can be used iteratively
7295 by passing a previous result as the HSTATE argument.
7297 This function is intended to produce the same hash for expressions which
7298 would compare equal using operand_equal_p. */
7299 void
7300 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
7302 int i;
7303 enum tree_code code;
7304 enum tree_code_class tclass;
7306 if (t == NULL_TREE || t == error_mark_node)
7308 hstate.merge_hash (0);
7309 return;
7312 if (!(flags & OEP_ADDRESS_OF))
7313 STRIP_NOPS (t);
7315 code = TREE_CODE (t);
7317 switch (code)
7319 /* Alas, constants aren't shared, so we can't rely on pointer
7320 identity. */
7321 case VOID_CST:
7322 hstate.merge_hash (0);
7323 return;
7324 case INTEGER_CST:
7325 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7326 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
7327 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
7328 return;
7329 case REAL_CST:
7331 unsigned int val2;
7332 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
7333 val2 = rvc_zero;
7334 else
7335 val2 = real_hash (TREE_REAL_CST_PTR (t));
7336 hstate.merge_hash (val2);
7337 return;
7339 case FIXED_CST:
7341 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7342 hstate.merge_hash (val2);
7343 return;
7345 case STRING_CST:
7346 hstate.add ((const void *) TREE_STRING_POINTER (t),
7347 TREE_STRING_LENGTH (t));
7348 return;
7349 case COMPLEX_CST:
7350 inchash::add_expr (TREE_REALPART (t), hstate, flags);
7351 inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
7352 return;
7353 case VECTOR_CST:
7355 hstate.add_int (VECTOR_CST_NPATTERNS (t));
7356 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
7357 unsigned int count = vector_cst_encoded_nelts (t);
7358 for (unsigned int i = 0; i < count; ++i)
7359 inchash::add_expr (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
7360 return;
7362 case SSA_NAME:
7363 /* We can just compare by pointer. */
7364 hstate.add_hwi (SSA_NAME_VERSION (t));
7365 return;
7366 case PLACEHOLDER_EXPR:
7367 /* The node itself doesn't matter. */
7368 return;
7369 case BLOCK:
7370 case OMP_CLAUSE:
7371 /* Ignore. */
7372 return;
7373 case TREE_LIST:
7374 /* A list of expressions, for a CALL_EXPR or as the elements of a
7375 VECTOR_CST. */
7376 for (; t; t = TREE_CHAIN (t))
7377 inchash::add_expr (TREE_VALUE (t), hstate, flags);
7378 return;
7379 case CONSTRUCTOR:
7381 unsigned HOST_WIDE_INT idx;
7382 tree field, value;
7383 flags &= ~OEP_ADDRESS_OF;
7384 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7386 inchash::add_expr (field, hstate, flags);
7387 inchash::add_expr (value, hstate, flags);
7389 return;
7391 case STATEMENT_LIST:
7393 tree_stmt_iterator i;
7394 for (i = tsi_start (CONST_CAST_TREE (t));
7395 !tsi_end_p (i); tsi_next (&i))
7396 inchash::add_expr (tsi_stmt (i), hstate, flags);
7397 return;
7399 case TREE_VEC:
7400 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
7401 inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags);
7402 return;
7403 case FUNCTION_DECL:
7404 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7405 Otherwise nodes that compare equal according to operand_equal_p might
7406 get different hash codes. However, don't do this for machine specific
7407 or front end builtins, since the function code is overloaded in those
7408 cases. */
7409 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7410 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7412 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7413 code = TREE_CODE (t);
7415 /* FALL THROUGH */
7416 default:
7417 if (POLY_INT_CST_P (t))
7419 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
7420 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
7421 return;
7423 tclass = TREE_CODE_CLASS (code);
7425 if (tclass == tcc_declaration)
7427 /* DECL's have a unique ID */
7428 hstate.add_hwi (DECL_UID (t));
7430 else if (tclass == tcc_comparison && !commutative_tree_code (code))
7432 /* For comparisons that can be swapped, use the lower
7433 tree code. */
7434 enum tree_code ccode = swap_tree_comparison (code);
7435 if (code < ccode)
7436 ccode = code;
7437 hstate.add_object (ccode);
7438 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
7439 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
7441 else if (CONVERT_EXPR_CODE_P (code))
7443 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7444 operand_equal_p. */
7445 enum tree_code ccode = NOP_EXPR;
7446 hstate.add_object (ccode);
7448 /* Don't hash the type; that can lead to having nodes which
7449 compare equal according to operand_equal_p, but which
7450 have different hash codes. Make sure to include signedness
7451 in the hash computation. */
7452 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7453 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7455 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
7456 else if (code == MEM_REF
7457 && (flags & OEP_ADDRESS_OF) != 0
7458 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
7459 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
7460 && integer_zerop (TREE_OPERAND (t, 1)))
7461 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
7462 hstate, flags);
7463 /* Don't ICE on FE specific trees, or their arguments etc.
7464 during operand_equal_p hash verification. */
7465 else if (!IS_EXPR_CODE_CLASS (tclass))
7466 gcc_assert (flags & OEP_HASH_CHECK);
7467 else
7469 unsigned int sflags = flags;
7471 hstate.add_object (code);
7473 switch (code)
7475 case ADDR_EXPR:
7476 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7477 flags |= OEP_ADDRESS_OF;
7478 sflags = flags;
7479 break;
7481 case INDIRECT_REF:
7482 case MEM_REF:
7483 case TARGET_MEM_REF:
7484 flags &= ~OEP_ADDRESS_OF;
7485 sflags = flags;
7486 break;
7488 case ARRAY_REF:
7489 case ARRAY_RANGE_REF:
7490 case COMPONENT_REF:
7491 case BIT_FIELD_REF:
7492 sflags &= ~OEP_ADDRESS_OF;
7493 break;
7495 case COND_EXPR:
7496 flags &= ~OEP_ADDRESS_OF;
7497 break;
7499 case FMA_EXPR:
7500 case WIDEN_MULT_PLUS_EXPR:
7501 case WIDEN_MULT_MINUS_EXPR:
7503 /* The multiplication operands are commutative. */
7504 inchash::hash one, two;
7505 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7506 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7507 hstate.add_commutative (one, two);
7508 inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
7509 return;
7512 case CALL_EXPR:
7513 if (CALL_EXPR_FN (t) == NULL_TREE)
7514 hstate.add_int (CALL_EXPR_IFN (t));
7515 break;
7517 case TARGET_EXPR:
7518 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
7519 Usually different TARGET_EXPRs should just use
7520 different temporaries in their slots. */
7521 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
7522 return;
7524 default:
7525 break;
7528 /* Don't hash the type; that can lead to having nodes which
7529 compare equal according to operand_equal_p, but which
7530 have different hash codes. */
7531 if (code == NON_LVALUE_EXPR)
7533 /* Make sure to include signedness in the hash computation. */
7534 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7535 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7538 else if (commutative_tree_code (code))
7540 /* It's a commutative expression. We want to hash it the same
7541 however it appears. We do this by first hashing both operands
7542 and then rehashing based on the order of their independent
7543 hashes. */
7544 inchash::hash one, two;
7545 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7546 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7547 hstate.add_commutative (one, two);
7549 else
7550 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7551 inchash::add_expr (TREE_OPERAND (t, i), hstate,
7552 i == 0 ? flags : sflags);
7554 return;
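/* Editorial illustration (not part of tree.c): hashing an expression so
   that trees which compare equal under operand_equal_p get the same hash,
   as described above.  The function name is hypothetical.  */
static hashval_t
example_hash_expr (const_tree expr)
{
  inchash::hash hstate;
  inchash::add_expr (expr, hstate, 0);
  return hstate.end ();
}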
7560 /* Constructors for pointer, array and function types.
7561 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7562 constructed by language-dependent code, not here.) */
7564 /* Construct, lay out and return the type of pointers to TO_TYPE with
7565 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7566 reference all of memory. If such a type has already been
7567 constructed, reuse it. */
7569 tree
7570 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7571 bool can_alias_all)
7573 tree t;
7574 bool could_alias = can_alias_all;
7576 if (to_type == error_mark_node)
7577 return error_mark_node;
7579 /* If the pointed-to type has the may_alias attribute set, force
7580 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7581 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7582 can_alias_all = true;
7584 /* In some cases, languages will have things that aren't a POINTER_TYPE
7585 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7586 In that case, return that type without regard to the rest of our
7587 operands.
7589 ??? This is a kludge, but consistent with the way this function has
7590 always operated and there doesn't seem to be a good way to avoid this
7591 at the moment. */
7592 if (TYPE_POINTER_TO (to_type) != 0
7593 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7594 return TYPE_POINTER_TO (to_type);
7596 /* First, if we already have a type for pointers to TO_TYPE and it's
7597 the proper mode, use it. */
7598 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7599 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7600 return t;
7602 t = make_node (POINTER_TYPE);
7604 TREE_TYPE (t) = to_type;
7605 SET_TYPE_MODE (t, mode);
7606 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7607 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7608 TYPE_POINTER_TO (to_type) = t;
7610 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7611 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7612 SET_TYPE_STRUCTURAL_EQUALITY (t);
7613 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7614 TYPE_CANONICAL (t)
7615 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7616 mode, false);
7618 /* Lay out the type. This function has many callers that are concerned
7619 with expression-construction, and this simplifies them all. */
7620 layout_type (t);
7622 return t;
7625 /* By default build pointers in ptr_mode. */
7627 tree
7628 build_pointer_type (tree to_type)
7630 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7631 : TYPE_ADDR_SPACE (to_type);
7632 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7633 return build_pointer_type_for_mode (to_type, pointer_mode, false);
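/* Editorial illustration (not part of tree.c): pointer types are cached on
   the pointed-to type, so repeated requests yield the same node.  The
   function name is hypothetical.  */
static void
example_pointer_sharing (void)
{
  tree p1 = build_pointer_type (integer_type_node);
  tree p2 = build_pointer_type (integer_type_node);
  gcc_assert (p1 == p2);
}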
7636 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7638 tree
7639 build_reference_type_for_mode (tree to_type, machine_mode mode,
7640 bool can_alias_all)
7642 tree t;
7643 bool could_alias = can_alias_all;
7645 if (to_type == error_mark_node)
7646 return error_mark_node;
7648 /* If the pointed-to type has the may_alias attribute set, force
7649 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7650 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7651 can_alias_all = true;
7653 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7654 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7655 In that case, return that type without regard to the rest of our
7656 operands.
7658 ??? This is a kludge, but consistent with the way this function has
7659 always operated and there doesn't seem to be a good way to avoid this
7660 at the moment. */
7661 if (TYPE_REFERENCE_TO (to_type) != 0
7662 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7663 return TYPE_REFERENCE_TO (to_type);
7665 /* First, if we already have a type for pointers to TO_TYPE and it's
7666 the proper mode, use it. */
7667 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7668 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7669 return t;
7671 t = make_node (REFERENCE_TYPE);
7673 TREE_TYPE (t) = to_type;
7674 SET_TYPE_MODE (t, mode);
7675 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7676 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7677 TYPE_REFERENCE_TO (to_type) = t;
7679 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7680 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7681 SET_TYPE_STRUCTURAL_EQUALITY (t);
7682 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7683 TYPE_CANONICAL (t)
7684 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7685 mode, false);
7687 layout_type (t);
7689 return t;
7693 /* Build the node for the type of references-to-TO_TYPE by default
7694 in ptr_mode. */
7696 tree
7697 build_reference_type (tree to_type)
7699 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7700 : TYPE_ADDR_SPACE (to_type);
7701 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7702 return build_reference_type_for_mode (to_type, pointer_mode, false);
7705 #define MAX_INT_CACHED_PREC \
7706 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7707 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7709 /* Builds a signed or unsigned integer type of precision PRECISION.
7710 Used for C bitfields whose precision does not match that of
7711 built-in target types. */
7712 tree
7713 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7714 int unsignedp)
7716 tree itype, ret;
7718 if (unsignedp)
7719 unsignedp = MAX_INT_CACHED_PREC + 1;
7721 if (precision <= MAX_INT_CACHED_PREC)
7723 itype = nonstandard_integer_type_cache[precision + unsignedp];
7724 if (itype)
7725 return itype;
7728 itype = make_node (INTEGER_TYPE);
7729 TYPE_PRECISION (itype) = precision;
7731 if (unsignedp)
7732 fixup_unsigned_type (itype);
7733 else
7734 fixup_signed_type (itype);
7736 ret = itype;
7738 inchash::hash hstate;
7739 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7740 ret = type_hash_canon (hstate.end (), itype);
7741 if (precision <= MAX_INT_CACHED_PREC)
7742 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7744 return ret;
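/* Editorial illustration (not part of tree.c): a 24-bit unsigned type such
   as a C bit-field of that width would need; small precisions are served
   from the cache above on repeated calls.  The function name is
   hypothetical.  */
static tree
example_uint24_type (void)
{
  return build_nonstandard_integer_type (24, /*unsignedp=*/1);
}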
7747 #define MAX_BOOL_CACHED_PREC \
7748 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7749 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7751 /* Builds a boolean type of precision PRECISION.
7752 Used for boolean vectors to choose proper vector element size. */
7753 tree
7754 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7756 tree type;
7758 if (precision <= MAX_BOOL_CACHED_PREC)
7760 type = nonstandard_boolean_type_cache[precision];
7761 if (type)
7762 return type;
7765 type = make_node (BOOLEAN_TYPE);
7766 TYPE_PRECISION (type) = precision;
7767 fixup_signed_type (type);
7769 if (precision <= MAX_BOOL_CACHED_PREC)
7770 nonstandard_boolean_type_cache[precision] = type;
7772 return type;
7775 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7776 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7777 is true, reuse such a type that has already been constructed. */
7779 static tree
7780 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7782 tree itype = make_node (INTEGER_TYPE);
7784 TREE_TYPE (itype) = type;
7786 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7787 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7789 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7790 SET_TYPE_MODE (itype, TYPE_MODE (type));
7791 TYPE_SIZE (itype) = TYPE_SIZE (type);
7792 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7793 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7794 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7795 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7797 if (!shared)
7798 return itype;
7800 if ((TYPE_MIN_VALUE (itype)
7801 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7802 || (TYPE_MAX_VALUE (itype)
7803 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7805 /* Since we cannot reliably merge this type, we need to compare it using
7806 structural equality checks. */
7807 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7808 return itype;
7811 hashval_t hash = type_hash_canon_hash (itype);
7812 itype = type_hash_canon (hash, itype);
7814 return itype;
7817 /* Wrapper around build_range_type_1 with SHARED set to true. */
7819 tree
7820 build_range_type (tree type, tree lowval, tree highval)
7822 return build_range_type_1 (type, lowval, highval, true);
7825 /* Wrapper around build_range_type_1 with SHARED set to false. */
7827 tree
7828 build_nonshared_range_type (tree type, tree lowval, tree highval)
7830 return build_range_type_1 (type, lowval, highval, false);
7833 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7834 MAXVAL should be the maximum value in the domain
7835 (one less than the length of the array).
7837 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7838 We don't enforce this limit; that is up to the caller (e.g. a language front end).
7839 The limit exists because the result is a signed type and we don't handle
7840 sizes that use more than one HOST_WIDE_INT. */
7842 tree
7843 build_index_type (tree maxval)
7845 return build_range_type (sizetype, size_zero_node, maxval);
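/* Editorial illustration (not part of tree.c): since MAXVAL is one less
   than the array length, the domain of a ten-element array is the range
   0 .. 9 in sizetype.  The function name is hypothetical.  */
static tree
example_ten_element_domain (void)
{
  return build_index_type (size_int (9));
}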
7848 /* Return true if the debug information for TYPE, a subtype, should be emitted
7849 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7850 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7851 debug info and doesn't reflect the source code. */
7853 bool
7854 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7856 tree base_type = TREE_TYPE (type), low, high;
7858 /* Subrange types have a base type which is an integral type. */
7859 if (!INTEGRAL_TYPE_P (base_type))
7860 return false;
7862 /* Get the real bounds of the subtype. */
7863 if (lang_hooks.types.get_subrange_bounds)
7864 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7865 else
7867 low = TYPE_MIN_VALUE (type);
7868 high = TYPE_MAX_VALUE (type);
7871 /* If the type and its base type have the same representation and the same
7872 name, then the type is not a subrange but a copy of the base type. */
7873 if ((TREE_CODE (base_type) == INTEGER_TYPE
7874 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7875 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7876 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7877 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7878 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7879 return false;
7881 if (lowval)
7882 *lowval = low;
7883 if (highval)
7884 *highval = high;
7885 return true;
7888 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7889 and number of elements specified by the range of values of INDEX_TYPE.
7890 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7891 If SHARED is true, reuse such a type that has already been constructed. */
7893 static tree
7894 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7895 bool shared)
7897 tree t;
7899 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7901 error ("arrays of functions are not meaningful");
7902 elt_type = integer_type_node;
7905 t = make_node (ARRAY_TYPE);
7906 TREE_TYPE (t) = elt_type;
7907 TYPE_DOMAIN (t) = index_type;
7908 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7909 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7910 layout_type (t);
7912 /* If the element type is incomplete at this point we get marked for
7913 structural equality. Do not record these types in the canonical
7914 type hashtable. */
7915 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7916 return t;
7918 if (shared)
7920 hashval_t hash = type_hash_canon_hash (t);
7921 t = type_hash_canon (hash, t);
7924 if (TYPE_CANONICAL (t) == t)
7926 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7927 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7928 || in_lto_p)
7929 SET_TYPE_STRUCTURAL_EQUALITY (t);
7930 else if (TYPE_CANONICAL (elt_type) != elt_type
7931 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7932 TYPE_CANONICAL (t)
7933 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7934 index_type
7935 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7936 typeless_storage, shared);
7939 return t;
7942 /* Wrapper around build_array_type_1 with SHARED set to true. */
7944 tree
7945 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7947 return build_array_type_1 (elt_type, index_type, typeless_storage, true);
7950 /* Wrapper around build_array_type_1 with SHARED set to false. */
7952 tree
7953 build_nonshared_array_type (tree elt_type, tree index_type)
7955 return build_array_type_1 (elt_type, index_type, false, false);
7958 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7959 sizetype. */
7961 tree
7962 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7964 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
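/* Editorial illustration (not part of tree.c): building the type int[10]
   with the routines above.  The function name is hypothetical.  */
static tree
example_int_array_10 (void)
{
  tree domain = build_index_type (size_int (9));  /* 0 .. 9.  */
  /* build_array_type_nelts (integer_type_node, 10) is shorthand for the
     same construction and normally canonicalizes to the same shared node.  */
  return build_array_type (integer_type_node, domain);
}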
7967 /* Recursively examines the array elements of TYPE until a non-array
7968 element type is found, and returns it. */
7970 tree
7971 strip_array_types (tree type)
7973 while (TREE_CODE (type) == ARRAY_TYPE)
7974 type = TREE_TYPE (type);
7976 return type;
7979 /* Computes the canonical argument types from the argument type list
7980 ARGTYPES.
7982 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7983 on entry to this function, or if any of the ARGTYPES are
7984 structural.
7986 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7987 true on entry to this function, or if any of the ARGTYPES are
7988 non-canonical.
7990 Returns a canonical argument list, which may be ARGTYPES when the
7991 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7992 true) or would not differ from ARGTYPES. */
7994 static tree
7995 maybe_canonicalize_argtypes (tree argtypes,
7996 bool *any_structural_p,
7997 bool *any_noncanonical_p)
7999 tree arg;
8000 bool any_noncanonical_argtypes_p = false;
8002 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8004 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8005 /* Fail gracefully by stating that the type is structural. */
8006 *any_structural_p = true;
8007 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8008 *any_structural_p = true;
8009 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8010 || TREE_PURPOSE (arg))
8011 /* If the argument has a default argument, we consider it
8012 non-canonical even though the type itself is canonical.
8013 That way, different variants of function and method types
8014 with default arguments will all point to the variant with
8015 no defaults as their canonical type. */
8016 any_noncanonical_argtypes_p = true;
8019 if (*any_structural_p)
8020 return argtypes;
8022 if (any_noncanonical_argtypes_p)
8024 /* Build the canonical list of argument types. */
8025 tree canon_argtypes = NULL_TREE;
8026 bool is_void = false;
8028 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8030 if (arg == void_list_node)
8031 is_void = true;
8032 else
8033 canon_argtypes = tree_cons (NULL_TREE,
8034 TYPE_CANONICAL (TREE_VALUE (arg)),
8035 canon_argtypes);
8038 canon_argtypes = nreverse (canon_argtypes);
8039 if (is_void)
8040 canon_argtypes = chainon (canon_argtypes, void_list_node);
8042 /* There is a non-canonical type. */
8043 *any_noncanonical_p = true;
8044 return canon_argtypes;
8047 /* The canonical argument types are the same as ARGTYPES. */
8048 return argtypes;
8051 /* Construct, lay out and return
8052 the type of functions returning type VALUE_TYPE
8053 given arguments of types ARG_TYPES.
8054 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8055 are data type nodes for the arguments of the function.
8056 If such a type has already been constructed, reuse it. */
8058 tree
8059 build_function_type (tree value_type, tree arg_types)
8061 tree t;
8062 inchash::hash hstate;
8063 bool any_structural_p, any_noncanonical_p;
8064 tree canon_argtypes;
8066 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8068 error ("function return type cannot be function");
8069 value_type = integer_type_node;
8072 /* Make a node of the sort we want. */
8073 t = make_node (FUNCTION_TYPE);
8074 TREE_TYPE (t) = value_type;
8075 TYPE_ARG_TYPES (t) = arg_types;
8077 /* If we already have such a type, use the old one. */
8078 hashval_t hash = type_hash_canon_hash (t);
8079 t = type_hash_canon (hash, t);
8081 /* Set up the canonical type. */
8082 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8083 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8084 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8085 &any_structural_p,
8086 &any_noncanonical_p);
8087 if (any_structural_p)
8088 SET_TYPE_STRUCTURAL_EQUALITY (t);
8089 else if (any_noncanonical_p)
8090 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8091 canon_argtypes);
8093 if (!COMPLETE_TYPE_P (t))
8094 layout_type (t);
8095 return t;
8098 /* Build a function type. The RETURN_TYPE is the type returned by the
8099 function. If VAARGS is set, no void_type_node is appended to the
8100 list. ARGP must always be terminated by a NULL_TREE. */
8102 static tree
8103 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8105 tree t, args, last;
8107 t = va_arg (argp, tree);
8108 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8109 args = tree_cons (NULL_TREE, t, args);
8111 if (vaargs)
8113 last = args;
8114 if (args != NULL_TREE)
8115 args = nreverse (args);
8116 gcc_assert (last != void_list_node);
8118 else if (args == NULL_TREE)
8119 args = void_list_node;
8120 else
8122 last = args;
8123 args = nreverse (args);
8124 TREE_CHAIN (last) = void_list_node;
8126 args = build_function_type (return_type, args);
8128 return args;
8131 /* Build a function type. The RETURN_TYPE is the type returned by the
8132 function. If additional arguments are provided, they are
8133 additional argument types. The list of argument types must always
8134 be terminated by NULL_TREE. */
8136 tree
8137 build_function_type_list (tree return_type, ...)
8139 tree args;
8140 va_list p;
8142 va_start (p, return_type);
8143 args = build_function_type_list_1 (false, return_type, p);
8144 va_end (p);
8145 return args;
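/* Editorial illustration (not part of tree.c): the type of
   "int f (double, char *)" built with build_function_type_list; the
   argument list is terminated by NULL_TREE as required.  The function
   name is hypothetical.  */
static tree
example_int_of_double_charp (void)
{
  return build_function_type_list (integer_type_node, double_type_node,
				   build_pointer_type (char_type_node),
				   NULL_TREE);
}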
8148 /* Build a variable argument function type. The RETURN_TYPE is the
8149 type returned by the function. If additional arguments are provided,
8150 they are additional argument types. The list of argument types must
8151 always be terminated by NULL_TREE. */
8153 tree
8154 build_varargs_function_type_list (tree return_type, ...)
8156 tree args;
8157 va_list p;
8159 va_start (p, return_type);
8160 args = build_function_type_list_1 (true, return_type, p);
8161 va_end (p);
8163 return args;
8166 /* Build a function type. RETURN_TYPE is the type returned by the
8167 function; VAARGS indicates whether the function takes varargs. The
8168 function takes N named arguments, the types of which are provided in
8169 ARG_TYPES. */
8171 static tree
8172 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8173 tree *arg_types)
8175 int i;
8176 tree t = vaargs ? NULL_TREE : void_list_node;
8178 for (i = n - 1; i >= 0; i--)
8179 t = tree_cons (NULL_TREE, arg_types[i], t);
8181 return build_function_type (return_type, t);
8184 /* Build a function type. RETURN_TYPE is the type returned by the
8185 function. The function takes N named arguments, the types of which
8186 are provided in ARG_TYPES. */
8188 tree
8189 build_function_type_array (tree return_type, int n, tree *arg_types)
8191 return build_function_type_array_1 (false, return_type, n, arg_types);
8194 /* Build a variable argument function type. RETURN_TYPE is the type
8195 returned by the function. The function takes N named arguments, the
8196 types of which are provided in ARG_TYPES. */
8198 tree
8199 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8201 return build_function_type_array_1 (true, return_type, n, arg_types);
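/* Illustrative sketch (editorial addition): the array-based variants take an
   explicit count instead of a NULL_TREE terminator, e.g.

     tree argtypes[2] = { double_type_node, double_type_node };
     tree fn  = build_function_type_array (double_type_node, 2, argtypes);
     tree vfn = build_varargs_function_type_array (double_type_node,
                                                   2, argtypes);

   FN describes "double (double, double)" while VFN describes
   "double (double, double, ...)".  */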
8204 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8205 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8206 for the method. An implicit additional parameter (of type
8207 pointer-to-BASETYPE) is added to the ARGTYPES. */
8209 tree
8210 build_method_type_directly (tree basetype,
8211 tree rettype,
8212 tree argtypes)
8214 tree t;
8215 tree ptype;
8216 bool any_structural_p, any_noncanonical_p;
8217 tree canon_argtypes;
8219 /* Make a node of the sort we want. */
8220 t = make_node (METHOD_TYPE);
8222 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8223 TREE_TYPE (t) = rettype;
8224 ptype = build_pointer_type (basetype);
8226 /* The actual arglist for this function includes a "hidden" argument
8227 which is "this". Put it into the list of argument types. */
8228 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8229 TYPE_ARG_TYPES (t) = argtypes;
8231 /* If we already have such a type, use the old one. */
8232 hashval_t hash = type_hash_canon_hash (t);
8233 t = type_hash_canon (hash, t);
8235 /* Set up the canonical type. */
8236 any_structural_p
8237 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8238 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8239 any_noncanonical_p
8240 = (TYPE_CANONICAL (basetype) != basetype
8241 || TYPE_CANONICAL (rettype) != rettype);
8242 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8243 &any_structural_p,
8244 &any_noncanonical_p);
8245 if (any_structural_p)
8246 SET_TYPE_STRUCTURAL_EQUALITY (t);
8247 else if (any_noncanonical_p)
8248 TYPE_CANONICAL (t)
8249 = build_method_type_directly (TYPE_CANONICAL (basetype),
8250 TYPE_CANONICAL (rettype),
8251 canon_argtypes);
8252 if (!COMPLETE_TYPE_P (t))
8253 layout_type (t);
8255 return t;
8258 /* Construct, lay out and return the type of methods belonging to class
8259 BASETYPE and whose arguments and values are described by TYPE.
8260 If that type exists already, reuse it.
8261 TYPE must be a FUNCTION_TYPE node. */
8263 tree
8264 build_method_type (tree basetype, tree type)
8266 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8268 return build_method_type_directly (basetype,
8269 TREE_TYPE (type),
8270 TYPE_ARG_TYPES (type));
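/* Illustrative sketch (editorial addition): for a C++ member function such
   as "int S::f (int)", assuming S_TYPE is the RECORD_TYPE for S and FNTYPE
   is the plain FUNCTION_TYPE "int (int)", a front end could form

     tree mtype = build_method_type (s_type, fntype);

   The resulting METHOD_TYPE gets an implicit first parameter of type
   "pointer to S" prepended by build_method_type_directly.  */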
8273 /* Construct, lay out and return the type of offsets to a value
8274 of type TYPE, within an object of type BASETYPE.
8275 If a suitable offset type exists already, reuse it. */
8277 tree
8278 build_offset_type (tree basetype, tree type)
8280 tree t;
8282 /* Make a node of the sort we want. */
8283 t = make_node (OFFSET_TYPE);
8285 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8286 TREE_TYPE (t) = type;
8288 /* If we already have such a type, use the old one. */
8289 hashval_t hash = type_hash_canon_hash (t);
8290 t = type_hash_canon (hash, t);
8292 if (!COMPLETE_TYPE_P (t))
8293 layout_type (t);
8295 if (TYPE_CANONICAL (t) == t)
8297 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8298 || TYPE_STRUCTURAL_EQUALITY_P (type))
8299 SET_TYPE_STRUCTURAL_EQUALITY (t);
8300 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8301 || TYPE_CANONICAL (type) != type)
8302 TYPE_CANONICAL (t)
8303 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8304 TYPE_CANONICAL (type));
8307 return t;
8310 /* Create a complex type whose components are COMPONENT_TYPE.
8312 If NAMED is true, the type is given a TYPE_NAME. We do not always
8313 do so because this creates a DECL node and thus makes the DECL_UIDs
8314 dependent on the type canonicalization hashtable, which is GC-ed,
8315 so the DECL_UIDs would not be stable wrt garbage collection. */
8317 tree
8318 build_complex_type (tree component_type, bool named)
8320 gcc_assert (INTEGRAL_TYPE_P (component_type)
8321 || SCALAR_FLOAT_TYPE_P (component_type)
8322 || FIXED_POINT_TYPE_P (component_type));
8324 /* Make a node of the sort we want. */
8325 tree probe = make_node (COMPLEX_TYPE);
8327 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
8329 /* If we already have such a type, use the old one. */
8330 hashval_t hash = type_hash_canon_hash (probe);
8331 tree t = type_hash_canon (hash, probe);
8333 if (t == probe)
8335 /* We created a new type. The hash insertion will have laid
8336 out the type. We need to check the canonicalization and
8337 maybe set the name. */
8338 gcc_checking_assert (COMPLETE_TYPE_P (t)
8339 && !TYPE_NAME (t)
8340 && TYPE_CANONICAL (t) == t);
8342 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
8343 SET_TYPE_STRUCTURAL_EQUALITY (t);
8344 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
8345 TYPE_CANONICAL (t)
8346 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
8348 /* We need to create a name, since complex is a fundamental type. */
8349 if (named)
8351 const char *name = NULL;
8353 if (TREE_TYPE (t) == char_type_node)
8354 name = "complex char";
8355 else if (TREE_TYPE (t) == signed_char_type_node)
8356 name = "complex signed char";
8357 else if (TREE_TYPE (t) == unsigned_char_type_node)
8358 name = "complex unsigned char";
8359 else if (TREE_TYPE (t) == short_integer_type_node)
8360 name = "complex short int";
8361 else if (TREE_TYPE (t) == short_unsigned_type_node)
8362 name = "complex short unsigned int";
8363 else if (TREE_TYPE (t) == integer_type_node)
8364 name = "complex int";
8365 else if (TREE_TYPE (t) == unsigned_type_node)
8366 name = "complex unsigned int";
8367 else if (TREE_TYPE (t) == long_integer_type_node)
8368 name = "complex long int";
8369 else if (TREE_TYPE (t) == long_unsigned_type_node)
8370 name = "complex long unsigned int";
8371 else if (TREE_TYPE (t) == long_long_integer_type_node)
8372 name = "complex long long int";
8373 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
8374 name = "complex long long unsigned int";
8376 if (name != NULL)
8377 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8378 get_identifier (name), t);
8382 return build_qualified_type (t, TYPE_QUALS (component_type));
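/* Illustrative sketch (editorial addition):

     tree cint = build_complex_type (integer_type_node, true);

   yields a COMPLEX_TYPE whose component type is int and whose TYPE_NAME,
   set when the node is first created with NAMED true, is a TYPE_DECL for
   "complex int".  Floating-point component types are not in the table
   above, so they are left unnamed by this function.  */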
8385 /* If TYPE is a real or complex floating-point type and the target
8386 does not directly support arithmetic on TYPE then return the wider
8387 type to be used for arithmetic on TYPE. Otherwise, return
8388 NULL_TREE. */
8390 tree
8391 excess_precision_type (tree type)
8393 /* The target can give two different responses to the question of
8394 which excess precision mode it would like depending on whether we
8395 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8397 enum excess_precision_type requested_type
8398 = (flag_excess_precision == EXCESS_PRECISION_FAST
8399 ? EXCESS_PRECISION_TYPE_FAST
8400 : EXCESS_PRECISION_TYPE_STANDARD);
8402 enum flt_eval_method target_flt_eval_method
8403 = targetm.c.excess_precision (requested_type);
8405 /* The target should not ask for unpredictable float evaluation (though
8406 it might advertise that implicitly the evaluation is unpredictable,
8407 but we don't care about that here, it will have been reported
8408 elsewhere). If it does ask for unpredictable evaluation, we have
8409 nothing to do here. */
8410 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8412 /* Nothing to do. The target has asked for all types we know about
8413 to be computed with their native precision and range. */
8414 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8415 return NULL_TREE;
8417 /* The target will promote this type in a target-dependent way, so excess
8418 precision ought to leave it alone. */
8419 if (targetm.promoted_type (type) != NULL_TREE)
8420 return NULL_TREE;
8422 machine_mode float16_type_mode = (float16_type_node
8423 ? TYPE_MODE (float16_type_node)
8424 : VOIDmode);
8425 machine_mode float_type_mode = TYPE_MODE (float_type_node);
8426 machine_mode double_type_mode = TYPE_MODE (double_type_node);
8428 switch (TREE_CODE (type))
8430 case REAL_TYPE:
8432 machine_mode type_mode = TYPE_MODE (type);
8433 switch (target_flt_eval_method)
8435 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8436 if (type_mode == float16_type_mode)
8437 return float_type_node;
8438 break;
8439 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8440 if (type_mode == float16_type_mode
8441 || type_mode == float_type_mode)
8442 return double_type_node;
8443 break;
8444 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8445 if (type_mode == float16_type_mode
8446 || type_mode == float_type_mode
8447 || type_mode == double_type_mode)
8448 return long_double_type_node;
8449 break;
8450 default:
8451 gcc_unreachable ();
8453 break;
8455 case COMPLEX_TYPE:
8457 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8458 return NULL_TREE;
8459 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8460 switch (target_flt_eval_method)
8462 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8463 if (type_mode == float16_type_mode)
8464 return complex_float_type_node;
8465 break;
8466 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8467 if (type_mode == float16_type_mode
8468 || type_mode == float_type_mode)
8469 return complex_double_type_node;
8470 break;
8471 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8472 if (type_mode == float16_type_mode
8473 || type_mode == float_type_mode
8474 || type_mode == double_type_mode)
8475 return complex_long_double_type_node;
8476 break;
8477 default:
8478 gcc_unreachable ();
8480 break;
8482 default:
8483 break;
8486 return NULL_TREE;
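/* Illustrative sketch (editorial addition): on a target whose
   targetm.c.excess_precision hook answers FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE,
   excess_precision_type (float_type_node) returns double_type_node and
   excess_precision_type (complex_float_type_node) returns
   complex_double_type_node, while double and wider types come back as
   NULL_TREE (no excess precision is needed for them).  */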
8489 /* Return OP, stripped of any conversions to wider types as much as is safe.
8490 Converting the value back to OP's type makes a value equivalent to OP.
8492 If FOR_TYPE is nonzero, we return a value which, if converted to
8493 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8495 OP must have integer, real or enumeral type. Pointers are not allowed!
8497 There are some cases where the obvious value we could return
8498 would regenerate to OP if converted to OP's type,
8499 but would not extend like OP to wider types.
8500 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8501 For example, if OP is (unsigned short)(signed char)-1,
8502 we avoid returning (signed char)-1 if FOR_TYPE is int,
8503 even though extending that to an unsigned short would regenerate OP,
8504 since the result of extending (signed char)-1 to (int)
8505 is different from (int) OP. */
8507 tree
8508 get_unwidened (tree op, tree for_type)
8510 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8511 tree type = TREE_TYPE (op);
8512 unsigned final_prec
8513 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8514 int uns
8515 = (for_type != 0 && for_type != type
8516 && final_prec > TYPE_PRECISION (type)
8517 && TYPE_UNSIGNED (type));
8518 tree win = op;
8520 while (CONVERT_EXPR_P (op))
8522 int bitschange;
8524 /* TYPE_PRECISION on vector types has a different meaning
8525 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8526 so avoid them here. */
8527 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8528 break;
8530 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8531 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8533 /* Truncations are many-one so cannot be removed.
8534 Unless we are later going to truncate down even farther. */
8535 if (bitschange < 0
8536 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8537 break;
8539 /* See what's inside this conversion. If we decide to strip it,
8540 we will set WIN. */
8541 op = TREE_OPERAND (op, 0);
8543 /* If we have not stripped any zero-extensions (uns is 0),
8544 we can strip any kind of extension.
8545 If we have previously stripped a zero-extension,
8546 only zero-extensions can safely be stripped.
8547 Any extension can be stripped if the bits it would produce
8548 are all going to be discarded later by truncating to FOR_TYPE. */
8550 if (bitschange > 0)
8552 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8553 win = op;
8554 /* TYPE_UNSIGNED says whether this is a zero-extension.
8555 Let's avoid computing it if it does not affect WIN
8556 and if UNS will not be needed again. */
8557 if ((uns
8558 || CONVERT_EXPR_P (op))
8559 && TYPE_UNSIGNED (TREE_TYPE (op)))
8561 uns = 1;
8562 win = op;
8567 /* If we finally reach a constant, see if it fits in something smaller and
8568 in that case convert it. */
8569 if (TREE_CODE (win) == INTEGER_CST)
8571 tree wtype = TREE_TYPE (win);
8572 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8573 if (for_type)
8574 prec = MAX (prec, final_prec);
8575 if (prec < TYPE_PRECISION (wtype))
8577 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8578 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8579 win = fold_convert (t, win);
8583 return win;
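/* Illustrative sketch (editorial addition): assuming C holds a variable of
   type signed char and OP is the widening conversion (int) C, then

     tree narrow = get_unwidened (op, NULL_TREE);

   strips the conversion and returns C itself, since converting C back to int
   regenerates OP.  With FOR_TYPE set, extensions whose extra bits would be
   truncated away anyway can also be stripped, as described above.  */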
8586 /* Return OP or a simpler expression for a narrower value
8587 which can be sign-extended or zero-extended to give back OP.
8588 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8589 or 0 if the value should be sign-extended. */
8591 tree
8592 get_narrower (tree op, int *unsignedp_ptr)
8594 int uns = 0;
8595 int first = 1;
8596 tree win = op;
8597 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8599 while (TREE_CODE (op) == NOP_EXPR)
8601 int bitschange
8602 = (TYPE_PRECISION (TREE_TYPE (op))
8603 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8605 /* Truncations are many-one so cannot be removed. */
8606 if (bitschange < 0)
8607 break;
8609 /* See what's inside this conversion. If we decide to strip it,
8610 we will set WIN. */
8612 if (bitschange > 0)
8614 op = TREE_OPERAND (op, 0);
8615 /* An extension: the outermost one can be stripped,
8616 but remember whether it is zero or sign extension. */
8617 if (first)
8618 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8619 /* Otherwise, if a sign extension has been stripped,
8620 only sign extensions can now be stripped;
8621 if a zero extension has been stripped, only zero-extensions. */
8622 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8623 break;
8624 first = 0;
8626 else /* bitschange == 0 */
8628 /* A change in nominal type can always be stripped, but we must
8629 preserve the unsignedness. */
8630 if (first)
8631 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8632 first = 0;
8633 op = TREE_OPERAND (op, 0);
8634 /* Keep trying to narrow, but don't assign op to win if it
8635 would turn an integral type into something else. */
8636 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8637 continue;
8640 win = op;
8643 if (TREE_CODE (op) == COMPONENT_REF
8644 /* Since type_for_size always gives an integer type. */
8645 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8646 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8647 /* Ensure field is laid out already. */
8648 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8649 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8651 unsigned HOST_WIDE_INT innerprec
8652 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8653 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8654 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8655 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8657 /* We can get this structure field in a narrower type that fits it,
8658 but the resulting extension to its nominal type (a fullword type)
8659 must satisfy the same conditions as for other extensions.
8661 Do this only for fields that are aligned (not bit-fields),
8662 because when bit-field insns will be used there is no
8663 advantage in doing this. */
8665 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8666 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8667 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8668 && type != 0)
8670 if (first)
8671 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8672 win = fold_convert (type, op);
8676 *unsignedp_ptr = uns;
8677 return win;
8680 /* Return true if integer constant C has a value that is permissible
8681 for TYPE, an integral type. */
8683 bool
8684 int_fits_type_p (const_tree c, const_tree type)
8686 tree type_low_bound, type_high_bound;
8687 bool ok_for_low_bound, ok_for_high_bound;
8688 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8690 /* Non-standard boolean types can have arbitrary precision but various
8691 transformations assume that they can only take values 0 and +/-1. */
8692 if (TREE_CODE (type) == BOOLEAN_TYPE)
8693 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8695 retry:
8696 type_low_bound = TYPE_MIN_VALUE (type);
8697 type_high_bound = TYPE_MAX_VALUE (type);
8699 /* If at least one bound of the type is a constant integer, we can check
8700 ourselves and maybe make a decision. If no such decision is possible, but
8701 this type is a subtype, try checking against that. Otherwise, use
8702 fits_to_tree_p, which checks against the precision.
8704 Compute the status for each possibly constant bound, and return if we see
8705 one does not match. Use ok_for_xxx_bound for this purpose: it is set to
8706 true when the corresponding bound is a constant integer that C is known
8707 to satisfy, and stays false when that bound is not a constant. */
8709 /* Check if c >= type_low_bound. */
8710 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8712 if (tree_int_cst_lt (c, type_low_bound))
8713 return false;
8714 ok_for_low_bound = true;
8716 else
8717 ok_for_low_bound = false;
8719 /* Check if c <= type_high_bound. */
8720 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8722 if (tree_int_cst_lt (type_high_bound, c))
8723 return false;
8724 ok_for_high_bound = true;
8726 else
8727 ok_for_high_bound = false;
8729 /* If the constant fits both bounds, the result is known. */
8730 if (ok_for_low_bound && ok_for_high_bound)
8731 return true;
8733 /* Perform some generic filtering which may allow making a decision
8734 even if the bounds are not constant. First, negative integers
8735 never fit in unsigned types. */
8736 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8737 return false;
8739 /* Second, narrower types always fit in wider ones. */
8740 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8741 return true;
8743 /* Third, unsigned integers with top bit set never fit signed types. */
8744 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8746 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8747 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8749 /* When a tree_cst is converted to a wide-int, the precision
8750 is taken from the type. However, if the precision of the
8751 mode underneath the type is smaller than that, it is
8752 possible that the value will not fit. The test below
8753 fails if any bit is set between the sign bit of the
8754 underlying mode and the top bit of the type. */
8755 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8756 return false;
8758 else if (wi::neg_p (wi::to_wide (c)))
8759 return false;
8762 /* If we haven't been able to decide at this point, there is nothing more we
8763 can check ourselves here. Look at the base type if we have one and it
8764 has the same precision. */
8765 if (TREE_CODE (type) == INTEGER_TYPE
8766 && TREE_TYPE (type) != 0
8767 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8769 type = TREE_TYPE (type);
8770 goto retry;
8773 /* Otherwise, fall back to fits_to_tree_p. */
8774 return wi::fits_to_tree_p (wi::to_wide (c), type);
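/* Illustrative sketch (editorial addition), assuming the usual 8-bit
   unsigned char bounds on the target:

     int_fits_type_p (build_int_cst (integer_type_node, 200),
                      unsigned_char_type_node)   -> true  (0 <= 200 <= 255)
     int_fits_type_p (build_int_cst (integer_type_node, 300),
                      unsigned_char_type_node)   -> false (300 > 255)
     int_fits_type_p (integer_minus_one_node,
                      unsigned_type_node)        -> false (negative value)  */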
8777 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8778 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8779 represented (assuming two's-complement arithmetic) within the bit
8780 precision of the type are returned instead. */
8782 void
8783 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8785 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8786 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8787 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8788 else
8790 if (TYPE_UNSIGNED (type))
8791 mpz_set_ui (min, 0);
8792 else
8794 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8795 wi::to_mpz (mn, min, SIGNED);
8799 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8800 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8801 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8802 else
8804 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8805 wi::to_mpz (mn, max, TYPE_SIGN (type));
8809 /* Return true if VAR is an automatic variable defined in function FN. */
8811 bool
8812 auto_var_in_fn_p (const_tree var, const_tree fn)
8814 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8815 && ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8816 || TREE_CODE (var) == PARM_DECL)
8817 && ! TREE_STATIC (var))
8818 || TREE_CODE (var) == LABEL_DECL
8819 || TREE_CODE (var) == RESULT_DECL));
8822 /* Subprogram of following function. Called by walk_tree.
8824 Return *TP if it is an automatic variable or parameter of the
8825 function passed in as DATA. */
8827 static tree
8828 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8830 tree fn = (tree) data;
8832 if (TYPE_P (*tp))
8833 *walk_subtrees = 0;
8835 else if (DECL_P (*tp)
8836 && auto_var_in_fn_p (*tp, fn))
8837 return *tp;
8839 return NULL_TREE;
8842 /* Returns true if TYPE is, contains, or refers to a type with variable
8843 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8844 arguments, but not the return type. If FN is nonzero, only return
8845 true if a modifier of the type or position of FN is a variable or
8846 parameter inside FN.
8848 This concept is more general than that of C99 'variably modified types':
8849 in C99, a struct type is never variably modified because a VLA may not
8850 appear as a structure member. However, in GNU C code like:
8852 struct S { int i[f()]; };
8854 is valid, and other languages may define similar constructs. */
8856 bool
8857 variably_modified_type_p (tree type, tree fn)
8859 tree t;
8861 /* Test if T is either variable (if FN is zero) or an expression containing
8862 a variable in FN. If TYPE isn't gimplified, return true also if
8863 gimplify_one_sizepos would gimplify the expression into a local
8864 variable. */
8865 #define RETURN_TRUE_IF_VAR(T) \
8866 do { tree _t = (T); \
8867 if (_t != NULL_TREE \
8868 && _t != error_mark_node \
8869 && TREE_CODE (_t) != INTEGER_CST \
8870 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8871 && (!fn \
8872 || (!TYPE_SIZES_GIMPLIFIED (type) \
8873 && !is_gimple_sizepos (_t)) \
8874 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8875 return true; } while (0)
8877 if (type == error_mark_node)
8878 return false;
8880 /* If TYPE itself has variable size, it is variably modified. */
8881 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8882 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8884 switch (TREE_CODE (type))
8886 case POINTER_TYPE:
8887 case REFERENCE_TYPE:
8888 case VECTOR_TYPE:
8889 /* Ada can have pointer types referring to themselves indirectly. */
8890 if (TREE_VISITED (type))
8891 return false;
8892 TREE_VISITED (type) = true;
8893 if (variably_modified_type_p (TREE_TYPE (type), fn))
8895 TREE_VISITED (type) = false;
8896 return true;
8898 TREE_VISITED (type) = false;
8899 break;
8901 case FUNCTION_TYPE:
8902 case METHOD_TYPE:
8903 /* If TYPE is a function type, it is variably modified if the
8904 return type is variably modified. */
8905 if (variably_modified_type_p (TREE_TYPE (type), fn))
8906 return true;
8907 break;
8909 case INTEGER_TYPE:
8910 case REAL_TYPE:
8911 case FIXED_POINT_TYPE:
8912 case ENUMERAL_TYPE:
8913 case BOOLEAN_TYPE:
8914 /* Scalar types are variably modified if their end points
8915 aren't constant. */
8916 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8917 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8918 break;
8920 case RECORD_TYPE:
8921 case UNION_TYPE:
8922 case QUAL_UNION_TYPE:
8923 /* We can't see if any of the fields are variably-modified by the
8924 definition we normally use, since that would produce infinite
8925 recursion via pointers. */
8926 /* This is variably modified if some field's type is. */
8927 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8928 if (TREE_CODE (t) == FIELD_DECL)
8930 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8931 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8932 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8934 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8935 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8937 break;
8939 case ARRAY_TYPE:
8940 /* Do not call ourselves to avoid infinite recursion. This is
8941 variably modified if the element type is. */
8942 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8943 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8944 break;
8946 default:
8947 break;
8950 /* The current language may have other cases to check, but in general,
8951 all other types are not variably modified. */
8952 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8954 #undef RETURN_TRUE_IF_VAR
8957 /* Given a DECL or TYPE, return the scope in which it was declared, or
8958 NULL_TREE if there is no containing scope. */
8960 tree
8961 get_containing_scope (const_tree t)
8963 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8966 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8968 const_tree
8969 get_ultimate_context (const_tree decl)
8971 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8973 if (TREE_CODE (decl) == BLOCK)
8974 decl = BLOCK_SUPERCONTEXT (decl);
8975 else
8976 decl = get_containing_scope (decl);
8978 return decl;
8981 /* Return the innermost context enclosing DECL that is
8982 a FUNCTION_DECL, or zero if none. */
8984 tree
8985 decl_function_context (const_tree decl)
8987 tree context;
8989 if (TREE_CODE (decl) == ERROR_MARK)
8990 return 0;
8992 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8993 where we look up the function at runtime. Such functions always take
8994 a first argument of type 'pointer to real context'.
8996 C++ should really be fixed to use DECL_CONTEXT for the real context,
8997 and use something else for the "virtual context". */
8998 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8999 context
9000 = TYPE_MAIN_VARIANT
9001 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9002 else
9003 context = DECL_CONTEXT (decl);
9005 while (context && TREE_CODE (context) != FUNCTION_DECL)
9007 if (TREE_CODE (context) == BLOCK)
9008 context = BLOCK_SUPERCONTEXT (context);
9009 else
9010 context = get_containing_scope (context);
9013 return context;
9016 /* Return the innermost context enclosing DECL that is
9017 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9018 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9020 tree
9021 decl_type_context (const_tree decl)
9023 tree context = DECL_CONTEXT (decl);
9025 while (context)
9026 switch (TREE_CODE (context))
9028 case NAMESPACE_DECL:
9029 case TRANSLATION_UNIT_DECL:
9030 return NULL_TREE;
9032 case RECORD_TYPE:
9033 case UNION_TYPE:
9034 case QUAL_UNION_TYPE:
9035 return context;
9037 case TYPE_DECL:
9038 case FUNCTION_DECL:
9039 context = DECL_CONTEXT (context);
9040 break;
9042 case BLOCK:
9043 context = BLOCK_SUPERCONTEXT (context);
9044 break;
9046 default:
9047 gcc_unreachable ();
9050 return NULL_TREE;
9053 /* CALL is a CALL_EXPR. Return the declaration for the function
9054 called, or NULL_TREE if the called function cannot be
9055 determined. */
9057 tree
9058 get_callee_fndecl (const_tree call)
9060 tree addr;
9062 if (call == error_mark_node)
9063 return error_mark_node;
9065 /* It's invalid to call this function with anything but a
9066 CALL_EXPR. */
9067 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9069 /* The first operand to the CALL is the address of the function
9070 called. */
9071 addr = CALL_EXPR_FN (call);
9073 /* If there is no function, return early. */
9074 if (addr == NULL_TREE)
9075 return NULL_TREE;
9077 STRIP_NOPS (addr);
9079 /* If this is a readonly function pointer, extract its initial value. */
9080 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9081 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9082 && DECL_INITIAL (addr))
9083 addr = DECL_INITIAL (addr);
9085 /* If the address is just `&f' for some function `f', then we know
9086 that `f' is being called. */
9087 if (TREE_CODE (addr) == ADDR_EXPR
9088 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9089 return TREE_OPERAND (addr, 0);
9091 /* We couldn't figure out what was being called. */
9092 return NULL_TREE;
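/* Illustrative sketch (editorial addition): for a direct call such as
   "foo (x)" the CALL_EXPR's function operand is "&foo" (an ADDR_EXPR of a
   FUNCTION_DECL), so get_callee_fndecl returns foo's FUNCTION_DECL.  For a
   call through an arbitrary function pointer variable it typically returns
   NULL_TREE, unless the pointer is a readonly decl whose DECL_INITIAL is a
   known "&f", per the code above.  */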
9095 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9096 return the associated function code, otherwise return CFN_LAST. */
9098 combined_fn
9099 get_call_combined_fn (const_tree call)
9101 /* It's invalid to call this function with anything but a CALL_EXPR. */
9102 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9104 if (!CALL_EXPR_FN (call))
9105 return as_combined_fn (CALL_EXPR_IFN (call));
9107 tree fndecl = get_callee_fndecl (call);
9108 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9109 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9111 return CFN_LAST;
9114 #define TREE_MEM_USAGE_SPACES 40
9116 /* Print debugging information about tree nodes generated during the compile,
9117 and any language-specific information. */
9119 void
9120 dump_tree_statistics (void)
9122 if (GATHER_STATISTICS)
9124 int i;
9125 int total_nodes, total_bytes;
9126 fprintf (stderr, "\nKind Nodes Bytes\n");
9127 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9128 total_nodes = total_bytes = 0;
9129 for (i = 0; i < (int) all_kinds; i++)
9131 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9132 tree_node_counts[i], tree_node_sizes[i]);
9133 total_nodes += tree_node_counts[i];
9134 total_bytes += tree_node_sizes[i];
9136 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9137 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9138 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9139 fprintf (stderr, "Code Nodes\n");
9140 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9141 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9142 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9143 tree_code_counts[i]);
9144 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9145 fprintf (stderr, "\n");
9146 ssanames_print_statistics ();
9147 fprintf (stderr, "\n");
9148 phinodes_print_statistics ();
9149 fprintf (stderr, "\n");
9151 else
9152 fprintf (stderr, "(No per-node statistics)\n");
9154 print_type_hash_statistics ();
9155 print_debug_expr_statistics ();
9156 print_value_expr_statistics ();
9157 lang_hooks.print_statistics ();
9160 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9162 /* Generate a crc32 of the low BYTES bytes of VALUE. */
9164 unsigned
9165 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
9167 /* This relies on the raw feedback's top 4 bits being zero. */
9168 #define FEEDBACK(X) ((X) * 0x04c11db7)
9169 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
9170 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
9171 static const unsigned syndromes[16] =
9173 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
9174 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
9175 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
9176 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
9178 #undef FEEDBACK
9179 #undef SYNDROME
9181 value <<= (32 - bytes * 8);
9182 for (unsigned ix = bytes * 2; ix--; value <<= 4)
9184 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
9186 chksum = (chksum << 4) ^ feedback;
9189 return chksum;
9192 /* Generate a crc32 of a string. */
9194 unsigned
9195 crc32_string (unsigned chksum, const char *string)
9198 chksum = crc32_byte (chksum, *string);
9199 while (*string++);
9200 return chksum;
9203 /* P is a string that will be used in a symbol. Mask out any characters
9204 that are not valid in that context. */
9206 void
9207 clean_symbol_name (char *p)
9209 for (; *p; p++)
9210 if (! (ISALNUM (*p)
9211 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9212 || *p == '$'
9213 #endif
9214 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9215 || *p == '.'
9216 #endif
9218 *p = '_';
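/* Illustrative sketch (editorial addition): given a writable copy of
   "my file-1.c", clean_symbol_name rewrites it in place to "my_file_1.c"
   on targets where NO_DOT_IN_LABEL is not defined (dots survive), and to
   "my_file_1_c" where it is defined.  Spaces and '-' are never valid in a
   symbol, so they always become '_'.  */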
9221 /* For anonymous aggregate types, we need some sort of name to
9222 hold on to. In practice, this should not appear, but it should
9223 not be harmful if it does. */
9224 bool
9225 anon_aggrname_p (const_tree id_node)
9227 #ifndef NO_DOT_IN_LABEL
9228 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9229 && IDENTIFIER_POINTER (id_node)[1] == '_');
9230 #else /* NO_DOT_IN_LABEL */
9231 #ifndef NO_DOLLAR_IN_LABEL
9232 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
9233 && IDENTIFIER_POINTER (id_node)[1] == '_');
9234 #else /* NO_DOLLAR_IN_LABEL */
9235 #define ANON_AGGRNAME_PREFIX "__anon_"
9236 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9237 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9238 #endif /* NO_DOLLAR_IN_LABEL */
9239 #endif /* NO_DOT_IN_LABEL */
9242 /* Return a format for an anonymous aggregate name. */
9243 const char *
9244 anon_aggrname_format ()
9246 #ifndef NO_DOT_IN_LABEL
9247 return "._%d";
9248 #else /* NO_DOT_IN_LABEL */
9249 #ifndef NO_DOLLAR_IN_LABEL
9250 return "$_%d";
9251 #else /* NO_DOLLAR_IN_LABEL */
9252 return "__anon_%d";
9253 #endif /* NO_DOLLAR_IN_LABEL */
9254 #endif /* NO_DOT_IN_LABEL */
9257 /* Generate a name for a special-purpose function.
9258 The generated name may need to be unique across the whole link.
9259 Changes to this function may also require corresponding changes to
9260 xstrdup_mask_random.
9261 TYPE is some string to identify the purpose of this function to the
9262 linker or collect2; it must start with an uppercase letter,
9263 one of:
9264 I - for constructors
9265 D - for destructors
9266 N - for C++ anonymous namespaces
9267 F - for DWARF unwind frame information. */
9269 tree
9270 get_file_function_name (const char *type)
9272 char *buf;
9273 const char *p;
9274 char *q;
9276 /* If we already have a name we know to be unique, just use that. */
9277 if (first_global_object_name)
9278 p = q = ASTRDUP (first_global_object_name);
9279 /* If the target is handling the constructors/destructors, they
9280 will be local to this file and the name is only necessary for
9281 debugging purposes.
9282 We also assign sub_I and sub_D suffixes to constructors called from
9283 the global static constructors. These are always local. */
9284 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9285 || (strncmp (type, "sub_", 4) == 0
9286 && (type[4] == 'I' || type[4] == 'D')))
9288 const char *file = main_input_filename;
9289 if (! file)
9290 file = LOCATION_FILE (input_location);
9291 /* Just use the file's basename, because the full pathname
9292 might be quite long. */
9293 p = q = ASTRDUP (lbasename (file));
9295 else
9297 /* Otherwise, the name must be unique across the entire link.
9298 We don't have anything that we know to be unique to this translation
9299 unit, so use what we do have and throw in some randomness. */
9300 unsigned len;
9301 const char *name = weak_global_object_name;
9302 const char *file = main_input_filename;
9304 if (! name)
9305 name = "";
9306 if (! file)
9307 file = LOCATION_FILE (input_location);
9309 len = strlen (file);
9310 q = (char *) alloca (9 + 19 + len + 1);
9311 memcpy (q, file, len + 1);
9313 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9314 crc32_string (0, name), get_random_seed (false));
9316 p = q;
9319 clean_symbol_name (q);
9320 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9321 + strlen (type));
9323 /* Set up the name of the file-level functions we may need.
9324 Use a global object (which is already required to be unique over
9325 the program) rather than the file name (which imposes extra
9326 constraints). */
9327 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9329 return get_identifier (buf);
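/* Illustrative sketch (editorial addition): with FILE_FUNCTION_FORMAT being
   "_GLOBAL__%s_%s", a constructor function for a file whose chosen base
   string cleans up to "foo_c" would be named roughly

     get_file_function_name ("I")   ->  identifier "_GLOBAL__I_foo_c"

   while the sub_I/sub_D and link-unique cases above pick the base string
   differently, as described in the comments.  */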
9332 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9334 /* Complain that the tree code of NODE does not match the expected 0
9335 terminated list of trailing codes. The trailing code list can be
9336 empty, for a more vague error message. FILE, LINE, and FUNCTION
9337 are of the caller. */
9339 void
9340 tree_check_failed (const_tree node, const char *file,
9341 int line, const char *function, ...)
9343 va_list args;
9344 const char *buffer;
9345 unsigned length = 0;
9346 enum tree_code code;
9348 va_start (args, function);
9349 while ((code = (enum tree_code) va_arg (args, int)))
9350 length += 4 + strlen (get_tree_code_name (code));
9351 va_end (args);
9352 if (length)
9354 char *tmp;
9355 va_start (args, function);
9356 length += strlen ("expected ");
9357 buffer = tmp = (char *) alloca (length);
9358 length = 0;
9359 while ((code = (enum tree_code) va_arg (args, int)))
9361 const char *prefix = length ? " or " : "expected ";
9363 strcpy (tmp + length, prefix);
9364 length += strlen (prefix);
9365 strcpy (tmp + length, get_tree_code_name (code));
9366 length += strlen (get_tree_code_name (code));
9368 va_end (args);
9370 else
9371 buffer = "unexpected node";
9373 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9374 buffer, get_tree_code_name (TREE_CODE (node)),
9375 function, trim_filename (file), line);
9378 /* Complain that the tree code of NODE does match the expected 0
9379 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9380 the caller. */
9382 void
9383 tree_not_check_failed (const_tree node, const char *file,
9384 int line, const char *function, ...)
9386 va_list args;
9387 char *buffer;
9388 unsigned length = 0;
9389 enum tree_code code;
9391 va_start (args, function);
9392 while ((code = (enum tree_code) va_arg (args, int)))
9393 length += 4 + strlen (get_tree_code_name (code));
9394 va_end (args);
9395 va_start (args, function);
9396 buffer = (char *) alloca (length);
9397 length = 0;
9398 while ((code = (enum tree_code) va_arg (args, int)))
9400 if (length)
9402 strcpy (buffer + length, " or ");
9403 length += 4;
9405 strcpy (buffer + length, get_tree_code_name (code));
9406 length += strlen (get_tree_code_name (code));
9408 va_end (args);
9410 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9411 buffer, get_tree_code_name (TREE_CODE (node)),
9412 function, trim_filename (file), line);
9415 /* Similar to tree_check_failed, except that we check for a class of tree
9416 code, given in CL. */
9418 void
9419 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9420 const char *file, int line, const char *function)
9422 internal_error
9423 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9424 TREE_CODE_CLASS_STRING (cl),
9425 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9426 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9429 /* Similar to tree_check_failed, except that instead of specifying a
9430 dozen codes, use the knowledge that they're all sequential. */
9432 void
9433 tree_range_check_failed (const_tree node, const char *file, int line,
9434 const char *function, enum tree_code c1,
9435 enum tree_code c2)
9437 char *buffer;
9438 unsigned length = 0;
9439 unsigned int c;
9441 for (c = c1; c <= c2; ++c)
9442 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9444 length += strlen ("expected ");
9445 buffer = (char *) alloca (length);
9446 length = 0;
9448 for (c = c1; c <= c2; ++c)
9450 const char *prefix = length ? " or " : "expected ";
9452 strcpy (buffer + length, prefix);
9453 length += strlen (prefix);
9454 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9455 length += strlen (get_tree_code_name ((enum tree_code) c));
9458 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9459 buffer, get_tree_code_name (TREE_CODE (node)),
9460 function, trim_filename (file), line);
9464 /* Similar to tree_check_failed, except that we check that a tree does
9465 not belong to the specified class, given in CL. */
9467 void
9468 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9469 const char *file, int line, const char *function)
9471 internal_error
9472 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9473 TREE_CODE_CLASS_STRING (cl),
9474 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9475 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9479 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9481 void
9482 omp_clause_check_failed (const_tree node, const char *file, int line,
9483 const char *function, enum omp_clause_code code)
9485 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9486 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9487 function, trim_filename (file), line);
9491 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9493 void
9494 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9495 const char *function, enum omp_clause_code c1,
9496 enum omp_clause_code c2)
9498 char *buffer;
9499 unsigned length = 0;
9500 unsigned int c;
9502 for (c = c1; c <= c2; ++c)
9503 length += 4 + strlen (omp_clause_code_name[c]);
9505 length += strlen ("expected ");
9506 buffer = (char *) alloca (length);
9507 length = 0;
9509 for (c = c1; c <= c2; ++c)
9511 const char *prefix = length ? " or " : "expected ";
9513 strcpy (buffer + length, prefix);
9514 length += strlen (prefix);
9515 strcpy (buffer + length, omp_clause_code_name[c]);
9516 length += strlen (omp_clause_code_name[c]);
9519 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9520 buffer, omp_clause_code_name[TREE_CODE (node)],
9521 function, trim_filename (file), line);
9525 #undef DEFTREESTRUCT
9526 #define DEFTREESTRUCT(VAL, NAME) NAME,
9528 static const char *ts_enum_names[] = {
9529 #include "treestruct.def"
9531 #undef DEFTREESTRUCT
9533 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9535 /* Similar to tree_class_check_failed, except that we check for
9536 whether CODE contains the tree structure identified by EN. */
9538 void
9539 tree_contains_struct_check_failed (const_tree node,
9540 const enum tree_node_structure_enum en,
9541 const char *file, int line,
9542 const char *function)
9544 internal_error
9545 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9546 TS_ENUM_NAME (en),
9547 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9551 /* Similar to above, except that the check is for the bounds of a
9552 TREE_INT_CST's (dynamically sized) element vector. */
9554 void
9555 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9556 const char *function)
9558 internal_error
9559 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9560 idx + 1, len, function, trim_filename (file), line);
9563 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9564 (dynamically sized) vector. */
9566 void
9567 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9568 const char *function)
9570 internal_error
9571 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9572 idx + 1, len, function, trim_filename (file), line);
9575 /* Similar to above, except that the check is for the bounds of the operand
9576 vector of an expression node EXP. */
9578 void
9579 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9580 int line, const char *function)
9582 enum tree_code code = TREE_CODE (exp);
9583 internal_error
9584 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9585 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9586 function, trim_filename (file), line);
9589 /* Similar to above, except that the check is for the number of
9590 operands of an OMP_CLAUSE node. */
9592 void
9593 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9594 int line, const char *function)
9596 internal_error
9597 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9598 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9599 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9600 trim_filename (file), line);
9602 #endif /* ENABLE_TREE_CHECKING */
9604 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9605 and mapped to the machine mode MODE. Initialize its fields and build
9606 the information necessary for debugging output. */
9608 static tree
9609 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9611 tree t;
9612 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9614 t = make_node (VECTOR_TYPE);
9615 TREE_TYPE (t) = mv_innertype;
9616 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9617 SET_TYPE_MODE (t, mode);
9619 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9620 SET_TYPE_STRUCTURAL_EQUALITY (t);
9621 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9622 || mode != VOIDmode)
9623 && !VECTOR_BOOLEAN_TYPE_P (t))
9624 TYPE_CANONICAL (t)
9625 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9627 layout_type (t);
9629 hashval_t hash = type_hash_canon_hash (t);
9630 t = type_hash_canon (hash, t);
9632 /* We have built a main variant, based on the main variant of the
9633 inner type. Use it to build the variant we return. */
9634 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9635 && TREE_TYPE (t) != innertype)
9636 return build_type_attribute_qual_variant (t,
9637 TYPE_ATTRIBUTES (innertype),
9638 TYPE_QUALS (innertype));
9640 return t;
9643 static tree
9644 make_or_reuse_type (unsigned size, int unsignedp)
9646 int i;
9648 if (size == INT_TYPE_SIZE)
9649 return unsignedp ? unsigned_type_node : integer_type_node;
9650 if (size == CHAR_TYPE_SIZE)
9651 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9652 if (size == SHORT_TYPE_SIZE)
9653 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9654 if (size == LONG_TYPE_SIZE)
9655 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9656 if (size == LONG_LONG_TYPE_SIZE)
9657 return (unsignedp ? long_long_unsigned_type_node
9658 : long_long_integer_type_node);
9660 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9661 if (size == int_n_data[i].bitsize
9662 && int_n_enabled_p[i])
9663 return (unsignedp ? int_n_trees[i].unsigned_type
9664 : int_n_trees[i].signed_type);
9666 if (unsignedp)
9667 return make_unsigned_type (size);
9668 else
9669 return make_signed_type (size);
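/* Illustrative sketch (editorial addition): make_or_reuse_type prefers the
   existing C type nodes, e.g. make_or_reuse_type (INT_TYPE_SIZE, 1) returns
   unsigned_type_node, while a size matching none of the standard or __intN
   widths falls through to make_signed_type/make_unsigned_type and creates a
   fresh node.  */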
9672 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9674 static tree
9675 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9677 if (satp)
9679 if (size == SHORT_FRACT_TYPE_SIZE)
9680 return unsignedp ? sat_unsigned_short_fract_type_node
9681 : sat_short_fract_type_node;
9682 if (size == FRACT_TYPE_SIZE)
9683 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9684 if (size == LONG_FRACT_TYPE_SIZE)
9685 return unsignedp ? sat_unsigned_long_fract_type_node
9686 : sat_long_fract_type_node;
9687 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9688 return unsignedp ? sat_unsigned_long_long_fract_type_node
9689 : sat_long_long_fract_type_node;
9691 else
9693 if (size == SHORT_FRACT_TYPE_SIZE)
9694 return unsignedp ? unsigned_short_fract_type_node
9695 : short_fract_type_node;
9696 if (size == FRACT_TYPE_SIZE)
9697 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9698 if (size == LONG_FRACT_TYPE_SIZE)
9699 return unsignedp ? unsigned_long_fract_type_node
9700 : long_fract_type_node;
9701 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9702 return unsignedp ? unsigned_long_long_fract_type_node
9703 : long_long_fract_type_node;
9706 return make_fract_type (size, unsignedp, satp);
9709 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9711 static tree
9712 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9714 if (satp)
9716 if (size == SHORT_ACCUM_TYPE_SIZE)
9717 return unsignedp ? sat_unsigned_short_accum_type_node
9718 : sat_short_accum_type_node;
9719 if (size == ACCUM_TYPE_SIZE)
9720 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9721 if (size == LONG_ACCUM_TYPE_SIZE)
9722 return unsignedp ? sat_unsigned_long_accum_type_node
9723 : sat_long_accum_type_node;
9724 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9725 return unsignedp ? sat_unsigned_long_long_accum_type_node
9726 : sat_long_long_accum_type_node;
9728 else
9730 if (size == SHORT_ACCUM_TYPE_SIZE)
9731 return unsignedp ? unsigned_short_accum_type_node
9732 : short_accum_type_node;
9733 if (size == ACCUM_TYPE_SIZE)
9734 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9735 if (size == LONG_ACCUM_TYPE_SIZE)
9736 return unsignedp ? unsigned_long_accum_type_node
9737 : long_accum_type_node;
9738 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9739 return unsignedp ? unsigned_long_long_accum_type_node
9740 : long_long_accum_type_node;
9743 return make_accum_type (size, unsignedp, satp);
9747 /* Create an atomic variant node for TYPE. This routine is called
9748 during initialization of data types to create the 5 basic atomic
9749 types. The generic build_variant_type function requires these to
9750 already be set up in order to function properly, so cannot be
9751 called from there. If ALIGN is non-zero, then ensure alignment is
9752 overridden to this value. */
9754 static tree
9755 build_atomic_base (tree type, unsigned int align)
9757 tree t;
9759 /* Make sure it's not already registered. */
9760 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9761 return t;
9763 t = build_variant_type_copy (type);
9764 set_type_quals (t, TYPE_QUAL_ATOMIC);
9766 if (align)
9767 SET_TYPE_ALIGN (t, align);
9769 return t;
9772 /* Information about the _FloatN and _FloatNx types. This must be in
9773 the same order as the corresponding TI_* enum values. */
9774 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9776 { 16, false },
9777 { 32, false },
9778 { 64, false },
9779 { 128, false },
9780 { 32, true },
9781 { 64, true },
9782 { 128, true },
9786 /* Create nodes for all integer types (and error_mark_node) using the sizes
9787 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9789 void
9790 build_common_tree_nodes (bool signed_char)
9792 int i;
9794 error_mark_node = make_node (ERROR_MARK);
9795 TREE_TYPE (error_mark_node) = error_mark_node;
9797 initialize_sizetypes ();
9799 /* Define both `signed char' and `unsigned char'. */
9800 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9801 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9802 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9803 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9805 /* Define `char', which is like either `signed char' or `unsigned char'
9806 but not the same as either. */
9807 char_type_node
9808 = (signed_char
9809 ? make_signed_type (CHAR_TYPE_SIZE)
9810 : make_unsigned_type (CHAR_TYPE_SIZE));
9811 TYPE_STRING_FLAG (char_type_node) = 1;
9813 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9814 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9815 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9816 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9817 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9818 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9819 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9820 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9822 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9824 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9825 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9826 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9827 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9829 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9830 && int_n_enabled_p[i])
9832 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9833 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9837 /* Define a boolean type. This type only represents boolean values but
9838 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9839 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9840 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9841 TYPE_PRECISION (boolean_type_node) = 1;
9842 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9844 /* Define what type to use for size_t. */
9845 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9846 size_type_node = unsigned_type_node;
9847 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9848 size_type_node = long_unsigned_type_node;
9849 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9850 size_type_node = long_long_unsigned_type_node;
9851 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9852 size_type_node = short_unsigned_type_node;
9853 else
9855 int i;
9857 size_type_node = NULL_TREE;
9858 for (i = 0; i < NUM_INT_N_ENTS; i++)
9859 if (int_n_enabled_p[i])
9861 char name[50];
9862 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9864 if (strcmp (name, SIZE_TYPE) == 0)
9866 size_type_node = int_n_trees[i].unsigned_type;
9869 if (size_type_node == NULL_TREE)
9870 gcc_unreachable ();
9873 /* Define what type to use for ptrdiff_t. */
9874 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9875 ptrdiff_type_node = integer_type_node;
9876 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9877 ptrdiff_type_node = long_integer_type_node;
9878 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9879 ptrdiff_type_node = long_long_integer_type_node;
9880 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9881 ptrdiff_type_node = short_integer_type_node;
9882 else
9884 ptrdiff_type_node = NULL_TREE;
9885 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9886 if (int_n_enabled_p[i])
9888 char name[50];
9889 sprintf (name, "__int%d", int_n_data[i].bitsize);
9890 if (strcmp (name, PTRDIFF_TYPE) == 0)
9891 ptrdiff_type_node = int_n_trees[i].signed_type;
9893 if (ptrdiff_type_node == NULL_TREE)
9894 gcc_unreachable ();
9897 /* Fill in the rest of the sized types. Reuse existing type nodes
9898 when possible. */
9899 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9900 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9901 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9902 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9903 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9905 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9906 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9907 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9908 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9909 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9911 /* Don't call build_qualified_type for atomics. That routine does
9912 special processing for atomics, and until they are initialized
9913 it's better not to make that call.
9915 Check to see if there is a target override for atomic types. */
9917 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9918 targetm.atomic_align_for_mode (QImode));
9919 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9920 targetm.atomic_align_for_mode (HImode));
9921 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9922 targetm.atomic_align_for_mode (SImode));
9923 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9924 targetm.atomic_align_for_mode (DImode));
9925 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9926 targetm.atomic_align_for_mode (TImode));
9928 access_public_node = get_identifier ("public");
9929 access_protected_node = get_identifier ("protected");
9930 access_private_node = get_identifier ("private");
9932 /* Define these next since types below may use them. */
9933 integer_zero_node = build_int_cst (integer_type_node, 0);
9934 integer_one_node = build_int_cst (integer_type_node, 1);
9935 integer_three_node = build_int_cst (integer_type_node, 3);
9936 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9938 size_zero_node = size_int (0);
9939 size_one_node = size_int (1);
9940 bitsize_zero_node = bitsize_int (0);
9941 bitsize_one_node = bitsize_int (1);
9942 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9944 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9945 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9947 void_type_node = make_node (VOID_TYPE);
9948 layout_type (void_type_node);
9950 pointer_bounds_type_node = targetm.chkp_bound_type ();
9952 /* We are not going to have real types in C with less than byte alignment,
9953 so we might as well not have any types that claim to have it. */
9954 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9955 TYPE_USER_ALIGN (void_type_node) = 0;
9957 void_node = make_node (VOID_CST);
9958 TREE_TYPE (void_node) = void_type_node;
9960 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9961 layout_type (TREE_TYPE (null_pointer_node));
9963 ptr_type_node = build_pointer_type (void_type_node);
9964 const_ptr_type_node
9965 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9966 for (unsigned i = 0;
9967 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
9968 ++i)
9969 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9971 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9973 float_type_node = make_node (REAL_TYPE);
9974 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9975 layout_type (float_type_node);
9977 double_type_node = make_node (REAL_TYPE);
9978 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9979 layout_type (double_type_node);
9981 long_double_type_node = make_node (REAL_TYPE);
9982 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9983 layout_type (long_double_type_node);
9985 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9987 int n = floatn_nx_types[i].n;
9988 bool extended = floatn_nx_types[i].extended;
9989 scalar_float_mode mode;
9990 if (!targetm.floatn_mode (n, extended).exists (&mode))
9991 continue;
9992 int precision = GET_MODE_PRECISION (mode);
9993 /* Work around the rs6000 KFmode having precision 113 not
9994 128. */
9995 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9996 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9997 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9998 if (!extended)
9999 gcc_assert (min_precision == n);
10000 if (precision < min_precision)
10001 precision = min_precision;
10002 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
10003 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
10004 layout_type (FLOATN_NX_TYPE_NODE (i));
10005 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
10008 float_ptr_type_node = build_pointer_type (float_type_node);
10009 double_ptr_type_node = build_pointer_type (double_type_node);
10010 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10011 integer_ptr_type_node = build_pointer_type (integer_type_node);
10013 /* Fixed size integer types. */
10014 uint16_type_node = make_or_reuse_type (16, 1);
10015 uint32_type_node = make_or_reuse_type (32, 1);
10016 uint64_type_node = make_or_reuse_type (64, 1);
10018 /* Decimal float types. */
10019 dfloat32_type_node = make_node (REAL_TYPE);
10020 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10021 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10022 layout_type (dfloat32_type_node);
10023 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10025 dfloat64_type_node = make_node (REAL_TYPE);
10026 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10027 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10028 layout_type (dfloat64_type_node);
10029 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10031 dfloat128_type_node = make_node (REAL_TYPE);
10032 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10033 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10034 layout_type (dfloat128_type_node);
10035 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10037 complex_integer_type_node = build_complex_type (integer_type_node, true);
10038 complex_float_type_node = build_complex_type (float_type_node, true);
10039 complex_double_type_node = build_complex_type (double_type_node, true);
10040 complex_long_double_type_node = build_complex_type (long_double_type_node,
10041 true);
10043 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10045 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
10046 COMPLEX_FLOATN_NX_TYPE_NODE (i)
10047 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
10050 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10051 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10052 sat_ ## KIND ## _type_node = \
10053 make_sat_signed_ ## KIND ## _type (SIZE); \
10054 sat_unsigned_ ## KIND ## _type_node = \
10055 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10056 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10057 unsigned_ ## KIND ## _type_node = \
10058 make_unsigned_ ## KIND ## _type (SIZE);
10060 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10061 sat_ ## WIDTH ## KIND ## _type_node = \
10062 make_sat_signed_ ## KIND ## _type (SIZE); \
10063 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10064 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10065 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10066 unsigned_ ## WIDTH ## KIND ## _type_node = \
10067 make_unsigned_ ## KIND ## _type (SIZE);
10069 /* Make fixed-point type nodes based on four different widths. */
10070 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10071 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10072 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10073 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10074 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10076 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10077 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10078 NAME ## _type_node = \
10079 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10080 u ## NAME ## _type_node = \
10081 make_or_reuse_unsigned_ ## KIND ## _type \
10082 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10083 sat_ ## NAME ## _type_node = \
10084 make_or_reuse_sat_signed_ ## KIND ## _type \
10085 (GET_MODE_BITSIZE (MODE ## mode)); \
10086 sat_u ## NAME ## _type_node = \
10087 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10088 (GET_MODE_BITSIZE (U ## MODE ## mode));
10090 /* Fixed-point type and mode nodes. */
10091 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10092 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10093 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10094 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10095 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10096 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10097 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10098 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10099 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10100 MAKE_FIXED_MODE_NODE (accum, da, DA)
10101 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10104 tree t = targetm.build_builtin_va_list ();
10106 /* Many back-ends define record types without setting TYPE_NAME.
10107 If we copied the record type here, we'd keep the original
10108 record type without a name. This breaks name mangling. So,
10109 don't copy record types and let c_common_nodes_and_builtins()
10110 declare the type to be __builtin_va_list. */
10111 if (TREE_CODE (t) != RECORD_TYPE)
10112 t = build_variant_type_copy (t);
10114 va_list_type_node = t;
10118 /* Modify DECL for given flags.
10119 TM_PURE attribute is set only on types, so the function will modify
10120 DECL's type when ECF_TM_PURE is used. */
10122 void
10123 set_call_expr_flags (tree decl, int flags)
10125 if (flags & ECF_NOTHROW)
10126 TREE_NOTHROW (decl) = 1;
10127 if (flags & ECF_CONST)
10128 TREE_READONLY (decl) = 1;
10129 if (flags & ECF_PURE)
10130 DECL_PURE_P (decl) = 1;
10131 if (flags & ECF_LOOPING_CONST_OR_PURE)
10132 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10133 if (flags & ECF_NOVOPS)
10134 DECL_IS_NOVOPS (decl) = 1;
10135 if (flags & ECF_NORETURN)
10136 TREE_THIS_VOLATILE (decl) = 1;
10137 if (flags & ECF_MALLOC)
10138 DECL_IS_MALLOC (decl) = 1;
10139 if (flags & ECF_RETURNS_TWICE)
10140 DECL_IS_RETURNS_TWICE (decl) = 1;
10141 if (flags & ECF_LEAF)
10142 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10143 NULL, DECL_ATTRIBUTES (decl));
10144 if (flags & ECF_COLD)
10145 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
10146 NULL, DECL_ATTRIBUTES (decl));
10147 if (flags & ECF_RET1)
10148 DECL_ATTRIBUTES (decl)
10149 = tree_cons (get_identifier ("fn spec"),
10150 build_tree_list (NULL_TREE, build_string (1, "1")),
10151 DECL_ATTRIBUTES (decl));
10152 if ((flags & ECF_TM_PURE) && flag_tm)
10153 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10154 /* Looping const or pure is implied by noreturn.
10155 There is currently no way to declare looping const or looping pure alone. */
10156 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10157 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
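As an editorial illustration of the flag handling above (not part of the source), the decl below is hypothetical; the effects noted in the comment follow directly from set_call_expr_flags.

/* Sketch only: mark a hypothetical DECL the way the builtins below are
   marked.  Per set_call_expr_flags this sets TREE_NOTHROW and
   TREE_THIS_VOLATILE, makes the decl TREE_READONLY, and adds the "leaf"
   and "cold" attributes.  */
set_call_expr_flags (decl,
                     ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
                     | ECF_CONST | ECF_COLD);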
10161 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10163 static void
10164 local_define_builtin (const char *name, tree type, enum built_in_function code,
10165 const char *library_name, int ecf_flags)
10167 tree decl;
10169 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10170 library_name, NULL_TREE);
10171 set_call_expr_flags (decl, ecf_flags);
10173 set_builtin_decl (code, decl, true);
10176 /* Call this function after instantiating all builtins that the language
10177 front end cares about. This will build the rest of the builtins
10178 and internal functions that are relied upon by the tree optimizers and
10179 the middle-end. */
10181 void
10182 build_common_builtin_nodes (void)
10184 tree tmp, ftype;
10185 int ecf_flags;
10187 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10188 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10190 ftype = build_function_type (void_type_node, void_list_node);
10191 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10192 local_define_builtin ("__builtin_unreachable", ftype,
10193 BUILT_IN_UNREACHABLE,
10194 "__builtin_unreachable",
10195 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10196 | ECF_CONST | ECF_COLD);
10197 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10198 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10199 "abort",
10200 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
10203 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10204 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10206 ftype = build_function_type_list (ptr_type_node,
10207 ptr_type_node, const_ptr_type_node,
10208 size_type_node, NULL_TREE);
10210 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10211 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10212 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10213 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10214 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10215 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10218 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10220 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10221 const_ptr_type_node, size_type_node,
10222 NULL_TREE);
10223 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10224 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10227 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10229 ftype = build_function_type_list (ptr_type_node,
10230 ptr_type_node, integer_type_node,
10231 size_type_node, NULL_TREE);
10232 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10233 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10236 /* If we're checking the stack, `alloca' can throw. */
10237 const int alloca_flags
10238 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10240 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10242 ftype = build_function_type_list (ptr_type_node,
10243 size_type_node, NULL_TREE);
10244 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10245 "alloca", alloca_flags);
10248 ftype = build_function_type_list (ptr_type_node, size_type_node,
10249 size_type_node, NULL_TREE);
10250 local_define_builtin ("__builtin_alloca_with_align", ftype,
10251 BUILT_IN_ALLOCA_WITH_ALIGN,
10252 "__builtin_alloca_with_align",
10253 alloca_flags);
10255 ftype = build_function_type_list (ptr_type_node, size_type_node,
10256 size_type_node, size_type_node, NULL_TREE);
10257 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
10258 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
10259 "__builtin_alloca_with_align_and_max",
10260 alloca_flags);
10262 ftype = build_function_type_list (void_type_node,
10263 ptr_type_node, ptr_type_node,
10264 ptr_type_node, NULL_TREE);
10265 local_define_builtin ("__builtin_init_trampoline", ftype,
10266 BUILT_IN_INIT_TRAMPOLINE,
10267 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10268 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10269 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10270 "__builtin_init_heap_trampoline",
10271 ECF_NOTHROW | ECF_LEAF);
10272 local_define_builtin ("__builtin_init_descriptor", ftype,
10273 BUILT_IN_INIT_DESCRIPTOR,
10274 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
10276 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10277 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10278 BUILT_IN_ADJUST_TRAMPOLINE,
10279 "__builtin_adjust_trampoline",
10280 ECF_CONST | ECF_NOTHROW);
10281 local_define_builtin ("__builtin_adjust_descriptor", ftype,
10282 BUILT_IN_ADJUST_DESCRIPTOR,
10283 "__builtin_adjust_descriptor",
10284 ECF_CONST | ECF_NOTHROW);
10286 ftype = build_function_type_list (void_type_node,
10287 ptr_type_node, ptr_type_node, NULL_TREE);
10288 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10289 BUILT_IN_NONLOCAL_GOTO,
10290 "__builtin_nonlocal_goto",
10291 ECF_NORETURN | ECF_NOTHROW);
10293 ftype = build_function_type_list (void_type_node,
10294 ptr_type_node, ptr_type_node, NULL_TREE);
10295 local_define_builtin ("__builtin_setjmp_setup", ftype,
10296 BUILT_IN_SETJMP_SETUP,
10297 "__builtin_setjmp_setup", ECF_NOTHROW);
10299 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10300 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10301 BUILT_IN_SETJMP_RECEIVER,
10302 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10304 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10305 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10306 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10308 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10309 local_define_builtin ("__builtin_stack_restore", ftype,
10310 BUILT_IN_STACK_RESTORE,
10311 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10313 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10314 const_ptr_type_node, size_type_node,
10315 NULL_TREE);
10316 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10317 "__builtin_memcmp_eq",
10318 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10320 /* If there's a possibility that we might use the ARM EABI, build the
10321 alternate __cxa_end_cleanup node used to resume from C++. */
10322 if (targetm.arm_eabi_unwinder)
10324 ftype = build_function_type_list (void_type_node, NULL_TREE);
10325 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10326 BUILT_IN_CXA_END_CLEANUP,
10327 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10330 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10331 local_define_builtin ("__builtin_unwind_resume", ftype,
10332 BUILT_IN_UNWIND_RESUME,
10333 ((targetm_common.except_unwind_info (&global_options)
10334 == UI_SJLJ)
10335 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10336 ECF_NORETURN);
10338 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10340 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10341 NULL_TREE);
10342 local_define_builtin ("__builtin_return_address", ftype,
10343 BUILT_IN_RETURN_ADDRESS,
10344 "__builtin_return_address",
10345 ECF_NOTHROW);
10348 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10349 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10351 ftype = build_function_type_list (void_type_node, ptr_type_node,
10352 ptr_type_node, NULL_TREE);
10353 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10354 local_define_builtin ("__cyg_profile_func_enter", ftype,
10355 BUILT_IN_PROFILE_FUNC_ENTER,
10356 "__cyg_profile_func_enter", 0);
10357 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10358 local_define_builtin ("__cyg_profile_func_exit", ftype,
10359 BUILT_IN_PROFILE_FUNC_EXIT,
10360 "__cyg_profile_func_exit", 0);
10363 /* The exception object and filter values from the runtime. The argument
10364 must be zero before exception lowering, i.e. from the front end. After
10365 exception lowering, it will be the region number for the exception
10366 landing pad. These functions are PURE instead of CONST to prevent
10367 them from being hoisted past the exception edge that will initialize
10368 their values in the landing pad. */
10369 ftype = build_function_type_list (ptr_type_node,
10370 integer_type_node, NULL_TREE);
10371 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10372 /* Only use TM_PURE if we have TM language support. */
10373 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10374 ecf_flags |= ECF_TM_PURE;
10375 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10376 "__builtin_eh_pointer", ecf_flags);
10378 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10379 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10380 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10381 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10383 ftype = build_function_type_list (void_type_node,
10384 integer_type_node, integer_type_node,
10385 NULL_TREE);
10386 local_define_builtin ("__builtin_eh_copy_values", ftype,
10387 BUILT_IN_EH_COPY_VALUES,
10388 "__builtin_eh_copy_values", ECF_NOTHROW);
10390 /* Complex multiplication and division. These are handled as builtins
10391 rather than optabs because emit_library_call_value doesn't support
10392 complex. Further, we can do slightly better with folding these
10393 beasties if the real and imaginary parts of the arguments are separate. */
10395 int mode;
10397 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10399 char mode_name_buf[4], *q;
10400 const char *p;
10401 enum built_in_function mcode, dcode;
10402 tree type, inner_type;
10403 const char *prefix = "__";
10405 if (targetm.libfunc_gnu_prefix)
10406 prefix = "__gnu_";
10408 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10409 if (type == NULL)
10410 continue;
10411 inner_type = TREE_TYPE (type);
10413 ftype = build_function_type_list (type, inner_type, inner_type,
10414 inner_type, inner_type, NULL_TREE);
10416 mcode = ((enum built_in_function)
10417 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10418 dcode = ((enum built_in_function)
10419 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10421 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10422 *q = TOLOWER (*p);
10423 *q = '\0';
10425 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10426 NULL);
10427 local_define_builtin (built_in_names[mcode], ftype, mcode,
10428 built_in_names[mcode],
10429 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10431 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10432 NULL);
10433 local_define_builtin (built_in_names[dcode], ftype, dcode,
10434 built_in_names[dcode],
10435 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10439 init_internal_fns ();
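A brief worked example of the name construction above (editorial, assuming the standard libgcc helpers):

/* For SCmode the mode name lowercases to "sc", so with the default "__"
   prefix the loop registers built_in_names[mcode] == "__mulsc3" and
   built_in_names[dcode] == "__divsc3"; with targetm.libfunc_gnu_prefix
   set they become "__gnu_mulsc3" and "__gnu_divsc3", matching the
   complex multiply/divide helpers provided by libgcc.  */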
10442 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10443 better way.
10445 If we requested a pointer to a vector, build up the pointers that
10446 we stripped off while looking for the inner type. Similarly for
10447 return values from functions.
10449 The argument TYPE is the top of the chain, and BOTTOM is the
10450 new type which we will point to. */
10452 tree
10453 reconstruct_complex_type (tree type, tree bottom)
10455 tree inner, outer;
10457 if (TREE_CODE (type) == POINTER_TYPE)
10459 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10460 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10461 TYPE_REF_CAN_ALIAS_ALL (type));
10463 else if (TREE_CODE (type) == REFERENCE_TYPE)
10465 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10466 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10467 TYPE_REF_CAN_ALIAS_ALL (type));
10469 else if (TREE_CODE (type) == ARRAY_TYPE)
10471 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10472 outer = build_array_type (inner, TYPE_DOMAIN (type));
10474 else if (TREE_CODE (type) == FUNCTION_TYPE)
10476 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10477 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10479 else if (TREE_CODE (type) == METHOD_TYPE)
10481 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10482 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10483 so we must compensate by getting rid of it. */
10484 outer
10485 = build_method_type_directly
10486 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10487 inner,
10488 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10490 else if (TREE_CODE (type) == OFFSET_TYPE)
10492 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10493 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10495 else
10496 return bottom;
10498 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10499 TYPE_QUALS (type));
10502 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10503 the inner type. */
10504 tree
10505 build_vector_type_for_mode (tree innertype, machine_mode mode)
10507 poly_int64 nunits;
10508 unsigned int bitsize;
10510 switch (GET_MODE_CLASS (mode))
10512 case MODE_VECTOR_BOOL:
10513 case MODE_VECTOR_INT:
10514 case MODE_VECTOR_FLOAT:
10515 case MODE_VECTOR_FRACT:
10516 case MODE_VECTOR_UFRACT:
10517 case MODE_VECTOR_ACCUM:
10518 case MODE_VECTOR_UACCUM:
10519 nunits = GET_MODE_NUNITS (mode);
10520 break;
10522 case MODE_INT:
10523 /* Check that there are no leftover bits. */
10524 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10525 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10526 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10527 break;
10529 default:
10530 gcc_unreachable ();
10533 return make_vector_type (innertype, nunits, mode);
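A hedged illustration of the MODE_INT path (editorial; assumes a typical target where QImode is 8 bits and SImode is 32 bits):

/* Sketch: with 8-bit QImode elements carried in a 32-bit SImode
   container, bitsize == 32 and TYPE_SIZE (innertype) == 8, so
   nunits == 4 and the result is a 4-element vector type.  */
tree v4qi = build_vector_type_for_mode (intQI_type_node, SImode);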
10536 /* Similarly, but takes the inner type and number of units, which must be
10537 a power of two. */
10539 tree
10540 build_vector_type (tree innertype, poly_int64 nunits)
10542 return make_vector_type (innertype, nunits, VOIDmode);
10545 /* Build a truth vector with the specified length and number of units. */
10547 tree
10548 build_truth_vector_type (poly_uint64 nunits, poly_uint64 vector_size)
10550 machine_mode mask_mode
10551 = targetm.vectorize.get_mask_mode (nunits, vector_size).else_blk ();
10553 poly_uint64 vsize;
10554 if (mask_mode == BLKmode)
10555 vsize = vector_size * BITS_PER_UNIT;
10556 else
10557 vsize = GET_MODE_BITSIZE (mask_mode);
10559 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10561 tree bool_type = build_nonstandard_boolean_type (esize);
10563 return make_vector_type (bool_type, nunits, mask_mode);
10566 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10568 tree
10569 build_same_sized_truth_vector_type (tree vectype)
10571 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10572 return vectype;
10574 poly_uint64 size = GET_MODE_SIZE (TYPE_MODE (vectype));
10576 if (known_eq (size, 0U))
10577 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10579 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10582 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10584 tree
10585 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10587 tree t = make_vector_type (innertype, nunits, VOIDmode);
10588 tree cand;
10589 /* We always build the non-opaque variant before the opaque one,
10590 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10591 cand = TYPE_NEXT_VARIANT (t);
10592 if (cand
10593 && TYPE_VECTOR_OPAQUE (cand)
10594 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10595 return cand;
10596 /* Otherwise build a variant type and make sure to queue it after
10597 the non-opaque type. */
10598 cand = build_distinct_type_copy (t);
10599 TYPE_VECTOR_OPAQUE (cand) = true;
10600 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10601 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10602 TYPE_NEXT_VARIANT (t) = cand;
10603 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10604 return cand;
10607 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10609 wide_int
10610 vector_cst_int_elt (const_tree t, unsigned int i)
10612 /* First handle elements that are directly encoded. */
10613 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10614 if (i < encoded_nelts)
10615 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, i));
10617 /* Identify the pattern that contains element I and work out the index of
10618 the last encoded element for that pattern. */
10619 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10620 unsigned int pattern = i % npatterns;
10621 unsigned int count = i / npatterns;
10622 unsigned int final_i = encoded_nelts - npatterns + pattern;
10624 /* If there are no steps, the final encoded value is the right one. */
10625 if (!VECTOR_CST_STEPPED_P (t))
10626 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10628 /* Otherwise work out the value from the last two encoded elements. */
10629 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10630 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10631 wide_int diff = wi::to_wide (v2) - wi::to_wide (v1);
10632 return wi::to_wide (v2) + (count - 2) * diff;
10635 /* Return the value of element I of VECTOR_CST T. */
10637 tree
10638 vector_cst_elt (const_tree t, unsigned int i)
10640 /* First handle elements that are directly encoded. */
10641 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10642 if (i < encoded_nelts)
10643 return VECTOR_CST_ENCODED_ELT (t, i);
10645 /* If there are no steps, the final encoded value is the right one. */
10646 if (!VECTOR_CST_STEPPED_P (t))
10648 /* Identify the pattern that contains element I and work out the index of
10649 the last encoded element for that pattern. */
10650 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10651 unsigned int pattern = i % npatterns;
10652 unsigned int final_i = encoded_nelts - npatterns + pattern;
10653 return VECTOR_CST_ENCODED_ELT (t, final_i);
10656 /* Otherwise work out the value from the last two encoded elements. */
10657 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10658 vector_cst_int_elt (t, i));
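A worked example of the encoding arithmetic above may help (editorial; the values are illustrative):

/* Example: a stepped VECTOR_CST for the series { 1, 2, 3, 4, ... } has
   npatterns == 1 and encoded elements { 1, 2, 3 }, so encoded_nelts == 3.
   For i == 5: pattern = 5 % 1 = 0, count = 5 / 1 = 5,
   final_i = 3 - 1 + 0 = 2, v1 = 2, v2 = 3, diff = 1, and the result is
   v2 + (count - 2) * diff = 3 + 3 = 6, i.e. element 5 of the series.  */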
10661 /* Given an initializer INIT, return TRUE if INIT is zero or some
10662 aggregate of zeros. Otherwise return FALSE. */
10663 bool
10664 initializer_zerop (const_tree init)
10666 tree elt;
10668 STRIP_NOPS (init);
10670 switch (TREE_CODE (init))
10672 case INTEGER_CST:
10673 return integer_zerop (init);
10675 case REAL_CST:
10676 /* ??? Note that this is not correct for C4X float formats. There,
10677 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10678 negative exponent. */
10679 return real_zerop (init)
10680 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10682 case FIXED_CST:
10683 return fixed_zerop (init);
10685 case COMPLEX_CST:
10686 return integer_zerop (init)
10687 || (real_zerop (init)
10688 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10689 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10691 case VECTOR_CST:
10692 return (VECTOR_CST_NPATTERNS (init) == 1
10693 && VECTOR_CST_DUPLICATE_P (init)
10694 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)));
10696 case CONSTRUCTOR:
10698 unsigned HOST_WIDE_INT idx;
10700 if (TREE_CLOBBER_P (init))
10701 return false;
10702 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10703 if (!initializer_zerop (elt))
10704 return false;
10705 return true;
10708 case STRING_CST:
10710 int i;
10712 /* We need to loop through all elements to handle cases like
10713 "\0" and "\0foobar". */
10714 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10715 if (TREE_STRING_POINTER (init)[i] != '\0')
10716 return false;
10718 return true;
10721 default:
10722 return false;
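For illustration (editorial sketch, not source text), a few representative inputs and the results that follow from the cases above:

/* initializer_zerop (build_int_cst (integer_type_node, 0))   -> true
   initializer_zerop (build_int_cst (integer_type_node, 7))   -> false
   a REAL_CST of -0.0                                          -> false
   a CONSTRUCTOR all of whose elements are zero initializers   -> true
   a clobber CONSTRUCTOR (TREE_CLOBBER_P)                      -> false  */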
10726 /* Check if vector VEC consists of all equal elements and that the
10727 number of elements corresponds to the type of VEC.
10728 The function returns the first element of the vector,
10729 or NULL_TREE if the vector is not uniform. */
10730 tree
10731 uniform_vector_p (const_tree vec)
10733 tree first, t;
10734 unsigned HOST_WIDE_INT i, nelts;
10736 if (vec == NULL_TREE)
10737 return NULL_TREE;
10739 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10741 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10742 return TREE_OPERAND (vec, 0);
10744 else if (TREE_CODE (vec) == VECTOR_CST)
10746 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10747 return VECTOR_CST_ENCODED_ELT (vec, 0);
10748 return NULL_TREE;
10751 else if (TREE_CODE (vec) == CONSTRUCTOR
10752 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10754 first = error_mark_node;
10756 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10758 if (i == 0)
10760 first = t;
10761 continue;
10763 if (!operand_equal_p (first, t, 0))
10764 return NULL_TREE;
10766 if (i != nelts)
10767 return NULL_TREE;
10769 return first;
10772 return NULL_TREE;
10775 /* Build an empty statement at location LOC. */
10777 tree
10778 build_empty_stmt (location_t loc)
10780 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10781 SET_EXPR_LOCATION (t, loc);
10782 return t;
10786 /* Build an OpenMP clause with code CODE. LOC is the location of the
10787 clause. */
10789 tree
10790 build_omp_clause (location_t loc, enum omp_clause_code code)
10792 tree t;
10793 int size, length;
10795 length = omp_clause_num_ops[code];
10796 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10798 record_node_allocation_statistics (OMP_CLAUSE, size);
10800 t = (tree) ggc_internal_alloc (size);
10801 memset (t, 0, size);
10802 TREE_SET_CODE (t, OMP_CLAUSE);
10803 OMP_CLAUSE_SET_CODE (t, code);
10804 OMP_CLAUSE_LOCATION (t) = loc;
10806 return t;
10809 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10810 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10811 Except for the CODE and operand count field, other storage for the
10812 object is initialized to zeros. */
10814 tree
10815 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10817 tree t;
10818 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10820 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10821 gcc_assert (len >= 1);
10823 record_node_allocation_statistics (code, length);
10825 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10827 TREE_SET_CODE (t, code);
10829 /* Can't use TREE_OPERAND to store the length because if checking is
10830 enabled, it will try to check the length before we store it. :-P */
10831 t->exp.operands[0] = build_int_cst (sizetype, len);
10833 return t;
10836 /* Helper function for build_call_* functions; build a CALL_EXPR with
10837 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10838 the argument slots. */
10840 static tree
10841 build_call_1 (tree return_type, tree fn, int nargs)
10843 tree t;
10845 t = build_vl_exp (CALL_EXPR, nargs + 3);
10846 TREE_TYPE (t) = return_type;
10847 CALL_EXPR_FN (t) = fn;
10848 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10850 return t;
10853 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10854 FN and a null static chain slot. NARGS is the number of call arguments
10855 which are specified as "..." arguments. */
10857 tree
10858 build_call_nary (tree return_type, tree fn, int nargs, ...)
10860 tree ret;
10861 va_list args;
10862 va_start (args, nargs);
10863 ret = build_call_valist (return_type, fn, nargs, args);
10864 va_end (args);
10865 return ret;
10868 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10869 FN and a null static chain slot. NARGS is the number of call arguments
10870 which are specified as a va_list ARGS. */
10872 tree
10873 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10875 tree t;
10876 int i;
10878 t = build_call_1 (return_type, fn, nargs);
10879 for (i = 0; i < nargs; i++)
10880 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10881 process_call_operands (t);
10882 return t;
10885 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10886 FN and a null static chain slot. NARGS is the number of call arguments
10887 which are specified as a tree array ARGS. */
10889 tree
10890 build_call_array_loc (location_t loc, tree return_type, tree fn,
10891 int nargs, const tree *args)
10893 tree t;
10894 int i;
10896 t = build_call_1 (return_type, fn, nargs);
10897 for (i = 0; i < nargs; i++)
10898 CALL_EXPR_ARG (t, i) = args[i];
10899 process_call_operands (t);
10900 SET_EXPR_LOCATION (t, loc);
10901 return t;
10904 /* Like build_call_array, but takes a vec. */
10906 tree
10907 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10909 tree ret, t;
10910 unsigned int ix;
10912 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10913 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10914 CALL_EXPR_ARG (ret, ix) = t;
10915 process_call_operands (ret);
10916 return ret;
10919 /* Conveniently construct a function call expression. FNDECL names the
10920 function to be called and N arguments are passed in the array
10921 ARGARRAY. */
10923 tree
10924 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10926 tree fntype = TREE_TYPE (fndecl);
10927 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10929 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10932 /* Conveniently construct a function call expression. FNDECL names the
10933 function to be called and the arguments are passed in the vector
10934 VEC. */
10936 tree
10937 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10939 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10940 vec_safe_address (vec));
10944 /* Conveniently construct a function call expression. FNDECL names the
10945 function to be called, N is the number of arguments, and the "..."
10946 parameters are the argument expressions. */
10948 tree
10949 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10951 va_list ap;
10952 tree *argarray = XALLOCAVEC (tree, n);
10953 int i;
10955 va_start (ap, n);
10956 for (i = 0; i < n; i++)
10957 argarray[i] = va_arg (ap, tree);
10958 va_end (ap);
10959 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10962 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10963 varargs macros aren't supported by all bootstrap compilers. */
10965 tree
10966 build_call_expr (tree fndecl, int n, ...)
10968 va_list ap;
10969 tree *argarray = XALLOCAVEC (tree, n);
10970 int i;
10972 va_start (ap, n);
10973 for (i = 0; i < n; i++)
10974 argarray[i] = va_arg (ap, tree);
10975 va_end (ap);
10976 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
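A hedged usage sketch (not part of tree.c): building a call to the memcpy builtin registered in build_common_builtin_nodes, where dst, src and len are hypothetical operand trees supplied by the caller.

/* Sketch only: dst, src and len are assumed to be existing trees of
   pointer, const pointer and size type respectively.  */
tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
tree call = build_call_expr (fndecl, 3, dst, src, len);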
10979 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10980 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10981 It will get gimplified later into an ordinary internal function. */
10983 tree
10984 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10985 tree type, int n, const tree *args)
10987 tree t = build_call_1 (type, NULL_TREE, n);
10988 for (int i = 0; i < n; ++i)
10989 CALL_EXPR_ARG (t, i) = args[i];
10990 SET_EXPR_LOCATION (t, loc);
10991 CALL_EXPR_IFN (t) = ifn;
10992 return t;
10995 /* Build an internal call expression. This is just like CALL_EXPR, except
10996 its CALL_EXPR_FN is NULL. It will get gimplified later into an
10997 ordinary internal function. */
10999 tree
11000 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11001 tree type, int n, ...)
11003 va_list ap;
11004 tree *argarray = XALLOCAVEC (tree, n);
11005 int i;
11007 va_start (ap, n);
11008 for (i = 0; i < n; i++)
11009 argarray[i] = va_arg (ap, tree);
11010 va_end (ap);
11011 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11014 /* Return a function call to FN, if the target is guaranteed to support it,
11015 or null otherwise.
11017 N is the number of arguments, passed in the "...", and TYPE is the
11018 type of the return value. */
11020 tree
11021 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11022 int n, ...)
11024 va_list ap;
11025 tree *argarray = XALLOCAVEC (tree, n);
11026 int i;
11028 va_start (ap, n);
11029 for (i = 0; i < n; i++)
11030 argarray[i] = va_arg (ap, tree);
11031 va_end (ap);
11032 if (internal_fn_p (fn))
11034 internal_fn ifn = as_internal_fn (fn);
11035 if (direct_internal_fn_p (ifn))
11037 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11038 if (!direct_internal_fn_supported_p (ifn, types,
11039 OPTIMIZE_FOR_BOTH))
11040 return NULL_TREE;
11042 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11044 else
11046 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11047 if (!fndecl)
11048 return NULL_TREE;
11049 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11053 /* Return a function call to the appropriate builtin alloca variant.
11055 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
11056 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
11057 bound for SIZE in case it is not a fixed value. */
11059 tree
11060 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
11062 if (max_size >= 0)
11064 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
11065 return
11066 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
11068 else if (align > 0)
11070 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
11071 return build_call_expr (t, 2, size, size_int (align));
11073 else
11075 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
11076 return build_call_expr (t, 1, size);
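The dispatch above can be summarized with a small editorial sketch (SIZE is a hypothetical size tree):

/* build_alloca_call_expr (size, 0, -1)     -> __builtin_alloca (size)
   build_alloca_call_expr (size, 16, -1)    -> __builtin_alloca_with_align (size, 16)
   build_alloca_call_expr (size, 16, 4096)  -> __builtin_alloca_with_align_and_max
                                               (size, 16, 4096)
   The align and max_size arguments are wrapped via size_int before being
   passed to build_call_expr.  */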
11080 /* Create a new constant string literal and return a char* pointer to it.
11081 The STRING_CST value is the LEN characters at STR. */
11082 tree
11083 build_string_literal (int len, const char *str)
11085 tree t, elem, index, type;
11087 t = build_string (len, str);
11088 elem = build_type_variant (char_type_node, 1, 0);
11089 index = build_index_type (size_int (len - 1));
11090 type = build_array_type (elem, index);
11091 TREE_TYPE (t) = type;
11092 TREE_CONSTANT (t) = 1;
11093 TREE_READONLY (t) = 1;
11094 TREE_STATIC (t) = 1;
11096 type = build_pointer_type (elem);
11097 t = build1 (ADDR_EXPR, type,
11098 build4 (ARRAY_REF, elem,
11099 t, integer_zero_node, NULL_TREE, NULL_TREE));
11100 return t;
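A short usage sketch (editorial; puts_decl is a hypothetical FUNCTION_DECL):

/* Build the constant string "hi" (sizeof "hi" == 3 includes the
   terminating NUL) and pass its address to a hypothetical puts-like
   function.  build_string_literal returns an ADDR_EXPR pointing at the
   STRING_CST.  */
tree str = build_string_literal (sizeof "hi", "hi");
tree call = build_call_expr (puts_decl, 1, str);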
11105 /* Return true if T (assumed to be a DECL) must be assigned a memory
11106 location. */
11108 bool
11109 needs_to_live_in_memory (const_tree t)
11111 return (TREE_ADDRESSABLE (t)
11112 || is_global_var (t)
11113 || (TREE_CODE (t) == RESULT_DECL
11114 && !DECL_BY_REFERENCE (t)
11115 && aggregate_value_p (t, current_function_decl)));
11118 /* Return the value of the constant X, sign-extended. */
11120 HOST_WIDE_INT
11121 int_cst_value (const_tree x)
11123 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11124 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11126 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11127 gcc_assert (cst_and_fits_in_hwi (x));
11129 if (bits < HOST_BITS_PER_WIDE_INT)
11131 bool negative = ((val >> (bits - 1)) & 1) != 0;
11132 if (negative)
11133 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11134 else
11135 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11138 return val;
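A worked example of the sign extension above (editorial):

/* For a constant of an 8-bit signed type whose low bits are 0xf0,
   bits == 8 and val == 0xf0; (val >> 7) & 1 is set, so val is or-ed
   with ~(unsigned HOST_WIDE_INT) 0xff and -16 is returned.  For 0x70
   the sign bit is clear, the high bits are masked off, and 112 is
   returned.  */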
11141 /* If TYPE is an integral or pointer type, return an integer type with
11142 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11143 if TYPE is already an integer type of signedness UNSIGNEDP. */
11145 tree
11146 signed_or_unsigned_type_for (int unsignedp, tree type)
11148 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11149 return type;
11151 if (TREE_CODE (type) == VECTOR_TYPE)
11153 tree inner = TREE_TYPE (type);
11154 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11155 if (!inner2)
11156 return NULL_TREE;
11157 if (inner == inner2)
11158 return type;
11159 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11162 if (!INTEGRAL_TYPE_P (type)
11163 && !POINTER_TYPE_P (type)
11164 && TREE_CODE (type) != OFFSET_TYPE)
11165 return NULL_TREE;
11167 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11170 /* If TYPE is an integral or pointer type, return an integer type with
11171 the same precision which is unsigned, or itself if TYPE is already an
11172 unsigned integer type. */
11174 tree
11175 unsigned_type_for (tree type)
11177 return signed_or_unsigned_type_for (1, type);
11180 /* If TYPE is an integral or pointer type, return an integer type with
11181 the same precision which is signed, or itself if TYPE is already a
11182 signed integer type. */
11184 tree
11185 signed_type_for (tree type)
11187 return signed_or_unsigned_type_for (0, type);
11190 /* If TYPE is a vector type, return a signed integer vector type with the
11191 same width and number of subparts. Otherwise return boolean_type_node. */
11193 tree
11194 truth_type_for (tree type)
11196 if (TREE_CODE (type) == VECTOR_TYPE)
11198 if (VECTOR_BOOLEAN_TYPE_P (type))
11199 return type;
11200 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11201 GET_MODE_SIZE (TYPE_MODE (type)));
11203 else
11204 return boolean_type_node;
11207 /* Returns the largest value obtainable by casting something in INNER type to
11208 OUTER type. */
11210 tree
11211 upper_bound_in_type (tree outer, tree inner)
11213 unsigned int det = 0;
11214 unsigned oprec = TYPE_PRECISION (outer);
11215 unsigned iprec = TYPE_PRECISION (inner);
11216 unsigned prec;
11218 /* Compute a unique number for every combination. */
11219 det |= (oprec > iprec) ? 4 : 0;
11220 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11221 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11223 /* Determine the exponent to use. */
11224 switch (det)
11226 case 0:
11227 case 1:
11228 /* oprec <= iprec, outer: signed, inner: don't care. */
11229 prec = oprec - 1;
11230 break;
11231 case 2:
11232 case 3:
11233 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11234 prec = oprec;
11235 break;
11236 case 4:
11237 /* oprec > iprec, outer: signed, inner: signed. */
11238 prec = iprec - 1;
11239 break;
11240 case 5:
11241 /* oprec > iprec, outer: signed, inner: unsigned. */
11242 prec = iprec;
11243 break;
11244 case 6:
11245 /* oprec > iprec, outer: unsigned, inner: signed. */
11246 prec = oprec;
11247 break;
11248 case 7:
11249 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11250 prec = iprec;
11251 break;
11252 default:
11253 gcc_unreachable ();
11256 return wide_int_to_tree (outer,
11257 wi::mask (prec, false, TYPE_PRECISION (outer)));
11260 /* Returns the smallest value obtainable by casting something in INNER type to
11261 OUTER type. */
11263 tree
11264 lower_bound_in_type (tree outer, tree inner)
11266 unsigned oprec = TYPE_PRECISION (outer);
11267 unsigned iprec = TYPE_PRECISION (inner);
11269 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11270 and obtain 0. */
11271 if (TYPE_UNSIGNED (outer)
11272 /* If we are widening something of an unsigned type, OUTER type
11273 contains all values of INNER type. In particular, both INNER
11274 and OUTER types have zero in common. */
11275 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11276 return build_int_cst (outer, 0);
11277 else
11279 /* If we are widening a signed type to another signed type, we
11280 want to obtain -2^(iprec-1). If we are keeping the
11281 precision or narrowing to a signed type, we want to obtain
11282 -2^(oprec-1). */
11283 unsigned prec = oprec > iprec ? iprec : oprec;
11284 return wide_int_to_tree (outer,
11285 wi::mask (prec - 1, true,
11286 TYPE_PRECISION (outer)));
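Two worked cases for the bounds above (editorial):

/* Casting a 16-bit signed INNER to an 8-bit unsigned OUTER:
   upper_bound_in_type computes det == 2 and prec == oprec == 8, so the
   upper bound is 2^8 - 1 == 255; lower_bound_in_type returns 0 because
   OUTER is unsigned.  Casting 16-bit signed to 32-bit signed: det == 4,
   prec == iprec - 1 == 15, upper bound 2^15 - 1 == 32767, and the lower
   bound is -2^15 == -32768.  */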
11290 /* Return nonzero if two operands that are suitable for PHI nodes are
11291 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11292 SSA_NAME or invariant. Note that this is strictly an optimization.
11293 That is, callers of this function can directly call operand_equal_p
11294 and get the same result, only slower. */
11296 int
11297 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11299 if (arg0 == arg1)
11300 return 1;
11301 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11302 return 0;
11303 return operand_equal_p (arg0, arg1, 0);
11306 /* Returns the number of zeros at the end of the binary representation of X. */
11308 tree
11309 num_ending_zeros (const_tree x)
11311 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11315 #define WALK_SUBTREE(NODE) \
11316 do \
11318 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11319 if (result) \
11320 return result; \
11322 while (0)
11324 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11325 to be walked whenever a type is seen in the tree. The rest of the operands
11326 and the return value are as for walk_tree. */
11328 static tree
11329 walk_type_fields (tree type, walk_tree_fn func, void *data,
11330 hash_set<tree> *pset, walk_tree_lh lh)
11332 tree result = NULL_TREE;
11334 switch (TREE_CODE (type))
11336 case POINTER_TYPE:
11337 case REFERENCE_TYPE:
11338 case VECTOR_TYPE:
11339 /* We have to worry about mutually recursive pointers. These can't
11340 be written in C. They can in Ada. It's pathological, but
11341 there's an ACATS test (c38102a) that checks it. Deal with this
11342 by checking if we're pointing to another pointer, that one
11343 points to another pointer, that one does too, and we have no htab.
11344 If so, get a hash table. We check three levels deep to avoid
11345 the cost of the hash table if we don't need one. */
11346 if (POINTER_TYPE_P (TREE_TYPE (type))
11347 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11348 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11349 && !pset)
11351 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11352 func, data);
11353 if (result)
11354 return result;
11356 break;
11359 /* fall through */
11361 case COMPLEX_TYPE:
11362 WALK_SUBTREE (TREE_TYPE (type));
11363 break;
11365 case METHOD_TYPE:
11366 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11368 /* Fall through. */
11370 case FUNCTION_TYPE:
11371 WALK_SUBTREE (TREE_TYPE (type));
11373 tree arg;
11375 /* We never want to walk into default arguments. */
11376 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11377 WALK_SUBTREE (TREE_VALUE (arg));
11379 break;
11381 case ARRAY_TYPE:
11382 /* Don't follow this node's type if it is a pointer, for fear that
11383 we'll have infinite recursion. If we have a PSET, then we
11384 need not fear. */
11385 if (pset
11386 || (!POINTER_TYPE_P (TREE_TYPE (type))
11387 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11388 WALK_SUBTREE (TREE_TYPE (type));
11389 WALK_SUBTREE (TYPE_DOMAIN (type));
11390 break;
11392 case OFFSET_TYPE:
11393 WALK_SUBTREE (TREE_TYPE (type));
11394 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11395 break;
11397 default:
11398 break;
11401 return NULL_TREE;
11404 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11405 called with the DATA and the address of each sub-tree. If FUNC returns a
11406 non-NULL value, the traversal is stopped, and the value returned by FUNC
11407 is returned. If PSET is non-NULL it is used to record the nodes visited,
11408 and to avoid visiting a node more than once. */
11410 tree
11411 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11412 hash_set<tree> *pset, walk_tree_lh lh)
11414 enum tree_code code;
11415 int walk_subtrees;
11416 tree result;
11418 #define WALK_SUBTREE_TAIL(NODE) \
11419 do \
11421 tp = & (NODE); \
11422 goto tail_recurse; \
11424 while (0)
11426 tail_recurse:
11427 /* Skip empty subtrees. */
11428 if (!*tp)
11429 return NULL_TREE;
11431 /* Don't walk the same tree twice, if the user has requested
11432 that we avoid doing so. */
11433 if (pset && pset->add (*tp))
11434 return NULL_TREE;
11436 /* Call the function. */
11437 walk_subtrees = 1;
11438 result = (*func) (tp, &walk_subtrees, data);
11440 /* If we found something, return it. */
11441 if (result)
11442 return result;
11444 code = TREE_CODE (*tp);
11446 /* Even if we didn't, FUNC may have decided that there was nothing
11447 interesting below this point in the tree. */
11448 if (!walk_subtrees)
11450 /* But we still need to check our siblings. */
11451 if (code == TREE_LIST)
11452 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11453 else if (code == OMP_CLAUSE)
11454 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11455 else
11456 return NULL_TREE;
11459 if (lh)
11461 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11462 if (result || !walk_subtrees)
11463 return result;
11466 switch (code)
11468 case ERROR_MARK:
11469 case IDENTIFIER_NODE:
11470 case INTEGER_CST:
11471 case REAL_CST:
11472 case FIXED_CST:
11473 case VECTOR_CST:
11474 case STRING_CST:
11475 case BLOCK:
11476 case PLACEHOLDER_EXPR:
11477 case SSA_NAME:
11478 case FIELD_DECL:
11479 case RESULT_DECL:
11480 /* None of these have subtrees other than those already walked
11481 above. */
11482 break;
11484 case TREE_LIST:
11485 WALK_SUBTREE (TREE_VALUE (*tp));
11486 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11487 break;
11489 case TREE_VEC:
11491 int len = TREE_VEC_LENGTH (*tp);
11493 if (len == 0)
11494 break;
11496 /* Walk all elements but the first. */
11497 while (--len)
11498 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11500 /* Now walk the first one as a tail call. */
11501 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11504 case COMPLEX_CST:
11505 WALK_SUBTREE (TREE_REALPART (*tp));
11506 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11508 case CONSTRUCTOR:
11510 unsigned HOST_WIDE_INT idx;
11511 constructor_elt *ce;
11513 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11514 idx++)
11515 WALK_SUBTREE (ce->value);
11517 break;
11519 case SAVE_EXPR:
11520 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11522 case BIND_EXPR:
11524 tree decl;
11525 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11527 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11528 into declarations that are just mentioned, rather than
11529 declared; they don't really belong to this part of the tree.
11530 And, we can see cycles: the initializer for a declaration
11531 can refer to the declaration itself. */
11532 WALK_SUBTREE (DECL_INITIAL (decl));
11533 WALK_SUBTREE (DECL_SIZE (decl));
11534 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11536 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11539 case STATEMENT_LIST:
11541 tree_stmt_iterator i;
11542 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11543 WALK_SUBTREE (*tsi_stmt_ptr (i));
11545 break;
11547 case OMP_CLAUSE:
11548 switch (OMP_CLAUSE_CODE (*tp))
11550 case OMP_CLAUSE_GANG:
11551 case OMP_CLAUSE__GRIDDIM_:
11552 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11553 /* FALLTHRU */
11555 case OMP_CLAUSE_ASYNC:
11556 case OMP_CLAUSE_WAIT:
11557 case OMP_CLAUSE_WORKER:
11558 case OMP_CLAUSE_VECTOR:
11559 case OMP_CLAUSE_NUM_GANGS:
11560 case OMP_CLAUSE_NUM_WORKERS:
11561 case OMP_CLAUSE_VECTOR_LENGTH:
11562 case OMP_CLAUSE_PRIVATE:
11563 case OMP_CLAUSE_SHARED:
11564 case OMP_CLAUSE_FIRSTPRIVATE:
11565 case OMP_CLAUSE_COPYIN:
11566 case OMP_CLAUSE_COPYPRIVATE:
11567 case OMP_CLAUSE_FINAL:
11568 case OMP_CLAUSE_IF:
11569 case OMP_CLAUSE_NUM_THREADS:
11570 case OMP_CLAUSE_SCHEDULE:
11571 case OMP_CLAUSE_UNIFORM:
11572 case OMP_CLAUSE_DEPEND:
11573 case OMP_CLAUSE_NUM_TEAMS:
11574 case OMP_CLAUSE_THREAD_LIMIT:
11575 case OMP_CLAUSE_DEVICE:
11576 case OMP_CLAUSE_DIST_SCHEDULE:
11577 case OMP_CLAUSE_SAFELEN:
11578 case OMP_CLAUSE_SIMDLEN:
11579 case OMP_CLAUSE_ORDERED:
11580 case OMP_CLAUSE_PRIORITY:
11581 case OMP_CLAUSE_GRAINSIZE:
11582 case OMP_CLAUSE_NUM_TASKS:
11583 case OMP_CLAUSE_HINT:
11584 case OMP_CLAUSE_TO_DECLARE:
11585 case OMP_CLAUSE_LINK:
11586 case OMP_CLAUSE_USE_DEVICE_PTR:
11587 case OMP_CLAUSE_IS_DEVICE_PTR:
11588 case OMP_CLAUSE__LOOPTEMP_:
11589 case OMP_CLAUSE__SIMDUID_:
11590 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11591 /* FALLTHRU */
11593 case OMP_CLAUSE_INDEPENDENT:
11594 case OMP_CLAUSE_NOWAIT:
11595 case OMP_CLAUSE_DEFAULT:
11596 case OMP_CLAUSE_UNTIED:
11597 case OMP_CLAUSE_MERGEABLE:
11598 case OMP_CLAUSE_PROC_BIND:
11599 case OMP_CLAUSE_INBRANCH:
11600 case OMP_CLAUSE_NOTINBRANCH:
11601 case OMP_CLAUSE_FOR:
11602 case OMP_CLAUSE_PARALLEL:
11603 case OMP_CLAUSE_SECTIONS:
11604 case OMP_CLAUSE_TASKGROUP:
11605 case OMP_CLAUSE_NOGROUP:
11606 case OMP_CLAUSE_THREADS:
11607 case OMP_CLAUSE_SIMD:
11608 case OMP_CLAUSE_DEFAULTMAP:
11609 case OMP_CLAUSE_AUTO:
11610 case OMP_CLAUSE_SEQ:
11611 case OMP_CLAUSE_TILE:
11612 case OMP_CLAUSE__SIMT_:
11613 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11615 case OMP_CLAUSE_LASTPRIVATE:
11616 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11617 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11618 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11620 case OMP_CLAUSE_COLLAPSE:
11622 int i;
11623 for (i = 0; i < 3; i++)
11624 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11625 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11628 case OMP_CLAUSE_LINEAR:
11629 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11630 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11631 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11632 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11634 case OMP_CLAUSE_ALIGNED:
11635 case OMP_CLAUSE_FROM:
11636 case OMP_CLAUSE_TO:
11637 case OMP_CLAUSE_MAP:
11638 case OMP_CLAUSE__CACHE_:
11639 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11640 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11641 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11643 case OMP_CLAUSE_REDUCTION:
11645 int i;
11646 for (i = 0; i < 5; i++)
11647 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11648 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11651 default:
11652 gcc_unreachable ();
11654 break;
11656 case TARGET_EXPR:
11658 int i, len;
11660 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11661 But, we only want to walk once. */
11662 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11663 for (i = 0; i < len; ++i)
11664 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11665 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11668 case DECL_EXPR:
11669 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11670 defining. We only want to walk into these fields of a type in this
11671 case and not in the general case of a mere reference to the type.
11673 The criterion is as follows: if the field can be an expression, it
11674 must be walked only here. This should be in keeping with the fields
11675 that are directly gimplified in gimplify_type_sizes in order for the
11676 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11677 variable-sized types.
11679 Note that DECLs get walked as part of processing the BIND_EXPR. */
11680 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11682 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11683 if (TREE_CODE (*type_p) == ERROR_MARK)
11684 return NULL_TREE;
11686 /* Call the function for the type. See if it returns anything or
11687 doesn't want us to continue. If we are to continue, walk both
11688 the normal fields and those for the declaration case. */
11689 result = (*func) (type_p, &walk_subtrees, data);
11690 if (result || !walk_subtrees)
11691 return result;
11693 /* But do not walk a pointed-to type since it may itself need to
11694 be walked in the declaration case if it isn't anonymous. */
11695 if (!POINTER_TYPE_P (*type_p))
11697 result = walk_type_fields (*type_p, func, data, pset, lh);
11698 if (result)
11699 return result;
11702 /* If this is a record type, also walk the fields. */
11703 if (RECORD_OR_UNION_TYPE_P (*type_p))
11705 tree field;
11707 for (field = TYPE_FIELDS (*type_p); field;
11708 field = DECL_CHAIN (field))
11710 /* We'd like to look at the type of the field, but we can
11711 easily get infinite recursion. So assume it's pointed
11712 to elsewhere in the tree. Also, ignore things that
11713 aren't fields. */
11714 if (TREE_CODE (field) != FIELD_DECL)
11715 continue;
11717 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11718 WALK_SUBTREE (DECL_SIZE (field));
11719 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11720 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11721 WALK_SUBTREE (DECL_QUALIFIER (field));
11725 /* Same for scalar types. */
11726 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11727 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11728 || TREE_CODE (*type_p) == INTEGER_TYPE
11729 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11730 || TREE_CODE (*type_p) == REAL_TYPE)
11732 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11733 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11736 WALK_SUBTREE (TYPE_SIZE (*type_p));
11737 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11739 /* FALLTHRU */
11741 default:
11742 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11744 int i, len;
11746 /* Walk over all the sub-trees of this operand. */
11747 len = TREE_OPERAND_LENGTH (*tp);
11749 /* Go through the subtrees. We need to do this in forward order so
11750 that the scope of a FOR_EXPR is handled properly. */
11751 if (len)
11753 for (i = 0; i < len - 1; ++i)
11754 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11755 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11758 /* If this is a type, walk the needed fields in the type. */
11759 else if (TYPE_P (*tp))
11760 return walk_type_fields (*tp, func, data, pset, lh);
11761 break;
11764 /* We didn't find what we were looking for. */
11765 return NULL_TREE;
11767 #undef WALK_SUBTREE_TAIL
11769 #undef WALK_SUBTREE
11771 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11773 tree
11774 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11775 walk_tree_lh lh)
11777 tree result;
11779 hash_set<tree> pset;
11780 result = walk_tree_1 (tp, func, data, &pset, lh);
11781 return result;
11785 tree
11786 tree_block (tree t)
11788 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11790 if (IS_EXPR_CODE_CLASS (c))
11791 return LOCATION_BLOCK (t->exp.locus);
11792 gcc_unreachable ();
11793 return NULL;
11796 void
11797 tree_set_block (tree t, tree b)
11799 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11801 if (IS_EXPR_CODE_CLASS (c))
11803 t->exp.locus = set_block (t->exp.locus, b);
11805 else
11806 gcc_unreachable ();
11809 /* Create a nameless artificial label and put it in the current
11810 function context. The label has a location of LOC. Returns the
11811 newly created label. */
11813 tree
11814 create_artificial_label (location_t loc)
11816 tree lab = build_decl (loc,
11817 LABEL_DECL, NULL_TREE, void_type_node);
11819 DECL_ARTIFICIAL (lab) = 1;
11820 DECL_IGNORED_P (lab) = 1;
11821 DECL_CONTEXT (lab) = current_function_decl;
11822 return lab;
11825 /* Given a tree, try to return a useful variable name that we can use
11826 to prefix a temporary that is being assigned the value of the tree.
11827 E.g. given <temp> = &A, return A. */
11829 const char *
11830 get_name (tree t)
11832 tree stripped_decl;
11834 stripped_decl = t;
11835 STRIP_NOPS (stripped_decl);
11836 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11837 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11838 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11840 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11841 if (!name)
11842 return NULL;
11843 return IDENTIFIER_POINTER (name);
11845 else
11847 switch (TREE_CODE (stripped_decl))
11849 case ADDR_EXPR:
11850 return get_name (TREE_OPERAND (stripped_decl, 0));
11851 default:
11852 return NULL;
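/* An illustrative example, not from the original sources: for a
   statement such as

     D.1234 = &counter;

   get_name applied to the right-hand side looks through the ADDR_EXPR,
   reaches the VAR_DECL "counter" and returns its identifier string,
   which callers typically use as a naming prefix for the temporary on
   the left-hand side.  */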
11857 /* Return true if TYPE has a variable argument list. */
11859 bool
11860 stdarg_p (const_tree fntype)
11862 function_args_iterator args_iter;
11863 tree n = NULL_TREE, t;
11865 if (!fntype)
11866 return false;
11868 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11870 n = t;
11873 return n != NULL_TREE && n != void_type_node;
11876 /* Return true if TYPE has a prototype. */
11878 bool
11879 prototype_p (const_tree fntype)
11881 tree t;
11883 gcc_assert (fntype != NULL_TREE);
11885 t = TYPE_ARG_TYPES (fntype);
11886 return (t != NULL_TREE);
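/* Illustrative cases (not part of the original source), for C function
   types:

     int f (int, ...);   stdarg_p -> true    prototype_p -> true
     int g (void);       stdarg_p -> false   prototype_p -> true
     int h ();           stdarg_p -> false   prototype_p -> false

   For g the argument list ends in void_type_node; for the unprototyped
   h, TYPE_ARG_TYPES is NULL_TREE.  */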
11889 /* If BLOCK is inlined from an __attribute__((__artificial__))
11890 routine, return a pointer to the location from which it has been
11891 called. */
11892 location_t *
11893 block_nonartificial_location (tree block)
11895 location_t *ret = NULL;
11897 while (block && TREE_CODE (block) == BLOCK
11898 && BLOCK_ABSTRACT_ORIGIN (block))
11900 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11902 while (TREE_CODE (ao) == BLOCK
11903 && BLOCK_ABSTRACT_ORIGIN (ao)
11904 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11905 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11907 if (TREE_CODE (ao) == FUNCTION_DECL)
11909 /* If AO is an artificial inline, point RET to the
11910 call site locus at which it has been inlined and continue
11911 the loop, in case AO's caller is also an artificial
11912 inline. */
11913 if (DECL_DECLARED_INLINE_P (ao)
11914 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11915 ret = &BLOCK_SOURCE_LOCATION (block);
11916 else
11917 break;
11919 else if (TREE_CODE (ao) != BLOCK)
11920 break;
11922 block = BLOCK_SUPERCONTEXT (block);
11924 return ret;
11928 /* If EXP is inlined from an __attribute__((__artificial__))
11929 function, return the location of the original call expression. */
11931 location_t
11932 tree_nonartificial_location (tree exp)
11934 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11936 if (loc)
11937 return *loc;
11938 else
11939 return EXPR_LOCATION (exp);
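/* For illustration (not from the original source): fortified wrappers
   in system headers are declared along the lines of

     extern __inline __attribute__ ((__always_inline__, __artificial__))
     void *memcpy (void *d, const void *s, size_t n) { ... }

   When a diagnostic is issued from code inlined out of such a wrapper,
   these two functions let it point at the user's call site instead of
   at the artificial inline body.  */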
11943 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11944 nodes. */
11946 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11948 hashval_t
11949 cl_option_hasher::hash (tree x)
11951 const_tree const t = x;
11952 const char *p;
11953 size_t i;
11954 size_t len = 0;
11955 hashval_t hash = 0;
11957 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11959 p = (const char *)TREE_OPTIMIZATION (t);
11960 len = sizeof (struct cl_optimization);
11963 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11964 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11966 else
11967 gcc_unreachable ();
11969 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11970 something else. */
11971 for (i = 0; i < len; i++)
11972 if (p[i])
11973 hash = (hash << 4) ^ ((i << 2) | p[i]);
11975 return hash;
11978 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11979 TARGET_OPTION tree node) is the same as that given by *Y, which is a
11980 node of the same kind. */
11982 bool
11983 cl_option_hasher::equal (tree x, tree y)
11985 const_tree const xt = x;
11986 const_tree const yt = y;
11987 const char *xp;
11988 const char *yp;
11989 size_t len;
11991 if (TREE_CODE (xt) != TREE_CODE (yt))
11992 return 0;
11994 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11996 xp = (const char *)TREE_OPTIMIZATION (xt);
11997 yp = (const char *)TREE_OPTIMIZATION (yt);
11998 len = sizeof (struct cl_optimization);
12001 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12003 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12004 TREE_TARGET_OPTION (yt));
12007 else
12008 gcc_unreachable ();
12010 return (memcmp (xp, yp, len) == 0);
12013 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12015 tree
12016 build_optimization_node (struct gcc_options *opts)
12018 tree t;
12020 /* Use the cache of optimization nodes. */
12022 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12023 opts);
12025 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12026 t = *slot;
12027 if (!t)
12029 /* Insert this one into the hash table. */
12030 t = cl_optimization_node;
12031 *slot = t;
12033 /* Make a new node for next time round. */
12034 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12037 return t;
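/* For illustration (not part of the original source): two functions
   carrying identical settings, e.g.

     __attribute__ ((optimize (2))) void f (void) { }
     __attribute__ ((optimize (2))) void g (void) { }

   produce the same cl_optimization contents and therefore share a
   single cached OPTIMIZATION_NODE, so comparing the nodes by pointer
   is enough to tell whether two decls use the same optimization
   settings.  */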
12040 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12042 tree
12043 build_target_option_node (struct gcc_options *opts)
12045 tree t;
12047 /* Use the cache of optimization nodes. */
12049 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12050 opts);
12052 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12053 t = *slot;
12054 if (!t)
12056 /* Insert this one into the hash table. */
12057 t = cl_target_option_node;
12058 *slot = t;
12060 /* Make a new node for next time round. */
12061 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12064 return t;
12067 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12068 so that they aren't saved during PCH writing. */
12070 void
12071 prepare_target_option_nodes_for_pch (void)
12073 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12074 for (; iter != cl_option_hash_table->end (); ++iter)
12075 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12076 TREE_TARGET_GLOBALS (*iter) = NULL;
12079 /* Determine the "ultimate origin" of a block. The block may be an inlined
12080 instance of an inlined instance of a block which is local to an inline
12081 function, so we have to trace all of the way back through the origin chain
12082 to find out what sort of node actually served as the original seed for the
12083 given block. */
12085 tree
12086 block_ultimate_origin (const_tree block)
12088 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
12090 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
12091 we're trying to output the abstract instance of this function. */
12092 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
12093 return NULL_TREE;
12095 if (immediate_origin == NULL_TREE)
12096 return NULL_TREE;
12097 else
12099 tree ret_val;
12100 tree lookahead = immediate_origin;
12104 ret_val = lookahead;
12105 lookahead = (TREE_CODE (ret_val) == BLOCK
12106 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
12108 while (lookahead != NULL && lookahead != ret_val);
12110 /* The block's abstract origin chain may not be the *ultimate* origin of
12111 the block. It could lead to a DECL that has an abstract origin set.
12112 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
12113 will give us if it has one). Note that DECL's abstract origins are
12114 supposed to be the most distant ancestor (or so decl_ultimate_origin
12115 claims), so we don't need to loop following the DECL origins. */
12116 if (DECL_P (ret_val))
12117 return DECL_ORIGIN (ret_val);
12119 return ret_val;
12123 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12124 no instruction. */
12126 bool
12127 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12129 /* Do not strip casts into or out of differing address spaces. */
12130 if (POINTER_TYPE_P (outer_type)
12131 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12133 if (!POINTER_TYPE_P (inner_type)
12134 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12135 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12136 return false;
12138 else if (POINTER_TYPE_P (inner_type)
12139 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12141 /* We already know that outer_type is not a pointer with
12142 a non-generic address space. */
12143 return false;
12146 /* Use precision rather than machine mode when we can, which gives
12147 the correct answer even for submode (bit-field) types. */
12148 if ((INTEGRAL_TYPE_P (outer_type)
12149 || POINTER_TYPE_P (outer_type)
12150 || TREE_CODE (outer_type) == OFFSET_TYPE)
12151 && (INTEGRAL_TYPE_P (inner_type)
12152 || POINTER_TYPE_P (inner_type)
12153 || TREE_CODE (inner_type) == OFFSET_TYPE))
12154 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12156 /* Otherwise fall back on comparing machine modes (e.g. for
12157 aggregate types, floats). */
12158 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
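/* Illustrative cases (not part of the original source), on a typical
   LP64 target:

     (unsigned int) i  with i of type int     -> true  (same 32-bit precision)
     (long) i          with i of type int     -> false (32 bits -> 64 bits)
     (char *) p        with p of type void *  -> true  (same precision,
                                                        generic address space)

   i.e. only representation-preserving conversions qualify.  */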
12161 /* Return true iff conversion in EXP generates no instruction. Mark
12162 it inline so that we fully inline into the stripping functions even
12163 though we have two uses of this function. */
12165 static inline bool
12166 tree_nop_conversion (const_tree exp)
12168 tree outer_type, inner_type;
12170 if (!CONVERT_EXPR_P (exp)
12171 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12172 return false;
12173 if (TREE_OPERAND (exp, 0) == error_mark_node)
12174 return false;
12176 outer_type = TREE_TYPE (exp);
12177 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12179 if (!inner_type)
12180 return false;
12182 return tree_nop_conversion_p (outer_type, inner_type);
12185 /* Return true iff conversion in EXP generates no instruction. Don't
12186 consider conversions changing the signedness. */
12188 static bool
12189 tree_sign_nop_conversion (const_tree exp)
12191 tree outer_type, inner_type;
12193 if (!tree_nop_conversion (exp))
12194 return false;
12196 outer_type = TREE_TYPE (exp);
12197 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12199 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12200 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12203 /* Strip conversions from EXP according to tree_nop_conversion and
12204 return the resulting expression. */
12206 tree
12207 tree_strip_nop_conversions (tree exp)
12209 while (tree_nop_conversion (exp))
12210 exp = TREE_OPERAND (exp, 0);
12211 return exp;
12214 /* Strip conversions from EXP according to tree_sign_nop_conversion
12215 and return the resulting expression. */
12217 tree
12218 tree_strip_sign_nop_conversions (tree exp)
12220 while (tree_sign_nop_conversion (exp))
12221 exp = TREE_OPERAND (exp, 0);
12222 return exp;
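/* An illustrative difference between the two strippers (not part of
   the original source): for an int variable x,

     tree_strip_nop_conversions ((int) (unsigned int) x)       yields x
     tree_strip_sign_nop_conversions ((int) (unsigned int) x)  is unchanged

   Both casts keep the 32-bit precision and so are nop conversions, but
   each one flips signedness, which the sign-preserving variant refuses
   to look through.  */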
12225 /* Avoid any floating point extensions from EXP. */
12226 tree
12227 strip_float_extensions (tree exp)
12229 tree sub, expt, subt;
12231 /* For a floating point constant, look up the narrowest type that can hold
12232 it properly and handle it like (type)(narrowest_type)constant.
12233 This way we can optimize for instance a=a*2.0 where "a" is float
12234 but 2.0 is a double constant. */
12235 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12237 REAL_VALUE_TYPE orig;
12238 tree type = NULL;
12240 orig = TREE_REAL_CST (exp);
12241 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12242 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12243 type = float_type_node;
12244 else if (TYPE_PRECISION (TREE_TYPE (exp))
12245 > TYPE_PRECISION (double_type_node)
12246 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12247 type = double_type_node;
12248 if (type)
12249 return build_real_truncate (type, orig);
12252 if (!CONVERT_EXPR_P (exp))
12253 return exp;
12255 sub = TREE_OPERAND (exp, 0);
12256 subt = TREE_TYPE (sub);
12257 expt = TREE_TYPE (exp);
12259 if (!FLOAT_TYPE_P (subt))
12260 return exp;
12262 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12263 return exp;
12265 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12266 return exp;
12268 return strip_float_extensions (sub);
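/* For illustration (not part of the original source): in

     float a;  a = a * 2.0;

   the constant 2.0 is a double REAL_CST that is exactly representable
   as a float, so strip_float_extensions narrows it; the multiplication
   can then be folded in single precision instead of widening 'a' to
   double.  */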
12271 /* Strip out all handled components that produce invariant
12272 offsets. */
12274 const_tree
12275 strip_invariant_refs (const_tree op)
12277 while (handled_component_p (op))
12279 switch (TREE_CODE (op))
12281 case ARRAY_REF:
12282 case ARRAY_RANGE_REF:
12283 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12284 || TREE_OPERAND (op, 2) != NULL_TREE
12285 || TREE_OPERAND (op, 3) != NULL_TREE)
12286 return NULL;
12287 break;
12289 case COMPONENT_REF:
12290 if (TREE_OPERAND (op, 2) != NULL_TREE)
12291 return NULL;
12292 break;
12294 default:;
12296 op = TREE_OPERAND (op, 0);
12299 return op;
12302 static GTY(()) tree gcc_eh_personality_decl;
12304 /* Return the GCC personality function decl. */
12306 tree
12307 lhd_gcc_personality (void)
12309 if (!gcc_eh_personality_decl)
12310 gcc_eh_personality_decl = build_personality_function ("gcc");
12311 return gcc_eh_personality_decl;
12314 /* TARGET is the call target of a GIMPLE call statement
12315 (obtained by gimple_call_fn). Return true if it is an
12316 OBJ_TYPE_REF representing a virtual call of a C++ method.
12317 (As opposed to OBJ_TYPE_REF representing objc calls
12318 through a cast where middle-end devirtualization machinery
12319 can't apply.) */
12321 bool
12322 virtual_method_call_p (const_tree target)
12324 if (TREE_CODE (target) != OBJ_TYPE_REF)
12325 return false;
12326 tree t = TREE_TYPE (target);
12327 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12328 t = TREE_TYPE (t);
12329 if (TREE_CODE (t) == FUNCTION_TYPE)
12330 return false;
12331 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12332 /* If we do not have BINFO associated, it means that type was built
12333 without devirtualization enabled. Do not consider this a virtual
12334 call. */
12335 if (!TYPE_BINFO (obj_type_ref_class (target)))
12336 return false;
12337 return true;
12340 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12342 tree
12343 obj_type_ref_class (const_tree ref)
12345 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12346 ref = TREE_TYPE (ref);
12347 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12348 ref = TREE_TYPE (ref);
12349 /* We look for the type that THIS points to. ObjC also builds
12350 OBJ_TYPE_REF with non-method calls; their first parameter
12351 ID, however, also corresponds to the class type. */
12352 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12353 || TREE_CODE (ref) == FUNCTION_TYPE);
12354 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12355 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12356 return TREE_TYPE (ref);
12359 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12361 static tree
12362 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12364 unsigned int i;
12365 tree base_binfo, b;
12367 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12368 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12369 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12370 return base_binfo;
12371 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12372 return b;
12373 return NULL;
12376 /* Try to find a base info of BINFO that would have its field decl at offset
12377 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12378 found, return it, otherwise return NULL_TREE. */
12380 tree
12381 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12383 tree type = BINFO_TYPE (binfo);
12385 while (true)
12387 HOST_WIDE_INT pos, size;
12388 tree fld;
12389 int i;
12391 if (types_same_for_odr (type, expected_type))
12392 return binfo;
12393 if (maybe_lt (offset, 0))
12394 return NULL_TREE;
12396 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12398 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12399 continue;
12401 pos = int_bit_position (fld);
12402 size = tree_to_uhwi (DECL_SIZE (fld));
12403 if (known_in_range_p (offset, pos, size))
12404 break;
12406 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12407 return NULL_TREE;
12409 /* Offset 0 indicates the primary base, whose vtable contents are
12410 represented in the binfo for the derived class. */
12411 else if (maybe_ne (offset, 0))
12413 tree found_binfo = NULL, base_binfo;
12414 /* Offsets in BINFO are in bytes relative to the whole structure
12415 while POS is in bits relative to the containing field. */
12416 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12417 / BITS_PER_UNIT);
12419 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12420 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12421 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12423 found_binfo = base_binfo;
12424 break;
12426 if (found_binfo)
12427 binfo = found_binfo;
12428 else
12429 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12430 binfo_offset);
12433 type = TREE_TYPE (fld);
12434 offset -= pos;
12438 /* Returns true if X is a typedef decl. */
12440 bool
12441 is_typedef_decl (const_tree x)
12443 return (x && TREE_CODE (x) == TYPE_DECL
12444 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12447 /* Returns true iff TYPE is a type variant created for a typedef. */
12449 bool
12450 typedef_variant_p (const_tree type)
12452 return is_typedef_decl (TYPE_NAME (type));
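/* For illustration (not part of the original source):

     typedef unsigned int uint32;

   creates a TYPE_DECL "uint32" whose DECL_ORIGINAL_TYPE records the
   underlying type, so is_typedef_decl is true for it and
   typedef_variant_p is true for the variant type the typedef names.
   The implicitly created TYPE_DECL of a plain 'struct s' tag typically
   has no DECL_ORIGINAL_TYPE and is not considered a typedef.  */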
12455 /* Warn about a use of an identifier which was marked deprecated. */
12456 void
12457 warn_deprecated_use (tree node, tree attr)
12459 const char *msg;
12461 if (node == 0 || !warn_deprecated_decl)
12462 return;
12464 if (!attr)
12466 if (DECL_P (node))
12467 attr = DECL_ATTRIBUTES (node);
12468 else if (TYPE_P (node))
12470 tree decl = TYPE_STUB_DECL (node);
12471 if (decl)
12472 attr = lookup_attribute ("deprecated",
12473 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12477 if (attr)
12478 attr = lookup_attribute ("deprecated", attr);
12480 if (attr)
12481 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12482 else
12483 msg = NULL;
12485 bool w;
12486 if (DECL_P (node))
12488 if (msg)
12489 w = warning (OPT_Wdeprecated_declarations,
12490 "%qD is deprecated: %s", node, msg);
12491 else
12492 w = warning (OPT_Wdeprecated_declarations,
12493 "%qD is deprecated", node);
12494 if (w)
12495 inform (DECL_SOURCE_LOCATION (node), "declared here");
12497 else if (TYPE_P (node))
12499 tree what = NULL_TREE;
12500 tree decl = TYPE_STUB_DECL (node);
12502 if (TYPE_NAME (node))
12504 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12505 what = TYPE_NAME (node);
12506 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12507 && DECL_NAME (TYPE_NAME (node)))
12508 what = DECL_NAME (TYPE_NAME (node));
12511 if (decl)
12513 if (what)
12515 if (msg)
12516 w = warning (OPT_Wdeprecated_declarations,
12517 "%qE is deprecated: %s", what, msg);
12518 else
12519 w = warning (OPT_Wdeprecated_declarations,
12520 "%qE is deprecated", what);
12522 else
12524 if (msg)
12525 w = warning (OPT_Wdeprecated_declarations,
12526 "type is deprecated: %s", msg);
12527 else
12528 w = warning (OPT_Wdeprecated_declarations,
12529 "type is deprecated");
12531 if (w)
12532 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12534 else
12536 if (what)
12538 if (msg)
12539 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12540 what, msg);
12541 else
12542 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12544 else
12546 if (msg)
12547 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12548 msg);
12549 else
12550 warning (OPT_Wdeprecated_declarations, "type is deprecated");
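/* For illustration (not part of the original source):

     int old_api (void) __attribute__ ((deprecated ("use new_api")));
     int use (void) { return old_api (); }

   The call produces roughly "warning: 'old_api' is deprecated: use
   new_api [-Wdeprecated-declarations]", followed by a "declared here"
   note at the declaration of old_api.  */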
12556 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12557 somewhere in it. */
12559 bool
12560 contains_bitfld_component_ref_p (const_tree ref)
12562 while (handled_component_p (ref))
12564 if (TREE_CODE (ref) == COMPONENT_REF
12565 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12566 return true;
12567 ref = TREE_OPERAND (ref, 0);
12570 return false;
12573 /* Try to determine whether a TRY_CATCH expression can fall through.
12574 This is a subroutine of block_may_fallthru. */
12576 static bool
12577 try_catch_may_fallthru (const_tree stmt)
12579 tree_stmt_iterator i;
12581 /* If the TRY block can fall through, the whole TRY_CATCH can
12582 fall through. */
12583 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12584 return true;
12586 i = tsi_start (TREE_OPERAND (stmt, 1));
12587 switch (TREE_CODE (tsi_stmt (i)))
12589 case CATCH_EXPR:
12590 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12591 catch expression and a body. The whole TRY_CATCH may fall
12592 through iff any of the catch bodies falls through. */
12593 for (; !tsi_end_p (i); tsi_next (&i))
12595 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12596 return true;
12598 return false;
12600 case EH_FILTER_EXPR:
12601 /* The exception filter expression only matters if there is an
12602 exception. If the exception does not match EH_FILTER_TYPES,
12603 we will execute EH_FILTER_FAILURE, and we will fall through
12604 if that falls through. If the exception does match
12605 EH_FILTER_TYPES, the stack unwinder will continue up the
12606 stack, so we will not fall through. We don't know whether we
12607 will throw an exception which matches EH_FILTER_TYPES or not,
12608 so we just ignore EH_FILTER_TYPES and assume that we might
12609 throw an exception which doesn't match. */
12610 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12612 default:
12613 /* This case represents statements to be executed when an
12614 exception occurs. Those statements are implicitly followed
12615 by a RESX statement to resume execution after the exception.
12616 So in this case the TRY_CATCH never falls through. */
12617 return false;
12621 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12622 need not be 100% accurate; simply be conservative and return true if we
12623 don't know. This is used only to avoid stupidly generating extra code.
12624 If we're wrong, we'll just delete the extra code later. */
12626 bool
12627 block_may_fallthru (const_tree block)
12629 /* This CONST_CAST is okay because expr_last returns its argument
12630 unmodified and we assign it to a const_tree. */
12631 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12633 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12635 case GOTO_EXPR:
12636 case RETURN_EXPR:
12637 /* Easy cases. If the last statement of the block implies
12638 control transfer, then we can't fall through. */
12639 return false;
12641 case SWITCH_EXPR:
12642 /* If there is a default: label or case labels cover all possible
12643 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12644 to some case label in all cases and all we care is whether the
12645 SWITCH_BODY falls through. */
12646 if (SWITCH_ALL_CASES_P (stmt))
12647 return block_may_fallthru (SWITCH_BODY (stmt));
12648 return true;
12650 case COND_EXPR:
12651 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12652 return true;
12653 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12655 case BIND_EXPR:
12656 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12658 case TRY_CATCH_EXPR:
12659 return try_catch_may_fallthru (stmt);
12661 case TRY_FINALLY_EXPR:
12662 /* The finally clause is always executed after the try clause,
12663 so if it does not fall through, then the try-finally will not
12664 fall through. Otherwise, if the try clause does not fall
12665 through, then when the finally clause falls through it will
12666 resume execution wherever the try clause was going. So the
12667 whole try-finally will only fall through if both the try
12668 clause and the finally clause fall through. */
12669 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12670 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12672 case MODIFY_EXPR:
12673 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12674 stmt = TREE_OPERAND (stmt, 1);
12675 else
12676 return true;
12677 /* FALLTHRU */
12679 case CALL_EXPR:
12680 /* Functions that do not return do not fall through. */
12681 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12683 case CLEANUP_POINT_EXPR:
12684 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12686 case TARGET_EXPR:
12687 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12689 case ERROR_MARK:
12690 return true;
12692 default:
12693 return lang_hooks.block_may_fallthru (stmt);
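/* Illustrative cases (not part of the original source):

     { x = 1; }                      -> true   (plain assignment)
     { return x; }                   -> false  (explicit control transfer)
     { abort (); }                   -> false  (noreturn call)
     if (c) return 1; else x = 2;    -> true   (the else arm falls through)

   As the comment above says, "true" only means "might fall through".  */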
12697 /* True if we are using EH to handle cleanups. */
12698 static bool using_eh_for_cleanups_flag = false;
12700 /* This routine is called from front ends to indicate eh should be used for
12701 cleanups. */
12702 void
12703 using_eh_for_cleanups (void)
12705 using_eh_for_cleanups_flag = true;
12708 /* Query whether EH is used for cleanups. */
12709 bool
12710 using_eh_for_cleanups_p (void)
12712 return using_eh_for_cleanups_flag;
12715 /* Wrapper for tree_code_name to ensure that tree code is valid */
12716 const char *
12717 get_tree_code_name (enum tree_code code)
12719 const char *invalid = "<invalid tree code>";
12721 if (code >= MAX_TREE_CODES)
12722 return invalid;
12724 return tree_code_name[code];
12727 /* Drops the TREE_OVERFLOW flag from T. */
12729 tree
12730 drop_tree_overflow (tree t)
12732 gcc_checking_assert (TREE_OVERFLOW (t));
12734 /* For tree codes with a sharing machinery re-build the result. */
12735 if (poly_int_tree_p (t))
12736 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12738 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12739 and canonicalize the result. */
12740 if (TREE_CODE (t) == VECTOR_CST)
12742 tree_vector_builder builder;
12743 builder.new_unary_operation (TREE_TYPE (t), t, true);
12744 unsigned int count = builder.encoded_nelts ();
12745 for (unsigned int i = 0; i < count; ++i)
12747 tree elt = VECTOR_CST_ELT (t, i);
12748 if (TREE_OVERFLOW (elt))
12749 elt = drop_tree_overflow (elt);
12750 builder.quick_push (elt);
12752 return builder.build ();
12755 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12756 and drop the flag. */
12757 t = copy_node (t);
12758 TREE_OVERFLOW (t) = 0;
12760 /* For constants that contain nested constants, drop the flag
12761 from those as well. */
12762 if (TREE_CODE (t) == COMPLEX_CST)
12764 if (TREE_OVERFLOW (TREE_REALPART (t)))
12765 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12766 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12767 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12770 return t;
12773 /* Given a memory reference expression T, return its base address.
12774 The base address of a memory reference expression is the main
12775 object being referenced. For instance, the base address for
12776 'array[i].fld[j]' is 'array'. You can think of this as stripping
12777 away the offset part from a memory address.
12779 This function calls handled_component_p to strip away all the inner
12780 parts of the memory reference until it reaches the base object. */
12782 tree
12783 get_base_address (tree t)
12785 while (handled_component_p (t))
12786 t = TREE_OPERAND (t, 0);
12788 if ((TREE_CODE (t) == MEM_REF
12789 || TREE_CODE (t) == TARGET_MEM_REF)
12790 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12791 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12793 /* ??? Either the alias oracle or all callers need to properly deal
12794 with WITH_SIZE_EXPRs before we can look through those. */
12795 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12796 return NULL_TREE;
12798 return t;
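/* Illustrative results (not part of the original source):

     array[i].fld[j]                          -> array
     a MEM_REF whose address operand is &s    -> s
     a WITH_SIZE_EXPR                         -> NULL_TREE

   Handled components are stripped and a MEM_REF or TARGET_MEM_REF of
   an ADDR_EXPR is looked through; all offset information is
   discarded.  */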
12801 /* Return a tree of sizetype representing the size, in bytes, of the element
12802 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12804 tree
12805 array_ref_element_size (tree exp)
12807 tree aligned_size = TREE_OPERAND (exp, 3);
12808 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12809 location_t loc = EXPR_LOCATION (exp);
12811 /* If a size was specified in the ARRAY_REF, it's the size measured
12812 in alignment units of the element type. So multiply by that value. */
12813 if (aligned_size)
12815 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12816 sizetype from another type of the same width and signedness. */
12817 if (TREE_TYPE (aligned_size) != sizetype)
12818 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12819 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12820 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12823 /* Otherwise, take the size from that of the element type. Substitute
12824 any PLACEHOLDER_EXPR that we have. */
12825 else
12826 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12829 /* Return a tree representing the lower bound of the array mentioned in
12830 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12832 tree
12833 array_ref_low_bound (tree exp)
12835 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12837 /* If a lower bound is specified in EXP, use it. */
12838 if (TREE_OPERAND (exp, 2))
12839 return TREE_OPERAND (exp, 2);
12841 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12842 substituting for a PLACEHOLDER_EXPR as needed. */
12843 if (domain_type && TYPE_MIN_VALUE (domain_type))
12844 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12846 /* Otherwise, return a zero of the appropriate type. */
12847 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12850 /* Return a tree representing the upper bound of the array mentioned in
12851 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12853 tree
12854 array_ref_up_bound (tree exp)
12856 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12858 /* If there is a domain type and it has an upper bound, use it, substituting
12859 for a PLACEHOLDER_EXPR as needed. */
12860 if (domain_type && TYPE_MAX_VALUE (domain_type))
12861 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12863 /* Otherwise fail. */
12864 return NULL_TREE;
12867 /* Returns true if REF is an array reference or a component reference
12868 to an array at the end of a structure.
12869 If this is the case, the array may be allocated larger
12870 than its upper bound implies. */
12872 bool
12873 array_at_struct_end_p (tree ref)
12875 tree atype;
12877 if (TREE_CODE (ref) == ARRAY_REF
12878 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12880 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12881 ref = TREE_OPERAND (ref, 0);
12883 else if (TREE_CODE (ref) == COMPONENT_REF
12884 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12885 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12886 else
12887 return false;
12889 if (TREE_CODE (ref) == STRING_CST)
12890 return false;
12892 tree ref_to_array = ref;
12893 while (handled_component_p (ref))
12895 /* If the reference chain contains a component reference to a
12896 non-union type and there follows another field the reference
12897 is not at the end of a structure. */
12898 if (TREE_CODE (ref) == COMPONENT_REF)
12900 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12902 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12903 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12904 nextf = DECL_CHAIN (nextf);
12905 if (nextf)
12906 return false;
12909 /* If we have a multi-dimensional array we do not consider
12910 a non-innermost dimension as a flex array if the whole
12911 multi-dimensional array is at struct end.
12912 Same for an array of aggregates with a trailing array
12913 member. */
12914 else if (TREE_CODE (ref) == ARRAY_REF)
12915 return false;
12916 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12918 /* If we view an underlying object as something else, then what we
12919 gathered up to now is what we have to rely on. */
12920 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12921 break;
12922 else
12923 gcc_unreachable ();
12925 ref = TREE_OPERAND (ref, 0);
12928 /* The array now is at struct end. Treat flexible arrays as
12929 always subject to extension, even into just the padding constrained by
12930 an underlying decl. */
12931 if (! TYPE_SIZE (atype)
12932 || ! TYPE_DOMAIN (atype)
12933 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12934 return true;
12936 if (TREE_CODE (ref) == MEM_REF
12937 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
12938 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
12940 /* If the reference is based on a declared entity, the size of the array
12941 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
12942 if (DECL_P (ref)
12943 && !(flag_unconstrained_commons
12944 && VAR_P (ref) && DECL_COMMON (ref))
12945 && DECL_SIZE_UNIT (ref)
12946 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12948 /* Check whether the array domain covers all of the available
12949 padding. */
12950 poly_int64 offset;
12951 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12952 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12953 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12954 return true;
12955 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12956 return true;
12958 /* If at least one extra element fits it is a flexarray. */
12959 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12960 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
12961 + 2)
12962 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
12963 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
12964 return true;
12966 return false;
12969 return true;
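/* For illustration (not part of the original source): given

     struct msg { int len; char data[]; };

   an access such as m->data[i] is "at struct end": data[] has no upper
   bound in its domain, so the function returns true and callers must
   not assume a fixed extent.  An array member followed by another
   field, e.g. 'char buf[8]; int tail;', is never treated this way.  */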
12972 /* Return a tree representing the offset, in bytes, of the field referenced
12973 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12975 tree
12976 component_ref_field_offset (tree exp)
12978 tree aligned_offset = TREE_OPERAND (exp, 2);
12979 tree field = TREE_OPERAND (exp, 1);
12980 location_t loc = EXPR_LOCATION (exp);
12982 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12983 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12984 value. */
12985 if (aligned_offset)
12987 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12988 sizetype from another type of the same width and signedness. */
12989 if (TREE_TYPE (aligned_offset) != sizetype)
12990 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12991 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12992 size_int (DECL_OFFSET_ALIGN (field)
12993 / BITS_PER_UNIT));
12996 /* Otherwise, take the offset from that of the field. Substitute
12997 any PLACEHOLDER_EXPR that we have. */
12998 else
12999 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13002 /* Return the machine mode of T. For vectors, returns the mode of the
13003 inner type. The main use case is to feed the result to HONOR_NANS,
13004 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13006 machine_mode
13007 element_mode (const_tree t)
13009 if (!TYPE_P (t))
13010 t = TREE_TYPE (t);
13011 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13012 t = TREE_TYPE (t);
13013 return TYPE_MODE (t);
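/* Illustrative results (not part of the original source), on a typical
   x86_64 target:

     element_mode (double)           -> DFmode
     element_mode (_Complex double)  -> DFmode
     element_mode (a V4SF vector)    -> SFmode

   i.e. the scalar component mode is returned, which is what predicates
   such as HONOR_NANS expect.  */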
13016 /* Vector types need to re-check the target flags each time we report
13017 the machine mode. We need to do this because attribute target can
13018 change the result of vector_mode_supported_p and have_regs_of_mode
13019 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13020 change on a per-function basis. */
13021 /* ??? Possibly a better solution is to run through all the types
13022 referenced by a function and re-compute the TYPE_MODE once, rather
13023 than make the TYPE_MODE macro call a function. */
13025 machine_mode
13026 vector_type_mode (const_tree t)
13028 machine_mode mode;
13030 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13032 mode = t->type_common.mode;
13033 if (VECTOR_MODE_P (mode)
13034 && (!targetm.vector_mode_supported_p (mode)
13035 || !have_regs_of_mode[mode]))
13037 scalar_int_mode innermode;
13039 /* For integers, try mapping it to a same-sized scalar mode. */
13040 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13042 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13043 * GET_MODE_BITSIZE (innermode));
13044 scalar_int_mode mode;
13045 if (int_mode_for_size (size, 0).exists (&mode)
13046 && have_regs_of_mode[mode])
13047 return mode;
13050 return BLKmode;
13053 return mode;
13056 /* Verify that basic properties of T match TV and thus T can be a variant of
13057 TV. TV should be the more specified variant (i.e. the main variant). */
13059 static bool
13060 verify_type_variant (const_tree t, tree tv)
13062 /* Type variant can differ by:
13064 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13065 ENCODE_QUAL_ADDR_SPACE.
13066 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13067 in this case some values may not be set in the variant types
13068 (see TYPE_COMPLETE_P checks).
13069 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13070 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13071 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13072 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13073 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13074 this is necessary to make it possible to merge types from different TUs
13075 - arrays, pointers and references may have TREE_TYPE that is a variant
13076 of TREE_TYPE of their main variants.
13077 - aggregates may have a new TYPE_FIELDS list that lists variants of
13078 the main variant TYPE_FIELDS.
13079 - vector types may differ by TYPE_VECTOR_OPAQUE
13082 /* Convenience macro for matching individual fields. */
13083 #define verify_variant_match(flag) \
13084 do { \
13085 if (flag (tv) != flag (t)) \
13087 error ("type variant differs by " #flag "."); \
13088 debug_tree (tv); \
13089 return false; \
13091 } while (false)
13093 /* tree_base checks. */
13095 verify_variant_match (TREE_CODE);
13096 /* FIXME: Ada builds non-artificial variants of artificial types. */
13097 if (TYPE_ARTIFICIAL (tv) && 0)
13098 verify_variant_match (TYPE_ARTIFICIAL);
13099 if (POINTER_TYPE_P (tv))
13100 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13101 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13102 verify_variant_match (TYPE_UNSIGNED);
13103 verify_variant_match (TYPE_PACKED);
13104 if (TREE_CODE (t) == REFERENCE_TYPE)
13105 verify_variant_match (TYPE_REF_IS_RVALUE);
13106 if (AGGREGATE_TYPE_P (t))
13107 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13108 else
13109 verify_variant_match (TYPE_SATURATING);
13110 /* FIXME: This check triggers during the libstdc++ build. */
13111 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13112 verify_variant_match (TYPE_FINAL_P);
13114 /* tree_type_common checks. */
13116 if (COMPLETE_TYPE_P (t))
13118 verify_variant_match (TYPE_MODE);
13119 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13120 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13121 verify_variant_match (TYPE_SIZE);
13122 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13123 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13124 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13126 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13127 TYPE_SIZE_UNIT (tv), 0));
13128 error ("type variant has different TYPE_SIZE_UNIT");
13129 debug_tree (tv);
13130 error ("type variant's TYPE_SIZE_UNIT");
13131 debug_tree (TYPE_SIZE_UNIT (tv));
13132 error ("type's TYPE_SIZE_UNIT");
13133 debug_tree (TYPE_SIZE_UNIT (t));
13134 return false;
13137 verify_variant_match (TYPE_PRECISION);
13138 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13139 if (RECORD_OR_UNION_TYPE_P (t))
13140 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13141 else if (TREE_CODE (t) == ARRAY_TYPE)
13142 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13143 /* During LTO we merge variant lists from different translation units
13144 that may differ by TYPE_CONTEXT, which in turn may point
13145 to TRANSLATION_UNIT_DECL.
13146 Ada also builds variants of types with different TYPE_CONTEXT. */
13147 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13148 verify_variant_match (TYPE_CONTEXT);
13149 verify_variant_match (TYPE_STRING_FLAG);
13150 if (TYPE_ALIAS_SET_KNOWN_P (t))
13152 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
13153 debug_tree (tv);
13154 return false;
13157 /* tree_type_non_common checks. */
13159 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13160 and dangles the pointer from time to time. */
13161 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13162 && (in_lto_p || !TYPE_VFIELD (tv)
13163 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13165 error ("type variant has different TYPE_VFIELD");
13166 debug_tree (tv);
13167 return false;
13169 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13170 || TREE_CODE (t) == INTEGER_TYPE
13171 || TREE_CODE (t) == BOOLEAN_TYPE
13172 || TREE_CODE (t) == REAL_TYPE
13173 || TREE_CODE (t) == FIXED_POINT_TYPE)
13175 verify_variant_match (TYPE_MAX_VALUE);
13176 verify_variant_match (TYPE_MIN_VALUE);
13178 if (TREE_CODE (t) == METHOD_TYPE)
13179 verify_variant_match (TYPE_METHOD_BASETYPE);
13180 if (TREE_CODE (t) == OFFSET_TYPE)
13181 verify_variant_match (TYPE_OFFSET_BASETYPE);
13182 if (TREE_CODE (t) == ARRAY_TYPE)
13183 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13184 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13185 or even the type's main variant. This is needed to make bootstrap pass
13186 and the bug seems new in GCC 5.
13187 C++ FE should be updated to make this consistent and we should check
13188 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13189 is a match with main variant.
13191 Also disable the check for Java for now because of a parser hack that
13192 first builds a dummy BINFO and then sometimes replaces it by the real BINFO in some
13193 of the copies. */
13194 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13195 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13196 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13197 Since there is no cheap way to tell a C++ type from a Java type w/o LTO, do the checking
13198 at LTO time only. */
13199 && (in_lto_p && odr_type_p (t)))
13201 error ("type variant has different TYPE_BINFO");
13202 debug_tree (tv);
13203 error ("type variant's TYPE_BINFO");
13204 debug_tree (TYPE_BINFO (tv));
13205 error ("type's TYPE_BINFO");
13206 debug_tree (TYPE_BINFO (t));
13207 return false;
13210 /* Check various uses of TYPE_VALUES_RAW. */
13211 if (TREE_CODE (t) == ENUMERAL_TYPE)
13212 verify_variant_match (TYPE_VALUES);
13213 else if (TREE_CODE (t) == ARRAY_TYPE)
13214 verify_variant_match (TYPE_DOMAIN);
13215 /* Permit incomplete variants of complete type. While FEs may complete
13216 all variants, this does not happen for C++ templates in all cases. */
13217 else if (RECORD_OR_UNION_TYPE_P (t)
13218 && COMPLETE_TYPE_P (t)
13219 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13221 tree f1, f2;
13223 /* Fortran builds qualified variants as new records with items of
13224 qualified type. Verify that they look the same. */
13225 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13226 f1 && f2;
13227 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13228 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13229 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13230 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13231 /* FIXME: gfc_nonrestricted_type builds all types as variants
13232 with the exception of pointer types. It deeply copies the type,
13233 which means that we may end up with a variant type
13234 referring to a non-variant pointer. We may change it to
13235 produce types as variants, too, like
13236 objc_get_protocol_qualified_type does. */
13237 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13238 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13239 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13240 break;
13241 if (f1 || f2)
13243 error ("type variant has different TYPE_FIELDS");
13244 debug_tree (tv);
13245 error ("first mismatch is field");
13246 debug_tree (f1);
13247 error ("and field");
13248 debug_tree (f2);
13249 return false;
13252 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13253 verify_variant_match (TYPE_ARG_TYPES);
13254 /* For C++ the qualified variant of an array type is really an array type
13255 of the qualified TREE_TYPE.
13256 ObjC builds variants of pointers where the pointed-to type is a variant
13257 too, in objc_get_protocol_qualified_type. */
13258 if (TREE_TYPE (t) != TREE_TYPE (tv)
13259 && ((TREE_CODE (t) != ARRAY_TYPE
13260 && !POINTER_TYPE_P (t))
13261 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13262 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13264 error ("type variant has different TREE_TYPE");
13265 debug_tree (tv);
13266 error ("type variant's TREE_TYPE");
13267 debug_tree (TREE_TYPE (tv));
13268 error ("type's TREE_TYPE");
13269 debug_tree (TREE_TYPE (t));
13270 return false;
13272 if (type_with_alias_set_p (t)
13273 && !gimple_canonical_types_compatible_p (t, tv, false))
13275 error ("type is not compatible with its variant");
13276 debug_tree (tv);
13277 error ("type variant's TREE_TYPE");
13278 debug_tree (TREE_TYPE (tv));
13279 error ("type's TREE_TYPE");
13280 debug_tree (TREE_TYPE (t));
13281 return false;
13283 return true;
13284 #undef verify_variant_match
13288 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13289 the middle-end types_compatible_p function. It needs to avoid
13290 claiming types are different for types that should be treated
13291 the same with respect to TBAA. Canonical types are also used
13292 for IL consistency checks via the useless_type_conversion_p
13293 predicate which does not handle all type kinds itself but falls
13294 back to pointer-comparison of TYPE_CANONICAL for aggregates
13295 for example. */
13297 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13298 type calculation because we need to allow inter-operability between signed
13299 and unsigned variants. */
13301 bool
13302 type_with_interoperable_signedness (const_tree type)
13304 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13305 signed char and unsigned char. Similarly, the Fortran FE builds
13306 C_SIZE_T as a signed type, while C defines it unsigned. */
13308 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13309 == INTEGER_TYPE
13310 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13311 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13314 /* Return true iff T1 and T2 are structurally identical for what
13315 TBAA is concerned.
13316 This function is used both by lto.c canonical type merging and by the
13317 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13318 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13319 only for LTO because only in these cases TYPE_CANONICAL equivalence
13320 corresponds to the one defined by gimple_canonical_types_compatible_p. */
13322 bool
13323 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13324 bool trust_type_canonical)
13326 /* Type variants should be the same as the main variant. When not doing sanity
13327 checking to verify this fact, go to main variants and save some work. */
13328 if (trust_type_canonical)
13330 t1 = TYPE_MAIN_VARIANT (t1);
13331 t2 = TYPE_MAIN_VARIANT (t2);
13334 /* Check first for the obvious case of pointer identity. */
13335 if (t1 == t2)
13336 return true;
13338 /* Check that we have two types to compare. */
13339 if (t1 == NULL_TREE || t2 == NULL_TREE)
13340 return false;
13342 /* We consider complete types always compatible with incomplete type.
13343 This does not make sense for canonical type calculation and thus we
13344 need to ensure that we are never called on it.
13346 FIXME: For more correctness the function probably should have three modes
13347 1) mode assuming that types are complete, matching their structure
13348 2) mode allowing incomplete types but producing equivalence classes
13349 and thus ignoring all info from complete types
13350 3) mode allowing incomplete types to match complete but checking
13351 compatibility between complete types.
13353 1 and 2 can be used for canonical type calculation. 3 is the real
13354 definition of type compatibility that can be used e.g. for warnings during
13355 declaration merging. */
13357 gcc_assert (!trust_type_canonical
13358 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13359 /* If the types have been previously registered and found equal
13360 they still are. */
13362 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13363 && trust_type_canonical)
13365 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13366 they are always NULL, but they are set to non-NULL for types
13367 constructed by build_pointer_type and variants. In this case the
13368 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
13369 all pointers are considered equal). Be sure to not return false
13370 negatives. */
13371 gcc_checking_assert (canonical_type_used_p (t1)
13372 && canonical_type_used_p (t2));
13373 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13376 /* Can't be the same type if the types don't have the same code. */
13377 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13378 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13379 return false;
13381 /* Qualifiers do not matter for canonical type comparison purposes. */
13383 /* Void types and nullptr types are always the same. */
13384 if (TREE_CODE (t1) == VOID_TYPE
13385 || TREE_CODE (t1) == NULLPTR_TYPE)
13386 return true;
13388 /* Can't be the same type if they have different mode. */
13389 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13390 return false;
13392 /* Non-aggregate types can be handled cheaply. */
13393 if (INTEGRAL_TYPE_P (t1)
13394 || SCALAR_FLOAT_TYPE_P (t1)
13395 || FIXED_POINT_TYPE_P (t1)
13396 || TREE_CODE (t1) == VECTOR_TYPE
13397 || TREE_CODE (t1) == COMPLEX_TYPE
13398 || TREE_CODE (t1) == OFFSET_TYPE
13399 || POINTER_TYPE_P (t1))
13401 /* Can't be the same type if they have different precision. */
13402 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13403 return false;
13405 /* In some cases the signed and unsigned types are required to be
13406 inter-operable. */
13407 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13408 && !type_with_interoperable_signedness (t1))
13409 return false;
13411 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13412 interoperable with "signed char". Unless all frontends are revisited
13413 to agree on these types, we must ignore the flag completely. */
13415 /* The Fortran standard defines a C_PTR type that is compatible with every
13416 C pointer. For this reason we need to glob all pointers into one.
13417 Still, pointers in different address spaces are not compatible. */
13418 if (POINTER_TYPE_P (t1))
13420 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13421 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13422 return false;
13425 /* Tail-recurse to components. */
13426 if (TREE_CODE (t1) == VECTOR_TYPE
13427 || TREE_CODE (t1) == COMPLEX_TYPE)
13428 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13429 TREE_TYPE (t2),
13430 trust_type_canonical);
13432 return true;
13435 /* Do type-specific comparisons. */
13436 switch (TREE_CODE (t1))
13438 case ARRAY_TYPE:
13439 /* Array types are the same if the element types are the same and
13440 the number of elements is the same. */
13441 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13442 trust_type_canonical)
13443 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13444 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13445 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13446 return false;
13447 else
13449 tree i1 = TYPE_DOMAIN (t1);
13450 tree i2 = TYPE_DOMAIN (t2);
13452 /* For an incomplete external array, the type domain can be
13453 NULL_TREE. Check this condition also. */
13454 if (i1 == NULL_TREE && i2 == NULL_TREE)
13455 return true;
13456 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13457 return false;
13458 else
13460 tree min1 = TYPE_MIN_VALUE (i1);
13461 tree min2 = TYPE_MIN_VALUE (i2);
13462 tree max1 = TYPE_MAX_VALUE (i1);
13463 tree max2 = TYPE_MAX_VALUE (i2);
13465 /* The minimum/maximum values have to be the same. */
13466 if ((min1 == min2
13467 || (min1 && min2
13468 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13469 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13470 || operand_equal_p (min1, min2, 0))))
13471 && (max1 == max2
13472 || (max1 && max2
13473 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13474 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13475 || operand_equal_p (max1, max2, 0)))))
13476 return true;
13477 else
13478 return false;
13482 case METHOD_TYPE:
13483 case FUNCTION_TYPE:
13484 /* Function types are the same if the return type and argument types
13485 are the same. */
13486 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13487 trust_type_canonical))
13488 return false;
13490 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13491 return true;
13492 else
13494 tree parms1, parms2;
13496 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13497 parms1 && parms2;
13498 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13500 if (!gimple_canonical_types_compatible_p
13501 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13502 trust_type_canonical))
13503 return false;
13506 if (parms1 || parms2)
13507 return false;
13509 return true;
13512 case RECORD_TYPE:
13513 case UNION_TYPE:
13514 case QUAL_UNION_TYPE:
13516 tree f1, f2;
13518 /* Don't try to compare variants of an incomplete type before
13519 TYPE_FIELDS has been copied around. */
13520 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13521 return true;
13524 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13525 return false;
13527 /* For aggregate types, all the fields must be the same. */
13528 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13529 f1 || f2;
13530 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13532 /* Skip non-fields and zero-sized fields. */
13533 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13534 || (DECL_SIZE (f1)
13535 && integer_zerop (DECL_SIZE (f1)))))
13536 f1 = TREE_CHAIN (f1);
13537 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13538 || (DECL_SIZE (f2)
13539 && integer_zerop (DECL_SIZE (f2)))))
13540 f2 = TREE_CHAIN (f2);
13541 if (!f1 || !f2)
13542 break;
13543 /* The fields must have the same addressability, offset and type. */
13544 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13545 || !gimple_compare_field_offset (f1, f2)
13546 || !gimple_canonical_types_compatible_p
13547 (TREE_TYPE (f1), TREE_TYPE (f2),
13548 trust_type_canonical))
13549 return false;
13552 /* If one aggregate has more fields than the other, they
13553 are not the same. */
13554 if (f1 || f2)
13555 return false;
13557 return true;
13560 default:
13561 /* Consider all types with language-specific trees in them mutually
13562 compatible. This is executed only from verify_type, and false
13563 positives can be tolerated. */
13564 gcc_assert (!in_lto_p);
13565 return true;
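/* Editorial note (not part of the original file): the structural comparison
   above backs the TYPE_CANONICAL consistency check in verify_type below,
   which calls gimple_canonical_types_compatible_p with its last argument
   (trust_type_canonical) set to false so that TYPE_CANONICAL itself is not
   consulted during the comparison.  */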
13569 /* Verify type T. */
13571 void
13572 verify_type (const_tree t)
13574 bool error_found = false;
13575 tree mv = TYPE_MAIN_VARIANT (t);
13576 if (!mv)
13578 error ("Main variant is not defined");
13579 error_found = true;
13581 else if (mv != TYPE_MAIN_VARIANT (mv))
13583 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13584 debug_tree (mv);
13585 error_found = true;
13587 else if (t != mv && !verify_type_variant (t, mv))
13588 error_found = true;
13590 tree ct = TYPE_CANONICAL (t);
13591 if (!ct)
13593 else if (TYPE_CANONICAL (ct) != ct)
13595 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13596 debug_tree (ct);
13597 error_found = true;
13599 /* Method and function types cannot be used to address memory and thus
13600 TYPE_CANONICAL really matters only for determining useless conversions.
13602 FIXME: The C++ FE produces declarations of builtin functions that are not
13603 compatible with their main variants. */
13604 else if (TREE_CODE (t) == FUNCTION_TYPE)
13606 else if (t != ct
13607 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13608 with variably sized arrays because their sizes are possibly
13609 gimplified to different variables. */
13610 && !variably_modified_type_p (ct, NULL)
13611 && !gimple_canonical_types_compatible_p (t, ct, false))
13613 error ("TYPE_CANONICAL is not compatible");
13614 debug_tree (ct);
13615 error_found = true;
13618 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13619 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13621 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13622 debug_tree (ct);
13623 error_found = true;
13625 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13627 error ("TYPE_CANONICAL of main variant is not main variant");
13628 debug_tree (ct);
13629 debug_tree (TYPE_MAIN_VARIANT (ct));
13630 error_found = true;
13634 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13635 if (RECORD_OR_UNION_TYPE_P (t))
13637 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13638 and dangles the pointer from time to time. */
13639 if (TYPE_VFIELD (t)
13640 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13641 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13643 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13644 debug_tree (TYPE_VFIELD (t));
13645 error_found = true;
13648 else if (TREE_CODE (t) == POINTER_TYPE)
13650 if (TYPE_NEXT_PTR_TO (t)
13651 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13653 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13654 debug_tree (TYPE_NEXT_PTR_TO (t));
13655 error_found = true;
13658 else if (TREE_CODE (t) == REFERENCE_TYPE)
13660 if (TYPE_NEXT_REF_TO (t)
13661 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13663 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13664 debug_tree (TYPE_NEXT_REF_TO (t));
13665 error_found = true;
13668 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13669 || TREE_CODE (t) == FIXED_POINT_TYPE)
13671 /* FIXME: The following check should pass:
13672 useless_type_conversion_p (const_cast <tree> (t),
13673 TREE_TYPE (TYPE_MIN_VALUE (t)))
13674 but does not for C sizetypes in LTO. */
13677 /* Check various uses of TYPE_MAX_VALUE_RAW. */
13678 if (RECORD_OR_UNION_TYPE_P (t))
13680 if (!TYPE_BINFO (t))
13682 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13684 error ("TYPE_BINFO is not TREE_BINFO");
13685 debug_tree (TYPE_BINFO (t));
13686 error_found = true;
13688 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
13690 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13691 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13692 error_found = true;
13695 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13697 if (TYPE_METHOD_BASETYPE (t)
13698 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13699 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13701 error ("TYPE_METHOD_BASETYPE is not record nor union");
13702 debug_tree (TYPE_METHOD_BASETYPE (t));
13703 error_found = true;
13706 else if (TREE_CODE (t) == OFFSET_TYPE)
13708 if (TYPE_OFFSET_BASETYPE (t)
13709 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13710 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13712 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13713 debug_tree (TYPE_OFFSET_BASETYPE (t));
13714 error_found = true;
13717 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13718 || TREE_CODE (t) == FIXED_POINT_TYPE)
13720 /* FIXME: The following check should pass:
13721 useless_type_conversion_p (const_cast <tree> (t),
13722 TREE_TYPE (TYPE_MAX_VALUE (t)))
13723 but does not for C sizetypes in LTO. */
13725 else if (TREE_CODE (t) == ARRAY_TYPE)
13727 if (TYPE_ARRAY_MAX_SIZE (t)
13728 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13730 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13731 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13732 error_found = true;
13735 else if (TYPE_MAX_VALUE_RAW (t))
13737 error ("TYPE_MAX_VALUE_RAW non-NULL");
13738 debug_tree (TYPE_MAX_VALUE_RAW (t));
13739 error_found = true;
13742 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13744 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13745 debug_tree (TYPE_LANG_SLOT_1 (t));
13746 error_found = true;
13749 /* Check various uses of TYPE_VALUES_RAW. */
13750 if (TREE_CODE (t) == ENUMERAL_TYPE)
13751 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13753 tree value = TREE_VALUE (l);
13754 tree name = TREE_PURPOSE (l);
13756 /* The C FE produces INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
13757 CONST_DECL of ENUMERAL_TYPE. */
13758 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13760 error ("Enum value is not CONST_DECL or INTEGER_CST");
13761 debug_tree (value);
13762 debug_tree (name);
13763 error_found = true;
13765 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13766 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13768 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13769 debug_tree (value);
13770 debug_tree (name);
13771 error_found = true;
13773 if (TREE_CODE (name) != IDENTIFIER_NODE)
13775 error ("Enum value name is not IDENTIFIER_NODE");
13776 debug_tree (value);
13777 debug_tree (name);
13778 error_found = true;
13781 else if (TREE_CODE (t) == ARRAY_TYPE)
13783 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13785 error ("Array TYPE_DOMAIN is not integer type");
13786 debug_tree (TYPE_DOMAIN (t));
13787 error_found = true;
13790 else if (RECORD_OR_UNION_TYPE_P (t))
13792 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13794 error ("TYPE_FIELDS defined in incomplete type");
13795 error_found = true;
13797 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13799 /* TODO: verify properties of decls. */
13800 if (TREE_CODE (fld) == FIELD_DECL)
13802 else if (TREE_CODE (fld) == TYPE_DECL)
13804 else if (TREE_CODE (fld) == CONST_DECL)
13806 else if (VAR_P (fld))
13808 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13810 else if (TREE_CODE (fld) == USING_DECL)
13812 else if (TREE_CODE (fld) == FUNCTION_DECL)
13814 else
13816 error ("Wrong tree in TYPE_FIELDS list");
13817 debug_tree (fld);
13818 error_found = true;
13822 else if (TREE_CODE (t) == INTEGER_TYPE
13823 || TREE_CODE (t) == BOOLEAN_TYPE
13824 || TREE_CODE (t) == OFFSET_TYPE
13825 || TREE_CODE (t) == REFERENCE_TYPE
13826 || TREE_CODE (t) == NULLPTR_TYPE
13827 || TREE_CODE (t) == POINTER_TYPE)
13829 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13831 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13832 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13833 error_found = true;
13835 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13837 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13838 debug_tree (TYPE_CACHED_VALUES (t));
13839 error_found = true;
13841 /* Verify just enough of the cache to ensure that no one copied it to a new type.
13842 All copying should go through copy_node, which should clear it. */
13843 else if (TYPE_CACHED_VALUES_P (t))
13845 int i;
13846 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13847 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13848 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13850 error ("wrong TYPE_CACHED_VALUES entry");
13851 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13852 error_found = true;
13853 break;
13857 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13858 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13860 /* C++ FE uses TREE_PURPOSE to store initial values. */
13861 if (TREE_PURPOSE (l) && in_lto_p)
13863 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13864 debug_tree (l);
13865 error_found = true;
13867 if (!TYPE_P (TREE_VALUE (l)))
13869 error ("Wrong entry in TYPE_ARG_TYPES list");
13870 debug_tree (l);
13871 error_found = true;
13874 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13876 error ("TYPE_VALUES_RAW field is non-NULL");
13877 debug_tree (TYPE_VALUES_RAW (t));
13878 error_found = true;
13880 if (TREE_CODE (t) != INTEGER_TYPE
13881 && TREE_CODE (t) != BOOLEAN_TYPE
13882 && TREE_CODE (t) != OFFSET_TYPE
13883 && TREE_CODE (t) != REFERENCE_TYPE
13884 && TREE_CODE (t) != NULLPTR_TYPE
13885 && TREE_CODE (t) != POINTER_TYPE
13886 && TYPE_CACHED_VALUES_P (t))
13888 error ("TYPE_CACHED_VALUES_P is set while it should not");
13889 error_found = true;
13891 if (TYPE_STRING_FLAG (t)
13892 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13894 error ("TYPE_STRING_FLAG is set on wrong type code");
13895 error_found = true;
13898 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13899 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
13900 of a type. */
13901 if (TREE_CODE (t) == METHOD_TYPE
13902 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13904 error ("TYPE_METHOD_BASETYPE is not main variant");
13905 error_found = true;
13908 if (error_found)
13910 debug_tree (const_cast <tree> (t));
13911 internal_error ("verify_type failed");
13916 /* Return 1 if ARG, interpreted as signed in its precision, is known to be
13917 always positive, 2 if ARG is known to be always negative, or 3 if
13918 ARG may be positive or negative. */
13920 int
13921 get_range_pos_neg (tree arg)
13923 if (arg == error_mark_node)
13924 return 3;
13926 int prec = TYPE_PRECISION (TREE_TYPE (arg));
13927 int cnt = 0;
13928 if (TREE_CODE (arg) == INTEGER_CST)
13930 wide_int w = wi::sext (wi::to_wide (arg), prec);
13931 if (wi::neg_p (w))
13932 return 2;
13933 else
13934 return 1;
13936 while (CONVERT_EXPR_P (arg)
13937 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
13938 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
13940 arg = TREE_OPERAND (arg, 0);
13941 /* Narrower value zero extended into wider type
13942 will always result in positive values. */
13943 if (TYPE_UNSIGNED (TREE_TYPE (arg))
13944 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
13945 return 1;
13946 prec = TYPE_PRECISION (TREE_TYPE (arg));
13947 if (++cnt > 30)
13948 return 3;
13951 if (TREE_CODE (arg) != SSA_NAME)
13952 return 3;
13953 wide_int arg_min, arg_max;
13954 while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
13956 gimple *g = SSA_NAME_DEF_STMT (arg);
13957 if (is_gimple_assign (g)
13958 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
13960 tree t = gimple_assign_rhs1 (g);
13961 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
13962 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
13964 if (TYPE_UNSIGNED (TREE_TYPE (t))
13965 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
13966 return 1;
13967 prec = TYPE_PRECISION (TREE_TYPE (t));
13968 arg = t;
13969 if (++cnt > 30)
13970 return 3;
13971 continue;
13974 return 3;
13976 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
13978 /* For unsigned values, the "positive" range comes
13979 below the "negative" range. */
13980 if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
13981 return 1;
13982 if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
13983 return 2;
13985 else
13987 if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
13988 return 1;
13989 if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
13990 return 2;
13992 return 3;
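/* Usage sketch (editorial illustration, not part of the original file):
   callers treat the result of get_range_pos_neg as a tri-state, e.g.

     int sgn = get_range_pos_neg (op);
     if (sgn == 1)
       ... OP is known to be non-negative in its precision ...
     else if (sgn == 2)
       ... OP is known to be negative ...
     else
       ... sgn == 3: nothing is known about the sign of OP ...

   where OP is an INTEGER_CST or SSA_NAME of integral type.  */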
13998 /* Return true if ARG is marked with the nonnull attribute in the
13999 current function signature. */
14001 bool
14002 nonnull_arg_p (const_tree arg)
14004 tree t, attrs, fntype;
14005 unsigned HOST_WIDE_INT arg_num;
14007 gcc_assert (TREE_CODE (arg) == PARM_DECL
14008 && (POINTER_TYPE_P (TREE_TYPE (arg))
14009 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14011 /* The static chain decl is always non-null. */
14012 if (arg == cfun->static_chain_decl)
14013 return true;
14015 /* The THIS argument of a method is always non-NULL. */
14016 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14017 && arg == DECL_ARGUMENTS (cfun->decl)
14018 && flag_delete_null_pointer_checks)
14019 return true;
14021 /* Values passed by reference are always non-NULL. */
14022 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14023 && flag_delete_null_pointer_checks)
14024 return true;
14026 fntype = TREE_TYPE (cfun->decl);
14027 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14029 attrs = lookup_attribute ("nonnull", attrs);
14031 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14032 if (attrs == NULL_TREE)
14033 return false;
14035 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14036 if (TREE_VALUE (attrs) == NULL_TREE)
14037 return true;
14039 /* Get the position number for ARG in the function signature. */
14040 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14042 t = DECL_CHAIN (t), arg_num++)
14044 if (t == arg)
14045 break;
14048 gcc_assert (t == arg);
14050 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14051 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14053 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14054 return true;
14058 return false;
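/* Usage sketch (editorial illustration, not part of the original file):
   nonnull_arg_p is intended to be queried while cfun is set, typically when
   walking the PARM_DECLs of the current function:

     for (tree parm = DECL_ARGUMENTS (cfun->decl);
          parm; parm = DECL_CHAIN (parm))
       if (POINTER_TYPE_P (TREE_TYPE (parm)) && nonnull_arg_p (parm))
         ... PARM cannot be NULL on entry to the function ...  */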
14061 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14062 information. */
14064 location_t
14065 set_block (location_t loc, tree block)
14067 location_t pure_loc = get_pure_location (loc);
14068 source_range src_range = get_range_from_loc (line_table, loc);
14069 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14072 location_t
14073 set_source_range (tree expr, location_t start, location_t finish)
14075 source_range src_range;
14076 src_range.m_start = start;
14077 src_range.m_finish = finish;
14078 return set_source_range (expr, src_range);
14081 location_t
14082 set_source_range (tree expr, source_range src_range)
14084 if (!EXPR_P (expr))
14085 return UNKNOWN_LOCATION;
14087 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14088 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14089 pure_loc,
14090 src_range,
14091 NULL);
14092 SET_EXPR_LOCATION (expr, adhoc);
14093 return adhoc;
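/* Usage sketch (editorial illustration, not part of the original file):
   a front end that has parsed an expression EXPR spanning the locations
   START_LOC to FINISH_LOC can record the range with

     set_source_range (expr, start_loc, finish_loc);

   which preserves EXPR's pure location and attaches the range as ad-hoc
   location data.  */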
14096 /* Return the name of combined function FN, for debugging purposes. */
14098 const char *
14099 combined_fn_name (combined_fn fn)
14101 if (builtin_fn_p (fn))
14103 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14104 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14106 else
14107 return internal_fn_name (as_internal_fn (fn));
14110 /* Return a bitmap with a bit set corresponding to each argument in
14111 a function call type FNTYPE declared with attribute nonnull,
14112 or null if none of the function's arguments are nonnull. The caller
14113 must free the bitmap. */
14115 bitmap
14116 get_nonnull_args (const_tree fntype)
14118 if (fntype == NULL_TREE)
14119 return NULL;
14121 tree attrs = TYPE_ATTRIBUTES (fntype);
14122 if (!attrs)
14123 return NULL;
14125 bitmap argmap = NULL;
14127 /* A function declaration can specify multiple attribute nonnull,
14128 each with zero or more arguments. The loop below creates a bitmap
14129 representing a union of all the arguments. An empty (but non-null)
14130 bitmap means that all arguments have been declared nonnull. */
14131 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14133 attrs = lookup_attribute ("nonnull", attrs);
14134 if (!attrs)
14135 break;
14137 if (!argmap)
14138 argmap = BITMAP_ALLOC (NULL);
14140 if (!TREE_VALUE (attrs))
14142 /* Clear the bitmap in case a previous attribute nonnull
14143 set it and this one overrides it for all arguments. */
14144 bitmap_clear (argmap);
14145 return argmap;
14148 /* Iterate over the indices of the arguments declared nonnull
14149 and set a bit for each. */
14150 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14152 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14153 bitmap_set_bit (argmap, val);
14157 return argmap;
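/* Usage sketch (editorial illustration, not part of the original file):
   a caller asking whether zero-based argument ARGNO of a call to a function
   of type FNTYPE is declared nonnull might do

     bitmap nonnull = get_nonnull_args (fntype);
     bool arg_nonnull = (nonnull
                         && (bitmap_empty_p (nonnull)
                             || bitmap_bit_p (nonnull, argno)));
     if (nonnull)
       BITMAP_FREE (nonnull);

   remembering that an empty (but non-null) bitmap means every argument is
   declared nonnull.  */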
14160 /* Returns true if TYPE is an empty type: a structure, union, or array type
14161 whose non-padding subobjects (recursively) are themselves all empty. */
14163 static bool
14164 default_is_empty_type (tree type)
14166 if (RECORD_OR_UNION_TYPE_P (type))
14168 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14169 if (TREE_CODE (field) == FIELD_DECL
14170 && !DECL_PADDING_P (field)
14171 && !default_is_empty_type (TREE_TYPE (field)))
14172 return false;
14173 return true;
14175 else if (TREE_CODE (type) == ARRAY_TYPE)
14176 return (integer_minus_onep (array_type_nelts (type))
14177 || TYPE_DOMAIN (type) == NULL_TREE
14178 || default_is_empty_type (TREE_TYPE (type)));
14179 return false;
14182 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14183 that shouldn't be passed via stack. */
14185 bool
14186 default_is_empty_record (const_tree type)
14188 if (!abi_version_at_least (12))
14189 return false;
14191 if (type == error_mark_node)
14192 return false;
14194 if (TREE_ADDRESSABLE (type))
14195 return false;
14197 return default_is_empty_type (TYPE_MAIN_VARIANT (type));
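/* Editorial note (not part of the original file): under this definition,
   for instance, "struct S {};" and "struct T { struct S a[3]; };" count as
   empty records for argument passing, while "struct U { char c; };" does not,
   and a TREE_ADDRESSABLE type (e.g. a C++ class with a nontrivial copy
   constructor) is never treated as empty.  */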
14200 /* Like int_size_in_bytes, but handle empty records specially. */
14202 HOST_WIDE_INT
14203 arg_int_size_in_bytes (const_tree type)
14205 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14208 /* Like size_in_bytes, but handle empty records specially. */
14210 tree
14211 arg_size_in_bytes (const_tree type)
14213 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14216 /* Return true if an expression with CODE has to have the same result type as
14217 its first operand. */
14219 bool
14220 expr_type_first_operand_type_p (tree_code code)
14222 switch (code)
14224 case NEGATE_EXPR:
14225 case ABS_EXPR:
14226 case BIT_NOT_EXPR:
14227 case PAREN_EXPR:
14228 case CONJ_EXPR:
14230 case PLUS_EXPR:
14231 case MINUS_EXPR:
14232 case MULT_EXPR:
14233 case TRUNC_DIV_EXPR:
14234 case CEIL_DIV_EXPR:
14235 case FLOOR_DIV_EXPR:
14236 case ROUND_DIV_EXPR:
14237 case TRUNC_MOD_EXPR:
14238 case CEIL_MOD_EXPR:
14239 case FLOOR_MOD_EXPR:
14240 case ROUND_MOD_EXPR:
14241 case RDIV_EXPR:
14242 case EXACT_DIV_EXPR:
14243 case MIN_EXPR:
14244 case MAX_EXPR:
14245 case BIT_IOR_EXPR:
14246 case BIT_XOR_EXPR:
14247 case BIT_AND_EXPR:
14249 case LSHIFT_EXPR:
14250 case RSHIFT_EXPR:
14251 case LROTATE_EXPR:
14252 case RROTATE_EXPR:
14253 return true;
14255 default:
14256 return false;
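/* Editorial note (not part of the original file): for example, PLUS_EXPR and
   BIT_NOT_EXPR yield a result of the same type as their first operand, whereas
   comparisons such as LT_EXPR (which produce a boolean-like type) and
   conversions such as NOP_EXPR are deliberately not listed above.  */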
14260 /* List of pointer types used to declare builtins before we have seen their
14261 real declarations.
14263 Keep the size up to date in tree.h! */
14264 const builtin_structptr_type builtin_structptr_types[6] =
14266 { fileptr_type_node, ptr_type_node, "FILE" },
14267 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14268 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14269 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14270 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14271 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14274 #if CHECKING_P
14276 namespace selftest {
14278 /* Selftests for tree. */
14280 /* Verify that integer constants are sane. */
14282 static void
14283 test_integer_constants ()
14285 ASSERT_TRUE (integer_type_node != NULL);
14286 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14288 tree type = integer_type_node;
14290 tree zero = build_zero_cst (type);
14291 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14292 ASSERT_EQ (type, TREE_TYPE (zero));
14294 tree one = build_int_cst (type, 1);
14295 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14296 ASSERT_EQ (type, TREE_TYPE (one));
14299 /* Verify identifiers. */
14301 static void
14302 test_identifiers ()
14304 tree identifier = get_identifier ("foo");
14305 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14306 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14309 /* Verify LABEL_DECL. */
14311 static void
14312 test_labels ()
14314 tree identifier = get_identifier ("err");
14315 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14316 identifier, void_type_node);
14317 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14318 ASSERT_FALSE (FORCED_LABEL (label_decl));
14321 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14322 are given by VALS. */
14324 static tree
14325 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
14327 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
14328 tree_vector_builder builder (type, vals.length (), 1);
14329 builder.splice (vals);
14330 return builder.build ();
14333 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14335 static void
14336 check_vector_cst (vec<tree> expected, tree actual)
14338 ASSERT_KNOWN_EQ (expected.length (),
14339 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
14340 for (unsigned int i = 0; i < expected.length (); ++i)
14341 ASSERT_EQ (wi::to_wide (expected[i]),
14342 wi::to_wide (vector_cst_elt (actual, i)));
14345 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14346 and that its elements match EXPECTED. */
14348 static void
14349 check_vector_cst_duplicate (vec<tree> expected, tree actual,
14350 unsigned int npatterns)
14352 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14353 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
14354 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
14355 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
14356 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14357 check_vector_cst (expected, actual);
14360 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14361 and NPATTERNS background elements, and that its elements match
14362 EXPECTED. */
14364 static void
14365 check_vector_cst_fill (vec<tree> expected, tree actual,
14366 unsigned int npatterns)
14368 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14369 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
14370 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
14371 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14372 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14373 check_vector_cst (expected, actual);
14376 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14377 and that its elements match EXPECTED. */
14379 static void
14380 check_vector_cst_stepped (vec<tree> expected, tree actual,
14381 unsigned int npatterns)
14383 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14384 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
14385 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
14386 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14387 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
14388 check_vector_cst (expected, actual);
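/* Editorial note (not part of the original file): the three checkers above
   mirror the three VECTOR_CST encodings exercised below.  For instance,
   { 0, 1, 2, 3, 4, 5, 6, 7 } is one stepped pattern (npatterns == 1, three
   encoded elements per pattern), { 100, 100, ..., 100 } is one duplicated
   pattern (one encoded element per pattern), and
   { 41, 97, 100, 55, 100, 55, 100, 55 } uses two "fill" patterns (two encoded
   elements per pattern).  */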
14391 /* Test the creation of VECTOR_CSTs. */
14393 static void
14394 test_vector_cst_patterns ()
14396 auto_vec<tree, 8> elements (8);
14397 elements.quick_grow (8);
14398 tree element_type = build_nonstandard_integer_type (16, true);
14399 tree vector_type = build_vector_type (element_type, 8);
14401 /* Test a simple linear series with a base of 0 and a step of 1:
14402 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
14403 for (unsigned int i = 0; i < 8; ++i)
14404 elements[i] = build_int_cst (element_type, i);
14405 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 1);
14407 /* Try the same with the first element replaced by 100:
14408 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
14409 elements[0] = build_int_cst (element_type, 100);
14410 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 1);
14412 /* Try a series that wraps around.
14413 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
14414 for (unsigned int i = 1; i < 8; ++i)
14415 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
14416 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 1);
14418 /* Try a downward series:
14419 { 100, 79, 78, 77, 76, 75, 75, 73 }. */
14420 for (unsigned int i = 1; i < 8; ++i)
14421 elements[i] = build_int_cst (element_type, 80 - i);
14422 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 1);
14424 /* Try two interleaved series with different bases and steps:
14425 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
14426 elements[1] = build_int_cst (element_type, 53);
14427 for (unsigned int i = 2; i < 8; i += 2)
14429 elements[i] = build_int_cst (element_type, 70 - i * 2);
14430 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
14432 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 2);
14434 /* Try a duplicated value:
14435 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
14436 for (unsigned int i = 1; i < 8; ++i)
14437 elements[i] = elements[0];
14438 check_vector_cst_duplicate (elements,
14439 build_vector (vector_type, elements), 1);
14441 /* Try an interleaved duplicated value:
14442 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
14443 elements[1] = build_int_cst (element_type, 55);
14444 for (unsigned int i = 2; i < 8; ++i)
14445 elements[i] = elements[i - 2];
14446 check_vector_cst_duplicate (elements,
14447 build_vector (vector_type, elements), 2);
14449 /* Try a duplicated value with 2 exceptions
14450 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
14451 elements[0] = build_int_cst (element_type, 41);
14452 elements[1] = build_int_cst (element_type, 97);
14453 check_vector_cst_fill (elements, build_vector (vector_type, elements), 2);
14455 /* Try with and without a step
14456 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
14457 for (unsigned int i = 3; i < 8; i += 2)
14458 elements[i] = build_int_cst (element_type, i * 7);
14459 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 2);
14461 /* Try a fully-general constant:
14462 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
14463 elements[5] = build_int_cst (element_type, 9990);
14464 check_vector_cst_fill (elements, build_vector (vector_type, elements), 4);
14467 /* Run all of the selftests within this file. */
14469 void
14470 tree_c_tests ()
14472 test_integer_constants ();
14473 test_identifiers ();
14474 test_labels ();
14475 test_vector_cst_patterns ();
14478 } // namespace selftest
14480 #endif /* CHECKING_P */
14482 #include "gt-tree.h"