Make more use of VECTOR_CST_ENCODED_ELT
[official-gcc.git] / gcc / tree.c
blob 053670cb6b979fd26f913abcc84f6b6a2c310976
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "rtl.h"
68 #include "regs.h"
69 #include "tree-vector-builder.h"
71 /* Tree code classes. */
73 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
74 #define END_OF_BASE_TREE_CODES tcc_exceptional,
76 const enum tree_code_class tree_code_type[] = {
77 #include "all-tree.def"
80 #undef DEFTREECODE
81 #undef END_OF_BASE_TREE_CODES
83 /* Table indexed by tree code giving number of expression
84 operands beyond the fixed part of the node structure.
85 Not used for types or decls. */
87 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
88 #define END_OF_BASE_TREE_CODES 0,
90 const unsigned char tree_code_length[] = {
91 #include "all-tree.def"
94 #undef DEFTREECODE
95 #undef END_OF_BASE_TREE_CODES
97 /* Names of tree components.
98 Used for printing out the tree and error messages. */
99 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
100 #define END_OF_BASE_TREE_CODES "@dummy",
102 static const char *const tree_code_name[] = {
103 #include "all-tree.def"
106 #undef DEFTREECODE
107 #undef END_OF_BASE_TREE_CODES
109 /* Each tree code class has an associated string representation.
110 These must correspond to the tree_code_class entries. */
112 const char *const tree_code_class_strings[] =
114 "exceptional",
115 "constant",
116 "type",
117 "declaration",
118 "reference",
119 "comparison",
120 "unary",
121 "binary",
122 "statement",
123 "vl_exp",
124 "expression"
127 /* obstack.[ch] explicitly declined to prototype this. */
128 extern int _obstack_allocated_p (struct obstack *h, void *obj);
130 /* Statistics-gathering stuff. */
132 static int tree_code_counts[MAX_TREE_CODES];
133 int tree_node_counts[(int) all_kinds];
134 int tree_node_sizes[(int) all_kinds];
136 /* Keep in sync with tree.h:enum tree_node_kind. */
137 static const char * const tree_node_kind_names[] = {
138 "decls",
139 "types",
140 "blocks",
141 "stmts",
142 "refs",
143 "exprs",
144 "constants",
145 "identifiers",
146 "vecs",
147 "binfos",
148 "ssa names",
149 "constructors",
150 "random kinds",
151 "lang_decl kinds",
152 "lang_type kinds",
153 "omp clauses",
156 /* Unique id for next decl created. */
157 static GTY(()) int next_decl_uid;
158 /* Unique id for next type created. */
159 static GTY(()) unsigned next_type_uid = 1;
160 /* Unique id for next debug decl created. Use negative numbers,
161 to catch erroneous uses. */
162 static GTY(()) int next_debug_decl_uid;
164 /* Since we cannot rehash a type after it is in the table, we have to
165 keep the hash code. */
167 struct GTY((for_user)) type_hash {
168 unsigned long hash;
169 tree type;
172 /* Initial size of the hash table (rounded to next prime). */
173 #define TYPE_HASH_INITIAL_SIZE 1000
175 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
177 static hashval_t hash (type_hash *t) { return t->hash; }
178 static bool equal (type_hash *a, type_hash *b);
180 static int
181 keep_cache_entry (type_hash *&t)
183 return ggc_marked_p (t->type);
187 /* Now here is the hash table. When recording a type, it is added to
188 the slot whose index is the hash code. Note that the hash table is
189 used for several kinds of types (function types, array types and
190 array index range types, for now). While all these live in the
191 same table, they are completely independent, and the hash code is
192 computed differently for each of these. */
194 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
196 /* Hash table and temporary node for larger integer const values. */
197 static GTY (()) tree int_cst_node;
199 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
201 static hashval_t hash (tree t);
202 static bool equal (tree x, tree y);
205 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
207 /* Hash table for optimization flags and target option flags. Use the same
208 hash table for both sets of options. Nodes for building the current
209 optimization and target option nodes. The assumption is most of the time
210 the options created will already be in the hash table, so we avoid
211 allocating and freeing up a node repeatedly. */
212 static GTY (()) tree cl_optimization_node;
213 static GTY (()) tree cl_target_option_node;
215 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
217 static hashval_t hash (tree t);
218 static bool equal (tree x, tree y);
221 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
223 /* General tree->tree mapping structure for use in hash tables. */
226 static GTY ((cache))
227 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
229 static GTY ((cache))
230 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
232 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
234 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
236 static bool
237 equal (tree_vec_map *a, tree_vec_map *b)
239 return a->base.from == b->base.from;
242 static int
243 keep_cache_entry (tree_vec_map *&m)
245 return ggc_marked_p (m->base.from);
249 static GTY ((cache))
250 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
252 static void set_type_quals (tree, int);
253 static void print_type_hash_statistics (void);
254 static void print_debug_expr_statistics (void);
255 static void print_value_expr_statistics (void);
257 tree global_trees[TI_MAX];
258 tree integer_types[itk_none];
260 bool int_n_enabled_p[NUM_INT_N_ENTS];
261 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
263 bool tree_contains_struct[MAX_TREE_CODES][64];
265 /* Number of operands for each OpenMP clause. */
266 unsigned const char omp_clause_num_ops[] =
268 0, /* OMP_CLAUSE_ERROR */
269 1, /* OMP_CLAUSE_PRIVATE */
270 1, /* OMP_CLAUSE_SHARED */
271 1, /* OMP_CLAUSE_FIRSTPRIVATE */
272 2, /* OMP_CLAUSE_LASTPRIVATE */
273 5, /* OMP_CLAUSE_REDUCTION */
274 1, /* OMP_CLAUSE_COPYIN */
275 1, /* OMP_CLAUSE_COPYPRIVATE */
276 3, /* OMP_CLAUSE_LINEAR */
277 2, /* OMP_CLAUSE_ALIGNED */
278 1, /* OMP_CLAUSE_DEPEND */
279 1, /* OMP_CLAUSE_UNIFORM */
280 1, /* OMP_CLAUSE_TO_DECLARE */
281 1, /* OMP_CLAUSE_LINK */
282 2, /* OMP_CLAUSE_FROM */
283 2, /* OMP_CLAUSE_TO */
284 2, /* OMP_CLAUSE_MAP */
285 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
286 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
287 2, /* OMP_CLAUSE__CACHE_ */
288 2, /* OMP_CLAUSE_GANG */
289 1, /* OMP_CLAUSE_ASYNC */
290 1, /* OMP_CLAUSE_WAIT */
291 0, /* OMP_CLAUSE_AUTO */
292 0, /* OMP_CLAUSE_SEQ */
293 1, /* OMP_CLAUSE__LOOPTEMP_ */
294 1, /* OMP_CLAUSE_IF */
295 1, /* OMP_CLAUSE_NUM_THREADS */
296 1, /* OMP_CLAUSE_SCHEDULE */
297 0, /* OMP_CLAUSE_NOWAIT */
298 1, /* OMP_CLAUSE_ORDERED */
299 0, /* OMP_CLAUSE_DEFAULT */
300 3, /* OMP_CLAUSE_COLLAPSE */
301 0, /* OMP_CLAUSE_UNTIED */
302 1, /* OMP_CLAUSE_FINAL */
303 0, /* OMP_CLAUSE_MERGEABLE */
304 1, /* OMP_CLAUSE_DEVICE */
305 1, /* OMP_CLAUSE_DIST_SCHEDULE */
306 0, /* OMP_CLAUSE_INBRANCH */
307 0, /* OMP_CLAUSE_NOTINBRANCH */
308 1, /* OMP_CLAUSE_NUM_TEAMS */
309 1, /* OMP_CLAUSE_THREAD_LIMIT */
310 0, /* OMP_CLAUSE_PROC_BIND */
311 1, /* OMP_CLAUSE_SAFELEN */
312 1, /* OMP_CLAUSE_SIMDLEN */
313 0, /* OMP_CLAUSE_FOR */
314 0, /* OMP_CLAUSE_PARALLEL */
315 0, /* OMP_CLAUSE_SECTIONS */
316 0, /* OMP_CLAUSE_TASKGROUP */
317 1, /* OMP_CLAUSE_PRIORITY */
318 1, /* OMP_CLAUSE_GRAINSIZE */
319 1, /* OMP_CLAUSE_NUM_TASKS */
320 0, /* OMP_CLAUSE_NOGROUP */
321 0, /* OMP_CLAUSE_THREADS */
322 0, /* OMP_CLAUSE_SIMD */
323 1, /* OMP_CLAUSE_HINT */
324 0, /* OMP_CLAUSE_DEFAULTMAP */
325 1, /* OMP_CLAUSE__SIMDUID_ */
326 0, /* OMP_CLAUSE__SIMT_ */
327 0, /* OMP_CLAUSE_INDEPENDENT */
328 1, /* OMP_CLAUSE_WORKER */
329 1, /* OMP_CLAUSE_VECTOR */
330 1, /* OMP_CLAUSE_NUM_GANGS */
331 1, /* OMP_CLAUSE_NUM_WORKERS */
332 1, /* OMP_CLAUSE_VECTOR_LENGTH */
333 3, /* OMP_CLAUSE_TILE */
334 2, /* OMP_CLAUSE__GRIDDIM_ */
337 const char * const omp_clause_code_name[] =
339 "error_clause",
340 "private",
341 "shared",
342 "firstprivate",
343 "lastprivate",
344 "reduction",
345 "copyin",
346 "copyprivate",
347 "linear",
348 "aligned",
349 "depend",
350 "uniform",
351 "to",
352 "link",
353 "from",
354 "to",
355 "map",
356 "use_device_ptr",
357 "is_device_ptr",
358 "_cache_",
359 "gang",
360 "async",
361 "wait",
362 "auto",
363 "seq",
364 "_looptemp_",
365 "if",
366 "num_threads",
367 "schedule",
368 "nowait",
369 "ordered",
370 "default",
371 "collapse",
372 "untied",
373 "final",
374 "mergeable",
375 "device",
376 "dist_schedule",
377 "inbranch",
378 "notinbranch",
379 "num_teams",
380 "thread_limit",
381 "proc_bind",
382 "safelen",
383 "simdlen",
384 "for",
385 "parallel",
386 "sections",
387 "taskgroup",
388 "priority",
389 "grainsize",
390 "num_tasks",
391 "nogroup",
392 "threads",
393 "simd",
394 "hint",
395 "defaultmap",
396 "_simduid_",
397 "_simt_",
398 "independent",
399 "worker",
400 "vector",
401 "num_gangs",
402 "num_workers",
403 "vector_length",
404 "tile",
405 "_griddim_"
409 /* Return the tree node structure used by tree code CODE. */
411 static inline enum tree_node_structure_enum
412 tree_node_structure_for_code (enum tree_code code)
414 switch (TREE_CODE_CLASS (code))
416 case tcc_declaration:
418 switch (code)
420 case FIELD_DECL:
421 return TS_FIELD_DECL;
422 case PARM_DECL:
423 return TS_PARM_DECL;
424 case VAR_DECL:
425 return TS_VAR_DECL;
426 case LABEL_DECL:
427 return TS_LABEL_DECL;
428 case RESULT_DECL:
429 return TS_RESULT_DECL;
430 case DEBUG_EXPR_DECL:
431 return TS_DECL_WRTL;
432 case CONST_DECL:
433 return TS_CONST_DECL;
434 case TYPE_DECL:
435 return TS_TYPE_DECL;
436 case FUNCTION_DECL:
437 return TS_FUNCTION_DECL;
438 case TRANSLATION_UNIT_DECL:
439 return TS_TRANSLATION_UNIT_DECL;
440 default:
441 return TS_DECL_NON_COMMON;
444 case tcc_type:
445 return TS_TYPE_NON_COMMON;
446 case tcc_reference:
447 case tcc_comparison:
448 case tcc_unary:
449 case tcc_binary:
450 case tcc_expression:
451 case tcc_statement:
452 case tcc_vl_exp:
453 return TS_EXP;
454 default: /* tcc_constant and tcc_exceptional */
455 break;
457 switch (code)
459 /* tcc_constant cases. */
460 case VOID_CST: return TS_TYPED;
461 case INTEGER_CST: return TS_INT_CST;
462 case REAL_CST: return TS_REAL_CST;
463 case FIXED_CST: return TS_FIXED_CST;
464 case COMPLEX_CST: return TS_COMPLEX;
465 case VECTOR_CST: return TS_VECTOR;
466 case STRING_CST: return TS_STRING;
467 /* tcc_exceptional cases. */
468 case ERROR_MARK: return TS_COMMON;
469 case IDENTIFIER_NODE: return TS_IDENTIFIER;
470 case TREE_LIST: return TS_LIST;
471 case TREE_VEC: return TS_VEC;
472 case SSA_NAME: return TS_SSA_NAME;
473 case PLACEHOLDER_EXPR: return TS_COMMON;
474 case STATEMENT_LIST: return TS_STATEMENT_LIST;
475 case BLOCK: return TS_BLOCK;
476 case CONSTRUCTOR: return TS_CONSTRUCTOR;
477 case TREE_BINFO: return TS_BINFO;
478 case OMP_CLAUSE: return TS_OMP_CLAUSE;
479 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
480 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
482 default:
483 gcc_unreachable ();
488 /* Initialize tree_contains_struct to describe the hierarchy of tree
489 nodes. */
491 static void
492 initialize_tree_contains_struct (void)
494 unsigned i;
496 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
498 enum tree_code code;
499 enum tree_node_structure_enum ts_code;
501 code = (enum tree_code) i;
502 ts_code = tree_node_structure_for_code (code);
504 /* Mark the TS structure itself. */
505 tree_contains_struct[code][ts_code] = 1;
507 /* Mark all the structures that TS is derived from. */
508 switch (ts_code)
510 case TS_TYPED:
511 case TS_BLOCK:
512 case TS_OPTIMIZATION:
513 case TS_TARGET_OPTION:
514 MARK_TS_BASE (code);
515 break;
517 case TS_COMMON:
518 case TS_INT_CST:
519 case TS_REAL_CST:
520 case TS_FIXED_CST:
521 case TS_VECTOR:
522 case TS_STRING:
523 case TS_COMPLEX:
524 case TS_SSA_NAME:
525 case TS_CONSTRUCTOR:
526 case TS_EXP:
527 case TS_STATEMENT_LIST:
528 MARK_TS_TYPED (code);
529 break;
531 case TS_IDENTIFIER:
532 case TS_DECL_MINIMAL:
533 case TS_TYPE_COMMON:
534 case TS_LIST:
535 case TS_VEC:
536 case TS_BINFO:
537 case TS_OMP_CLAUSE:
538 MARK_TS_COMMON (code);
539 break;
541 case TS_TYPE_WITH_LANG_SPECIFIC:
542 MARK_TS_TYPE_COMMON (code);
543 break;
545 case TS_TYPE_NON_COMMON:
546 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
547 break;
549 case TS_DECL_COMMON:
550 MARK_TS_DECL_MINIMAL (code);
551 break;
553 case TS_DECL_WRTL:
554 case TS_CONST_DECL:
555 MARK_TS_DECL_COMMON (code);
556 break;
558 case TS_DECL_NON_COMMON:
559 MARK_TS_DECL_WITH_VIS (code);
560 break;
562 case TS_DECL_WITH_VIS:
563 case TS_PARM_DECL:
564 case TS_LABEL_DECL:
565 case TS_RESULT_DECL:
566 MARK_TS_DECL_WRTL (code);
567 break;
569 case TS_FIELD_DECL:
570 MARK_TS_DECL_COMMON (code);
571 break;
573 case TS_VAR_DECL:
574 MARK_TS_DECL_WITH_VIS (code);
575 break;
577 case TS_TYPE_DECL:
578 case TS_FUNCTION_DECL:
579 MARK_TS_DECL_NON_COMMON (code);
580 break;
582 case TS_TRANSLATION_UNIT_DECL:
583 MARK_TS_DECL_COMMON (code);
584 break;
586 default:
587 gcc_unreachable ();
591 /* Basic consistency checks for attributes used in fold. */
592 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
593 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
594 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
595 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
596 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
597 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
598 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
599 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
600 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
601 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
602 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
603 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
604 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
605 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
606 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
607 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
608 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
609 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
610 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
611 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
612 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
613 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
614 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
615 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
616 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
617 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
618 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
619 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
620 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
621 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
622 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
623 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
624 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
625 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
626 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
627 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
628 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
629 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
630 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
631 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
635 /* Init tree.c. */
637 void
638 init_ttree (void)
640 /* Initialize the hash table of types. */
641 type_hash_table
642 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
644 debug_expr_for_decl
645 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
647 value_expr_for_decl
648 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
650 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
652 int_cst_node = make_int_cst (1, 1);
654 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
656 cl_optimization_node = make_node (OPTIMIZATION_NODE);
657 cl_target_option_node = make_node (TARGET_OPTION_NODE);
659 /* Initialize the tree_contains_struct array. */
660 initialize_tree_contains_struct ();
661 lang_hooks.init_ts ();
665 /* The name of the object as the assembler will see it (but before any
666 translations made by ASM_OUTPUT_LABELREF). Often this is the same
667 as DECL_NAME. It is an IDENTIFIER_NODE. */
668 tree
669 decl_assembler_name (tree decl)
671 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
672 lang_hooks.set_decl_assembler_name (decl);
673 return DECL_ASSEMBLER_NAME_RAW (decl);
676 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
677 (either of which may be NULL). Inform the FE if this changes the
678 name. */
680 void
681 overwrite_decl_assembler_name (tree decl, tree name)
683 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
684 lang_hooks.overwrite_decl_assembler_name (decl, name);
687 /* When the target supports COMDAT groups, this indicates which group the
688 DECL is associated with. This can be either an IDENTIFIER_NODE or a
689 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
690 tree
691 decl_comdat_group (const_tree node)
693 struct symtab_node *snode = symtab_node::get (node);
694 if (!snode)
695 return NULL;
696 return snode->get_comdat_group ();
699 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
700 tree
701 decl_comdat_group_id (const_tree node)
703 struct symtab_node *snode = symtab_node::get (node);
704 if (!snode)
705 return NULL;
706 return snode->get_comdat_group_id ();
709 /* When the target supports named sections, return the section name of
710 NODE as a string, or NULL if it is in no section. */
711 const char *
712 decl_section_name (const_tree node)
714 struct symtab_node *snode = symtab_node::get (node);
715 if (!snode)
716 return NULL;
717 return snode->get_section ();
720 /* Set the section name of NODE to the string VALUE, or clear it
721 when VALUE is NULL. */
722 void
723 set_decl_section_name (tree node, const char *value)
725 struct symtab_node *snode;
727 if (value == NULL)
729 snode = symtab_node::get (node);
730 if (!snode)
731 return;
733 else if (VAR_P (node))
734 snode = varpool_node::get_create (node);
735 else
736 snode = cgraph_node::get_create (node);
737 snode->set_section (value);
740 /* Return TLS model of a variable NODE. */
741 enum tls_model
742 decl_tls_model (const_tree node)
744 struct varpool_node *snode = varpool_node::get (node);
745 if (!snode)
746 return TLS_MODEL_NONE;
747 return snode->tls_model;
750 /* Set TLS model of variable NODE to MODEL. */
751 void
752 set_decl_tls_model (tree node, enum tls_model model)
754 struct varpool_node *vnode;
756 if (model == TLS_MODEL_NONE)
758 vnode = varpool_node::get (node);
759 if (!vnode)
760 return;
762 else
763 vnode = varpool_node::get_create (node);
764 vnode->tls_model = model;
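/* Illustrative sketch, not part of the original tree.c: how a pass or
   front end might use the two helpers above on a global VAR_DECL.  The
   function name is made up for illustration.  */

static void
example_mark_thread_local (tree var)
{
  gcc_checking_assert (VAR_P (var));
  /* The TLS model lives on the varpool node rather than on the decl
     itself, which is why the helpers above go through varpool_node.  */
  set_decl_tls_model (var, TLS_MODEL_INITIAL_EXEC);
  gcc_checking_assert (decl_tls_model (var) == TLS_MODEL_INITIAL_EXEC);
}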
767 /* Compute the number of bytes occupied by a tree with code CODE.
768 This function cannot be used for nodes that have variable sizes,
769 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
770 size_t
771 tree_code_size (enum tree_code code)
773 switch (TREE_CODE_CLASS (code))
775 case tcc_declaration: /* A decl node */
776 switch (code)
778 case FIELD_DECL: return sizeof (tree_field_decl);
779 case PARM_DECL: return sizeof (tree_parm_decl);
780 case VAR_DECL: return sizeof (tree_var_decl);
781 case LABEL_DECL: return sizeof (tree_label_decl);
782 case RESULT_DECL: return sizeof (tree_result_decl);
783 case CONST_DECL: return sizeof (tree_const_decl);
784 case TYPE_DECL: return sizeof (tree_type_decl);
785 case FUNCTION_DECL: return sizeof (tree_function_decl);
786 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
787 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
788 case NAMESPACE_DECL:
789 case IMPORTED_DECL:
790 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
791 default:
792 gcc_checking_assert (code >= NUM_TREE_CODES);
793 return lang_hooks.tree_size (code);
796 case tcc_type: /* a type node */
797 switch (code)
799 case OFFSET_TYPE:
800 case ENUMERAL_TYPE:
801 case BOOLEAN_TYPE:
802 case INTEGER_TYPE:
803 case REAL_TYPE:
804 case POINTER_TYPE:
805 case REFERENCE_TYPE:
806 case NULLPTR_TYPE:
807 case FIXED_POINT_TYPE:
808 case COMPLEX_TYPE:
809 case VECTOR_TYPE:
810 case ARRAY_TYPE:
811 case RECORD_TYPE:
812 case UNION_TYPE:
813 case QUAL_UNION_TYPE:
814 case VOID_TYPE:
815 case POINTER_BOUNDS_TYPE:
816 case FUNCTION_TYPE:
817 case METHOD_TYPE:
818 case LANG_TYPE: return sizeof (tree_type_non_common);
819 default:
820 gcc_checking_assert (code >= NUM_TREE_CODES);
821 return lang_hooks.tree_size (code);
824 case tcc_reference: /* a reference */
825 case tcc_expression: /* an expression */
826 case tcc_statement: /* an expression with side effects */
827 case tcc_comparison: /* a comparison expression */
828 case tcc_unary: /* a unary arithmetic expression */
829 case tcc_binary: /* a binary arithmetic expression */
830 return (sizeof (struct tree_exp)
831 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
833 case tcc_constant: /* a constant */
834 switch (code)
836 case VOID_CST: return sizeof (tree_typed);
837 case INTEGER_CST: gcc_unreachable ();
838 case REAL_CST: return sizeof (tree_real_cst);
839 case FIXED_CST: return sizeof (tree_fixed_cst);
840 case COMPLEX_CST: return sizeof (tree_complex);
841 case VECTOR_CST: gcc_unreachable ();
842 case STRING_CST: gcc_unreachable ();
843 default:
844 gcc_checking_assert (code >= NUM_TREE_CODES);
845 return lang_hooks.tree_size (code);
848 case tcc_exceptional: /* something random, like an identifier. */
849 switch (code)
851 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
852 case TREE_LIST: return sizeof (tree_list);
854 case ERROR_MARK:
855 case PLACEHOLDER_EXPR: return sizeof (tree_common);
857 case TREE_VEC: gcc_unreachable ();
858 case OMP_CLAUSE: gcc_unreachable ();
860 case SSA_NAME: return sizeof (tree_ssa_name);
862 case STATEMENT_LIST: return sizeof (tree_statement_list);
863 case BLOCK: return sizeof (struct tree_block);
864 case CONSTRUCTOR: return sizeof (tree_constructor);
865 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
866 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
868 default:
869 gcc_checking_assert (code >= NUM_TREE_CODES);
870 return lang_hooks.tree_size (code);
873 default:
874 gcc_unreachable ();
878 /* Compute the number of bytes occupied by NODE. This routine only
879 looks at TREE_CODE, except for those nodes that have variable sizes. */
880 size_t
881 tree_size (const_tree node)
883 const enum tree_code code = TREE_CODE (node);
884 switch (code)
886 case INTEGER_CST:
887 return (sizeof (struct tree_int_cst)
888 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
890 case TREE_BINFO:
891 return (offsetof (struct tree_binfo, base_binfos)
892 + vec<tree, va_gc>
893 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
895 case TREE_VEC:
896 return (sizeof (struct tree_vec)
897 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
899 case VECTOR_CST:
900 return (sizeof (struct tree_vector)
901 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
903 case STRING_CST:
904 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
906 case OMP_CLAUSE:
907 return (sizeof (struct tree_omp_clause)
908 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
909 * sizeof (tree));
911 default:
912 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
913 return (sizeof (struct tree_exp)
914 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
915 else
916 return tree_code_size (code);
920 /* Record interesting allocation statistics for a tree node with CODE
921 and LENGTH. */
923 static void
924 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
925 size_t length ATTRIBUTE_UNUSED)
927 enum tree_code_class type = TREE_CODE_CLASS (code);
928 tree_node_kind kind;
930 if (!GATHER_STATISTICS)
931 return;
933 switch (type)
935 case tcc_declaration: /* A decl node */
936 kind = d_kind;
937 break;
939 case tcc_type: /* a type node */
940 kind = t_kind;
941 break;
943 case tcc_statement: /* an expression with side effects */
944 kind = s_kind;
945 break;
947 case tcc_reference: /* a reference */
948 kind = r_kind;
949 break;
951 case tcc_expression: /* an expression */
952 case tcc_comparison: /* a comparison expression */
953 case tcc_unary: /* a unary arithmetic expression */
954 case tcc_binary: /* a binary arithmetic expression */
955 kind = e_kind;
956 break;
958 case tcc_constant: /* a constant */
959 kind = c_kind;
960 break;
962 case tcc_exceptional: /* something random, like an identifier. */
963 switch (code)
965 case IDENTIFIER_NODE:
966 kind = id_kind;
967 break;
969 case TREE_VEC:
970 kind = vec_kind;
971 break;
973 case TREE_BINFO:
974 kind = binfo_kind;
975 break;
977 case SSA_NAME:
978 kind = ssa_name_kind;
979 break;
981 case BLOCK:
982 kind = b_kind;
983 break;
985 case CONSTRUCTOR:
986 kind = constr_kind;
987 break;
989 case OMP_CLAUSE:
990 kind = omp_clause_kind;
991 break;
993 default:
994 kind = x_kind;
995 break;
997 break;
999 case tcc_vl_exp:
1000 kind = e_kind;
1001 break;
1003 default:
1004 gcc_unreachable ();
1007 tree_code_counts[(int) code]++;
1008 tree_node_counts[(int) kind]++;
1009 tree_node_sizes[(int) kind] += length;
1012 /* Allocate and return a new UID from the DECL_UID namespace. */
1015 allocate_decl_uid (void)
1017 return next_decl_uid++;
1020 /* Return a newly allocated node of code CODE. For decl and type
1021 nodes, some other fields are initialized. The rest of the node is
1022 initialized to zero. This function cannot be used for TREE_VEC,
1023 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1024 tree_code_size.
1026 Achoo! I got a code in the node. */
1028 tree
1029 make_node (enum tree_code code MEM_STAT_DECL)
1031 tree t;
1032 enum tree_code_class type = TREE_CODE_CLASS (code);
1033 size_t length = tree_code_size (code);
1035 record_node_allocation_statistics (code, length);
1037 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1038 TREE_SET_CODE (t, code);
1040 switch (type)
1042 case tcc_statement:
1043 TREE_SIDE_EFFECTS (t) = 1;
1044 break;
1046 case tcc_declaration:
1047 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1049 if (code == FUNCTION_DECL)
1051 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1052 SET_DECL_MODE (t, FUNCTION_MODE);
1054 else
1055 SET_DECL_ALIGN (t, 1);
1057 DECL_SOURCE_LOCATION (t) = input_location;
1058 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1059 DECL_UID (t) = --next_debug_decl_uid;
1060 else
1062 DECL_UID (t) = allocate_decl_uid ();
1063 SET_DECL_PT_UID (t, -1);
1065 if (TREE_CODE (t) == LABEL_DECL)
1066 LABEL_DECL_UID (t) = -1;
1068 break;
1070 case tcc_type:
1071 TYPE_UID (t) = next_type_uid++;
1072 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1073 TYPE_USER_ALIGN (t) = 0;
1074 TYPE_MAIN_VARIANT (t) = t;
1075 TYPE_CANONICAL (t) = t;
1077 /* Default to no attributes for type, but let target change that. */
1078 TYPE_ATTRIBUTES (t) = NULL_TREE;
1079 targetm.set_default_type_attributes (t);
1081 /* We have not yet computed the alias set for this type. */
1082 TYPE_ALIAS_SET (t) = -1;
1083 break;
1085 case tcc_constant:
1086 TREE_CONSTANT (t) = 1;
1087 break;
1089 case tcc_expression:
1090 switch (code)
1092 case INIT_EXPR:
1093 case MODIFY_EXPR:
1094 case VA_ARG_EXPR:
1095 case PREDECREMENT_EXPR:
1096 case PREINCREMENT_EXPR:
1097 case POSTDECREMENT_EXPR:
1098 case POSTINCREMENT_EXPR:
1099 /* All of these have side-effects, no matter what their
1100 operands are. */
1101 TREE_SIDE_EFFECTS (t) = 1;
1102 break;
1104 default:
1105 break;
1107 break;
1109 case tcc_exceptional:
1110 switch (code)
1112 case TARGET_OPTION_NODE:
1113 TREE_TARGET_OPTION(t)
1114 = ggc_cleared_alloc<struct cl_target_option> ();
1115 break;
1117 case OPTIMIZATION_NODE:
1118 TREE_OPTIMIZATION (t)
1119 = ggc_cleared_alloc<struct cl_optimization> ();
1120 break;
1122 default:
1123 break;
1125 break;
1127 default:
1128 /* Other classes need no special treatment. */
1129 break;
1132 return t;
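/* Illustrative sketch, not part of the original tree.c: decls are rarely
   created with make_node directly; build_decl (declared in tree.h) calls
   it and also fills in the location, name and type.  The helper and the
   identifier below are made up for illustration.  */

static tree
example_make_artificial_var (void)
{
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                         get_identifier ("example_tmp"), integer_type_node);
  /* make_node already assigned a fresh DECL_UID and default alignment;
     the caller fills in the remaining fields it cares about.  */
  DECL_ARTIFICIAL (var) = 1;
  return var;
}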
1135 /* Free tree node. */
1137 void
1138 free_node (tree node)
1140 enum tree_code code = TREE_CODE (node);
1141 if (GATHER_STATISTICS)
1143 tree_code_counts[(int) TREE_CODE (node)]--;
1144 tree_node_counts[(int) t_kind]--;
1145 tree_node_sizes[(int) t_kind] -= tree_size (node);
1147 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1148 vec_free (CONSTRUCTOR_ELTS (node));
1149 else if (code == BLOCK)
1150 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1151 else if (code == TREE_BINFO)
1152 vec_free (BINFO_BASE_ACCESSES (node));
1153 ggc_free (node);
1156 /* Return a new node with the same contents as NODE except that its
1157 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1159 tree
1160 copy_node (tree node MEM_STAT_DECL)
1162 tree t;
1163 enum tree_code code = TREE_CODE (node);
1164 size_t length;
1166 gcc_assert (code != STATEMENT_LIST);
1168 length = tree_size (node);
1169 record_node_allocation_statistics (code, length);
1170 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1171 memcpy (t, node, length);
1173 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1174 TREE_CHAIN (t) = 0;
1175 TREE_ASM_WRITTEN (t) = 0;
1176 TREE_VISITED (t) = 0;
1178 if (TREE_CODE_CLASS (code) == tcc_declaration)
1180 if (code == DEBUG_EXPR_DECL)
1181 DECL_UID (t) = --next_debug_decl_uid;
1182 else
1184 DECL_UID (t) = allocate_decl_uid ();
1185 if (DECL_PT_UID_SET_P (node))
1186 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1188 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1189 && DECL_HAS_VALUE_EXPR_P (node))
1191 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1192 DECL_HAS_VALUE_EXPR_P (t) = 1;
1194 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1195 if (VAR_P (node))
1197 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1198 t->decl_with_vis.symtab_node = NULL;
1200 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1202 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1203 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1205 if (TREE_CODE (node) == FUNCTION_DECL)
1207 DECL_STRUCT_FUNCTION (t) = NULL;
1208 t->decl_with_vis.symtab_node = NULL;
1211 else if (TREE_CODE_CLASS (code) == tcc_type)
1213 TYPE_UID (t) = next_type_uid++;
1214 /* The following is so that the debug code for
1215 the copy is different from the original type.
1216 The two statements usually duplicate each other
1217 (because they clear fields of the same union),
1218 but the optimizer should catch that. */
1219 TYPE_SYMTAB_ADDRESS (t) = 0;
1220 TYPE_SYMTAB_DIE (t) = 0;
1222 /* Do not copy the values cache. */
1223 if (TYPE_CACHED_VALUES_P (t))
1225 TYPE_CACHED_VALUES_P (t) = 0;
1226 TYPE_CACHED_VALUES (t) = NULL_TREE;
1229 else if (code == TARGET_OPTION_NODE)
1231 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1232 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1233 sizeof (struct cl_target_option));
1235 else if (code == OPTIMIZATION_NODE)
1237 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1238 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1239 sizeof (struct cl_optimization));
1242 return t;
1245 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1246 For example, this can copy a list made of TREE_LIST nodes. */
1248 tree
1249 copy_list (tree list)
1251 tree head;
1252 tree prev, next;
1254 if (list == 0)
1255 return 0;
1257 head = prev = copy_node (list);
1258 next = TREE_CHAIN (list);
1259 while (next)
1261 TREE_CHAIN (prev) = copy_node (next);
1262 prev = TREE_CHAIN (prev);
1263 next = TREE_CHAIN (next);
1265 return head;
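/* Illustrative sketch, not part of the original tree.c: building a short
   TREE_LIST with tree_cons and duplicating it.  copy_list copies the list
   cells only; the TREE_PURPOSE and TREE_VALUE trees are shared.  The
   function name and identifiers are made up for illustration.  */

static tree
example_copy_list (void)
{
  tree list = tree_cons (get_identifier ("first"), integer_zero_node,
                         tree_cons (get_identifier ("second"),
                                    integer_one_node, NULL_TREE));
  tree copy = copy_list (list);
  gcc_checking_assert (copy != list
                       && TREE_VALUE (copy) == TREE_VALUE (list));
  return copy;
}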
1269 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1270 INTEGER_CST with value CST and type TYPE. */
1272 static unsigned int
1273 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1275 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1276 /* We need extra HWIs if CST is an unsigned integer with its
1277 upper bit set. */
1278 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1279 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1280 return cst.get_len ();
1283 /* Return a new INTEGER_CST with value CST and type TYPE. */
1285 static tree
1286 build_new_int_cst (tree type, const wide_int &cst)
1288 unsigned int len = cst.get_len ();
1289 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1290 tree nt = make_int_cst (len, ext_len);
1292 if (len < ext_len)
1294 --ext_len;
1295 TREE_INT_CST_ELT (nt, ext_len)
1296 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1297 for (unsigned int i = len; i < ext_len; ++i)
1298 TREE_INT_CST_ELT (nt, i) = -1;
1300 else if (TYPE_UNSIGNED (type)
1301 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1303 len--;
1304 TREE_INT_CST_ELT (nt, len)
1305 = zext_hwi (cst.elt (len),
1306 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1309 for (unsigned int i = 0; i < len; i++)
1310 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1311 TREE_TYPE (nt) = type;
1312 return nt;
1315 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1317 tree
1318 build_int_cst (tree type, HOST_WIDE_INT low)
1320 /* Support legacy code. */
1321 if (!type)
1322 type = integer_type_node;
1324 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1327 tree
1328 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1330 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1333 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1335 tree
1336 build_int_cst_type (tree type, HOST_WIDE_INT low)
1338 gcc_assert (type);
1339 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
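/* Illustrative sketch, not part of the original tree.c: build_int_cst
   sign-extends its HOST_WIDE_INT argument to the precision of TYPE,
   whereas build_int_cstu zero-extends it, so -1 and 0xffff denote the
   same 16-bit unsigned constant, assuming a target where short is 16
   bits.  The function name is made up for illustration.  */

static void
example_int_cst_extension (void)
{
  tree all_ones = build_int_cst (short_unsigned_type_node, -1);
  tree max16 = build_int_cstu (short_unsigned_type_node, 0xffff);
  gcc_checking_assert (tree_int_cst_equal (all_ones, max16));
}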
1342 /* Construct a tree of type TYPE with the value given by CST. The signedness
1343 of CST is assumed to be the same as the signedness of TYPE. */
1345 tree
1346 double_int_to_tree (tree type, double_int cst)
1348 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1351 /* We force the wide_int CST to the range of the type TYPE by sign or
1352 zero extending it. OVERFLOWABLE indicates if we are interested in
1353 overflow of the value: when >0 we are only interested in signed
1354 overflow, when <0 we are interested in any overflow. OVERFLOWED
1355 indicates whether overflow has already occurred. We force
1356 CST's value to be within the range of TYPE (by setting to 0 or 1 all
1357 the bits outside the type's range). We set TREE_OVERFLOW if
1358 OVERFLOWED is nonzero,
1359 or OVERFLOWABLE is >0 and signed overflow occurs,
1360 or OVERFLOWABLE is <0 and any overflow occurs.
1362 We return a new tree node for the extended wide_int. The node
1363 is shared if no overflow flags are set. */
1366 tree
1367 force_fit_type (tree type, const wide_int_ref &cst,
1368 int overflowable, bool overflowed)
1370 signop sign = TYPE_SIGN (type);
1372 /* If we need to set overflow flags, return a new unshared node. */
1373 if (overflowed || !wi::fits_to_tree_p (cst, type))
1375 if (overflowed
1376 || overflowable < 0
1377 || (overflowable > 0 && sign == SIGNED))
1379 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1380 tree t = build_new_int_cst (type, tmp);
1381 TREE_OVERFLOW (t) = 1;
1382 return t;
1386 /* Else build a shared node. */
1387 return wide_int_to_tree (type, cst);
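/* Illustrative sketch, not part of the original tree.c: a caller whose
   constant arithmetic wrapped around can use force_fit_type to get an
   INTEGER_CST that carries TREE_OVERFLOW.  The function name is made up
   for illustration.  */

static tree
example_overflowed_int_max_plus_one (void)
{
  /* The addition wraps at the precision of the type; passing
     OVERFLOWED = true records that overflow happened.  */
  wide_int wrapped = wi::to_wide (TYPE_MAX_VALUE (integer_type_node)) + 1;
  tree t = force_fit_type (integer_type_node, wrapped, 0, true);
  gcc_checking_assert (TREE_OVERFLOW (t));
  return t;
}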
1390 /* These are the hash table functions for the hash table of INTEGER_CST
1391 nodes of a sizetype. */
1393 /* Return the hash code for X, an INTEGER_CST. */
1395 hashval_t
1396 int_cst_hasher::hash (tree x)
1398 const_tree const t = x;
1399 hashval_t code = TYPE_UID (TREE_TYPE (t));
1400 int i;
1402 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1403 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1405 return code;
1408 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1409 is the same as that given by *Y, also an INTEGER_CST tree node. */
1411 bool
1412 int_cst_hasher::equal (tree x, tree y)
1414 const_tree const xt = x;
1415 const_tree const yt = y;
1417 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1418 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1419 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1420 return false;
1422 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1423 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1424 return false;
1426 return true;
1429 /* Create an INT_CST node of TYPE and value CST.
1430 The returned node is always shared. For small integers we use a
1431 per-type vector cache, for larger ones we use a single hash table.
1432 The value is extended from its precision according to the sign of
1433 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1434 the upper bits and ensures that hashing and value equality based
1435 upon the underlying HOST_WIDE_INTs works without masking. */
1437 tree
1438 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1440 tree t;
1441 int ix = -1;
1442 int limit = 0;
1444 gcc_assert (type);
1445 unsigned int prec = TYPE_PRECISION (type);
1446 signop sgn = TYPE_SIGN (type);
1448 /* Verify that everything is canonical. */
1449 int l = pcst.get_len ();
1450 if (l > 1)
1452 if (pcst.elt (l - 1) == 0)
1453 gcc_checking_assert (pcst.elt (l - 2) < 0);
1454 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1455 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1458 wide_int cst = wide_int::from (pcst, prec, sgn);
1459 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1461 if (ext_len == 1)
1463 /* We just need to store a single HOST_WIDE_INT. */
1464 HOST_WIDE_INT hwi;
1465 if (TYPE_UNSIGNED (type))
1466 hwi = cst.to_uhwi ();
1467 else
1468 hwi = cst.to_shwi ();
1470 switch (TREE_CODE (type))
1472 case NULLPTR_TYPE:
1473 gcc_assert (hwi == 0);
1474 /* Fallthru. */
1476 case POINTER_TYPE:
1477 case REFERENCE_TYPE:
1478 case POINTER_BOUNDS_TYPE:
1479 /* Cache NULL pointer and zero bounds. */
1480 if (hwi == 0)
1482 limit = 1;
1483 ix = 0;
1485 break;
1487 case BOOLEAN_TYPE:
1488 /* Cache false or true. */
1489 limit = 2;
1490 if (IN_RANGE (hwi, 0, 1))
1491 ix = hwi;
1492 break;
1494 case INTEGER_TYPE:
1495 case OFFSET_TYPE:
1496 if (TYPE_SIGN (type) == UNSIGNED)
1498 /* Cache [0, N). */
1499 limit = INTEGER_SHARE_LIMIT;
1500 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1501 ix = hwi;
1503 else
1505 /* Cache [-1, N). */
1506 limit = INTEGER_SHARE_LIMIT + 1;
1507 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1508 ix = hwi + 1;
1510 break;
1512 case ENUMERAL_TYPE:
1513 break;
1515 default:
1516 gcc_unreachable ();
1519 if (ix >= 0)
1521 /* Look for it in the type's vector of small shared ints. */
1522 if (!TYPE_CACHED_VALUES_P (type))
1524 TYPE_CACHED_VALUES_P (type) = 1;
1525 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1528 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1529 if (t)
1530 /* Make sure no one is clobbering the shared constant. */
1531 gcc_checking_assert (TREE_TYPE (t) == type
1532 && TREE_INT_CST_NUNITS (t) == 1
1533 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1534 && TREE_INT_CST_EXT_NUNITS (t) == 1
1535 && TREE_INT_CST_ELT (t, 0) == hwi);
1536 else
1538 /* Create a new shared int. */
1539 t = build_new_int_cst (type, cst);
1540 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1543 else
1545 /* Use the cache of larger shared ints, using int_cst_node as
1546 a temporary. */
1548 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1549 TREE_TYPE (int_cst_node) = type;
1551 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1552 t = *slot;
1553 if (!t)
1555 /* Insert this one into the hash table. */
1556 t = int_cst_node;
1557 *slot = t;
1558 /* Make a new node for next time round. */
1559 int_cst_node = make_int_cst (1, 1);
1563 else
1565 /* The value either hashes properly or we drop it on the floor
1566 for the gc to take care of. There will not be enough of them
1567 to worry about. */
1569 tree nt = build_new_int_cst (type, cst);
1570 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1571 t = *slot;
1572 if (!t)
1574 /* Insert this one into the hash table. */
1575 t = nt;
1576 *slot = t;
1578 else
1579 ggc_free (nt);
1582 return t;
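/* Illustrative sketch, not part of the original tree.c: because
   wide_int_to_tree always returns a shared node, two requests for the
   same value in the same type give back the very same tree, so pointer
   comparison suffices for such constants.  The function name is made up
   for illustration.  */

static void
example_int_cst_sharing (void)
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (integer_type_node, 7);
  gcc_checking_assert (a == b);
}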
1585 void
1586 cache_integer_cst (tree t)
1588 tree type = TREE_TYPE (t);
1589 int ix = -1;
1590 int limit = 0;
1591 int prec = TYPE_PRECISION (type);
1593 gcc_assert (!TREE_OVERFLOW (t));
1595 switch (TREE_CODE (type))
1597 case NULLPTR_TYPE:
1598 gcc_assert (integer_zerop (t));
1599 /* Fallthru. */
1601 case POINTER_TYPE:
1602 case REFERENCE_TYPE:
1603 /* Cache NULL pointer. */
1604 if (integer_zerop (t))
1606 limit = 1;
1607 ix = 0;
1609 break;
1611 case BOOLEAN_TYPE:
1612 /* Cache false or true. */
1613 limit = 2;
1614 if (wi::ltu_p (wi::to_wide (t), 2))
1615 ix = TREE_INT_CST_ELT (t, 0);
1616 break;
1618 case INTEGER_TYPE:
1619 case OFFSET_TYPE:
1620 if (TYPE_UNSIGNED (type))
1622 /* Cache 0..N */
1623 limit = INTEGER_SHARE_LIMIT;
1625 /* This is a little hokey, but if the prec is smaller than
1626 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1627 obvious test will not get the correct answer. */
1628 if (prec < HOST_BITS_PER_WIDE_INT)
1630 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1631 ix = tree_to_uhwi (t);
1633 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1634 ix = tree_to_uhwi (t);
1636 else
1638 /* Cache -1..N */
1639 limit = INTEGER_SHARE_LIMIT + 1;
1641 if (integer_minus_onep (t))
1642 ix = 0;
1643 else if (!wi::neg_p (wi::to_wide (t)))
1645 if (prec < HOST_BITS_PER_WIDE_INT)
1647 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1648 ix = tree_to_shwi (t) + 1;
1650 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1651 ix = tree_to_shwi (t) + 1;
1654 break;
1656 case ENUMERAL_TYPE:
1657 break;
1659 default:
1660 gcc_unreachable ();
1663 if (ix >= 0)
1665 /* Look for it in the type's vector of small shared ints. */
1666 if (!TYPE_CACHED_VALUES_P (type))
1668 TYPE_CACHED_VALUES_P (type) = 1;
1669 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1672 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1673 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1675 else
1677 /* Use the cache of larger shared ints. */
1678 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1679 /* If there is already an entry for the number verify it's the
1680 same. */
1681 if (*slot)
1682 gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
1683 else
1684 /* Otherwise insert this one into the hash table. */
1685 *slot = t;
1690 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1691 and the rest are zeros. */
1693 tree
1694 build_low_bits_mask (tree type, unsigned bits)
1696 gcc_assert (bits <= TYPE_PRECISION (type));
1698 return wide_int_to_tree (type, wi::mask (bits, false,
1699 TYPE_PRECISION (type)));
1702 /* Checks that X is an integer constant that can be expressed in (unsigned)
1703 HOST_WIDE_INT without loss of precision. */
1705 bool
1706 cst_and_fits_in_hwi (const_tree x)
1708 return (TREE_CODE (x) == INTEGER_CST
1709 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
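/* Illustrative sketch, not part of the original tree.c: the usual pattern
   is to test with cst_and_fits_in_hwi before extracting the value with
   int_cst_value.  The helper name is made up for illustration.  */

static bool
example_extract_hwi (const_tree x, HOST_WIDE_INT *value)
{
  if (!cst_and_fits_in_hwi (x))
    return false;
  *value = int_cst_value (x);
  return true;
}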
1712 /* Build a newly constructed VECTOR_CST with the given values of
1713 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1715 tree
1716 make_vector (unsigned log2_npatterns,
1717 unsigned int nelts_per_pattern MEM_STAT_DECL)
1719 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1720 tree t;
1721 unsigned npatterns = 1 << log2_npatterns;
1722 unsigned encoded_nelts = npatterns * nelts_per_pattern;
1723 unsigned length = (sizeof (struct tree_vector)
1724 + (encoded_nelts - 1) * sizeof (tree));
1726 record_node_allocation_statistics (VECTOR_CST, length);
1728 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1730 TREE_SET_CODE (t, VECTOR_CST);
1731 TREE_CONSTANT (t) = 1;
1732 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1733 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1735 return t;
1738 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1739 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1741 tree
1742 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1744 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1745 unsigned HOST_WIDE_INT idx;
1746 tree value;
1748 tree_vector_builder vec (type, nelts, 1);
1749 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1751 if (TREE_CODE (value) == VECTOR_CST)
1752 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
1753 vec.quick_push (VECTOR_CST_ELT (value, i));
1754 else
1755 vec.quick_push (value);
1757 while (vec.length () < nelts)
1758 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1760 return vec.build ();
1763 /* Build a vector of type VECTYPE where all the elements are SCs. */
1764 tree
1765 build_vector_from_val (tree vectype, tree sc)
1767 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1769 if (sc == error_mark_node)
1770 return sc;
1772 /* Verify that the vector type is suitable for SC. Note that there
1773 is some inconsistency in the type-system with respect to restrict
1774 qualifications of pointers. Vector types always have a main-variant
1775 element type and the qualification is applied to the vector-type.
1776 So TREE_TYPE (vector-type) does not return a properly qualified
1777 vector element-type. */
1778 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1779 TREE_TYPE (vectype)));
1781 if (CONSTANT_CLASS_P (sc))
1783 tree_vector_builder v (vectype, 1, 1);
1784 v.quick_push (sc);
1785 return v.build ();
1787 else
1789 vec<constructor_elt, va_gc> *v;
1790 vec_alloc (v, nunits);
1791 for (i = 0; i < nunits; ++i)
1792 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1793 return build_constructor (vectype, v);
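/* Illustrative sketch, not part of the original tree.c: a uniform vector
   built from a constant element is a VECTOR_CST encoded as one pattern
   with one element per pattern, so only a single element is stored
   explicitly, however many elements the vector type has.  VECTYPE is
   assumed to be an integer or floating-point vector type; the function
   name is made up for illustration.  */

static void
example_uniform_vector_encoding (tree vectype)
{
  tree elt = build_one_cst (TREE_TYPE (vectype));
  tree vec = build_vector_from_val (vectype, elt);
  /* ELT is a constant, so we get a VECTOR_CST rather than a CONSTRUCTOR.  */
  gcc_checking_assert (TREE_CODE (vec) == VECTOR_CST
                       && vector_cst_encoded_nelts (vec) == 1
                       && VECTOR_CST_ENCODED_ELT (vec, 0) == elt);
}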
1797 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1798 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1800 void
1801 recompute_constructor_flags (tree c)
1803 unsigned int i;
1804 tree val;
1805 bool constant_p = true;
1806 bool side_effects_p = false;
1807 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1809 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1811 /* Mostly ctors will have elts that don't have side-effects, so
1812 the usual case is to scan all the elements. Hence a single
1813 loop for both const and side effects, rather than one loop
1814 each (with early outs). */
1815 if (!TREE_CONSTANT (val))
1816 constant_p = false;
1817 if (TREE_SIDE_EFFECTS (val))
1818 side_effects_p = true;
1821 TREE_SIDE_EFFECTS (c) = side_effects_p;
1822 TREE_CONSTANT (c) = constant_p;
1825 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1826 CONSTRUCTOR C. */
1828 void
1829 verify_constructor_flags (tree c)
1831 unsigned int i;
1832 tree val;
1833 bool constant_p = TREE_CONSTANT (c);
1834 bool side_effects_p = TREE_SIDE_EFFECTS (c);
1835 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1837 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1839 if (constant_p && !TREE_CONSTANT (val))
1840 internal_error ("non-constant element in constant CONSTRUCTOR");
1841 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
1842 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
1846 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1847 are in the vec pointed to by VALS. */
1848 tree
1849 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1851 tree c = make_node (CONSTRUCTOR);
1853 TREE_TYPE (c) = type;
1854 CONSTRUCTOR_ELTS (c) = vals;
1856 recompute_constructor_flags (c);
1858 return c;
1861 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1862 INDEX and VALUE. */
1863 tree
1864 build_constructor_single (tree type, tree index, tree value)
1866 vec<constructor_elt, va_gc> *v;
1867 constructor_elt elt = {index, value};
1869 vec_alloc (v, 1);
1870 v->quick_push (elt);
1872 return build_constructor (type, v);
1876 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1877 are in a list pointed to by VALS. */
1878 tree
1879 build_constructor_from_list (tree type, tree vals)
1881 tree t;
1882 vec<constructor_elt, va_gc> *v = NULL;
1884 if (vals)
1886 vec_alloc (v, list_length (vals));
1887 for (t = vals; t; t = TREE_CHAIN (t))
1888 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1891 return build_constructor (type, v);
1894 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1895 of elements, provided as index/value pairs. */
1897 tree
1898 build_constructor_va (tree type, int nelts, ...)
1900 vec<constructor_elt, va_gc> *v = NULL;
1901 va_list p;
1903 va_start (p, nelts);
1904 vec_alloc (v, nelts);
1905 while (nelts--)
1907 tree index = va_arg (p, tree);
1908 tree value = va_arg (p, tree);
1909 CONSTRUCTOR_APPEND_ELT (v, index, value);
1911 va_end (p);
1912 return build_constructor (type, v);
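/* Illustrative sketch, not part of the original tree.c: building a two
   element initializer with explicit indices.  TYPE is assumed to be an
   array type with at least two elements; the function name is made up
   for illustration.  */

static tree
example_two_element_ctor (tree type)
{
  return build_constructor_va (type, 2,
                               size_int (0), integer_zero_node,
                               size_int (1), integer_one_node);
}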
1915 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1917 tree
1918 build_fixed (tree type, FIXED_VALUE_TYPE f)
1920 tree v;
1921 FIXED_VALUE_TYPE *fp;
1923 v = make_node (FIXED_CST);
1924 fp = ggc_alloc<fixed_value> ();
1925 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1927 TREE_TYPE (v) = type;
1928 TREE_FIXED_CST_PTR (v) = fp;
1929 return v;
1932 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1934 tree
1935 build_real (tree type, REAL_VALUE_TYPE d)
1937 tree v;
1938 REAL_VALUE_TYPE *dp;
1939 int overflow = 0;
1941 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1942 Consider doing it via real_convert now. */
1944 v = make_node (REAL_CST);
1945 dp = ggc_alloc<real_value> ();
1946 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1948 TREE_TYPE (v) = type;
1949 TREE_REAL_CST_PTR (v) = dp;
1950 TREE_OVERFLOW (v) = overflow;
1951 return v;
1954 /* Like build_real, but first truncate D to the type. */
1956 tree
1957 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1959 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1962 /* Return a new REAL_CST node whose type is TYPE
1963 and whose value is the integer value of the INTEGER_CST node I. */
1965 REAL_VALUE_TYPE
1966 real_value_from_int_cst (const_tree type, const_tree i)
1968 REAL_VALUE_TYPE d;
1970 /* Clear all bits of the real value type so that we can later do
1971 bitwise comparisons to see if two values are the same. */
1972 memset (&d, 0, sizeof d);
1974 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
1975 TYPE_SIGN (TREE_TYPE (i)));
1976 return d;
1979 /* Given a tree representing an integer constant I, return a tree
1980 representing the same value as a floating-point constant of type TYPE. */
1982 tree
1983 build_real_from_int_cst (tree type, const_tree i)
1985 tree v;
1986 int overflow = TREE_OVERFLOW (i);
1988 v = build_real (type, real_value_from_int_cst (type, i));
1990 TREE_OVERFLOW (v) |= overflow;
1991 return v;
1994 /* Return a newly constructed STRING_CST node whose value is
1995 the LEN characters at STR.
1996 Note that for a C string literal, LEN should include the trailing NUL.
1997 The TREE_TYPE is not initialized. */
1999 tree
2000 build_string (int len, const char *str)
2002 tree s;
2003 size_t length;
2005 /* Do not waste bytes provided by padding of struct tree_string. */
2006 length = len + offsetof (struct tree_string, str) + 1;
2008 record_node_allocation_statistics (STRING_CST, length);
2010 s = (tree) ggc_internal_alloc (length);
2012 memset (s, 0, sizeof (struct tree_typed));
2013 TREE_SET_CODE (s, STRING_CST);
2014 TREE_CONSTANT (s) = 1;
2015 TREE_STRING_LENGTH (s) = len;
2016 memcpy (s->string.str, str, len);
2017 s->string.str[len] = '\0';
2019 return s;
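/* Illustrative sketch, not part of the original tree.c: LEN counts the
   trailing NUL of a C string literal, and the caller must supply the
   node's type itself, e.g. a char array sized to hold the bytes.  The
   function name is made up for illustration.  */

static tree
example_string_cst (void)
{
  tree s = build_string (sizeof "hi", "hi");  /* 3 bytes: 'h', 'i', NUL.  */
  TREE_TYPE (s) = build_array_type (char_type_node,
                                    build_index_type (size_int (2)));
  return s;
}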
2022 /* Return a newly constructed COMPLEX_CST node whose value is
2023 specified by the real and imaginary parts REAL and IMAG.
2024 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2025 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2027 tree
2028 build_complex (tree type, tree real, tree imag)
2030 tree t = make_node (COMPLEX_CST);
2032 TREE_REALPART (t) = real;
2033 TREE_IMAGPART (t) = imag;
2034 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2035 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2036 return t;
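/* Illustrative sketch, not part of the original tree.c: both parts must
   be constants of the element type; passing NULL_TREE for TYPE derives a
   complex type from the type of REAL.  The function name is made up for
   illustration.  */

static tree
example_complex_one (void)
{
  /* Build the complex double constant 1.0 + 0.0i.  */
  return build_complex (NULL_TREE,
                        build_real (double_type_node, dconst1),
                        build_real (double_type_node, dconst0));
}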
2039 /* Build a complex (inf +- 0i), such as for the result of cproj.
2040 TYPE is the complex tree type of the result. If NEG is true, the
2041 imaginary zero is negative. */
2043 tree
2044 build_complex_inf (tree type, bool neg)
2046 REAL_VALUE_TYPE rinf, rzero = dconst0;
2048 real_inf (&rinf);
2049 rzero.sign = neg;
2050 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2051 build_real (TREE_TYPE (type), rzero));
2054 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2055 element is set to 1. In particular, this is 1 + i for complex types. */
2057 tree
2058 build_each_one_cst (tree type)
2060 if (TREE_CODE (type) == COMPLEX_TYPE)
2062 tree scalar = build_one_cst (TREE_TYPE (type));
2063 return build_complex (type, scalar, scalar);
2065 else
2066 return build_one_cst (type);
2069 /* Return a constant of arithmetic type TYPE which is the
2070 multiplicative identity of the set TYPE. */
2072 tree
2073 build_one_cst (tree type)
2075 switch (TREE_CODE (type))
2077 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2078 case POINTER_TYPE: case REFERENCE_TYPE:
2079 case OFFSET_TYPE:
2080 return build_int_cst (type, 1);
2082 case REAL_TYPE:
2083 return build_real (type, dconst1);
2085 case FIXED_POINT_TYPE:
2086 /* We can only generate 1 for accum types. */
2087 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2088 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2090 case VECTOR_TYPE:
2092 tree scalar = build_one_cst (TREE_TYPE (type));
2094 return build_vector_from_val (type, scalar);
2097 case COMPLEX_TYPE:
2098 return build_complex (type,
2099 build_one_cst (TREE_TYPE (type)),
2100 build_zero_cst (TREE_TYPE (type)));
2102 default:
2103 gcc_unreachable ();
2107 /* Return an integer of type TYPE containing all 1's in as much precision as
2108 it contains, or a complex or vector whose subparts are such integers. */
2110 tree
2111 build_all_ones_cst (tree type)
2113 if (TREE_CODE (type) == COMPLEX_TYPE)
2115 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2116 return build_complex (type, scalar, scalar);
2118 else
2119 return build_minus_one_cst (type);
2122 /* Return a constant of arithmetic type TYPE which is the
2123 opposite of the multiplicative identity of the set TYPE. */
2125 tree
2126 build_minus_one_cst (tree type)
2128 switch (TREE_CODE (type))
2130 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2131 case POINTER_TYPE: case REFERENCE_TYPE:
2132 case OFFSET_TYPE:
2133 return build_int_cst (type, -1);
2135 case REAL_TYPE:
2136 return build_real (type, dconstm1);
2138 case FIXED_POINT_TYPE:
2140 /* We can only generate -1 for accum types. */
2140 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2141 return build_fixed (type,
2142 fixed_from_double_int (double_int_minus_one,
2143 SCALAR_TYPE_MODE (type)));
2145 case VECTOR_TYPE:
2147 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2149 return build_vector_from_val (type, scalar);
2152 case COMPLEX_TYPE:
2153 return build_complex (type,
2154 build_minus_one_cst (TREE_TYPE (type)),
2155 build_zero_cst (TREE_TYPE (type)));
2157 default:
2158 gcc_unreachable ();
2162 /* Build 0 constant of type TYPE. This is used by constructor folding
2163 and thus the constant should be represented in memory by
2164 zero(es). */
2166 tree
2167 build_zero_cst (tree type)
2169 switch (TREE_CODE (type))
2171 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2172 case POINTER_TYPE: case REFERENCE_TYPE:
2173 case OFFSET_TYPE: case NULLPTR_TYPE:
2174 return build_int_cst (type, 0);
2176 case REAL_TYPE:
2177 return build_real (type, dconst0);
2179 case FIXED_POINT_TYPE:
2180 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2182 case VECTOR_TYPE:
2184 tree scalar = build_zero_cst (TREE_TYPE (type));
2186 return build_vector_from_val (type, scalar);
2189 case COMPLEX_TYPE:
2191 tree zero = build_zero_cst (TREE_TYPE (type));
2193 return build_complex (type, zero, zero);
2196 default:
2197 if (!AGGREGATE_TYPE_P (type))
2198 return fold_convert (type, integer_zero_node);
2199 return build_constructor (type, NULL);
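/* Sketch of the aggregate case handled above, with SOME_RECORD_TYPE
   standing for any RECORD_TYPE:

     tree t = build_zero_cst (some_record_type);
     gcc_checking_assert (TREE_CODE (t) == CONSTRUCTOR
                          && CONSTRUCTOR_NELTS (t) == 0);

   an empty CONSTRUCTOR is the canonical all-zero value for aggregates,
   while scalars go through build_int_cst, build_real, etc.  */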
2204 /* Build a BINFO with space for BASE_BINFOS base binfos. */
2206 tree
2207 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2209 tree t;
2210 size_t length = (offsetof (struct tree_binfo, base_binfos)
2211 + vec<tree, va_gc>::embedded_size (base_binfos));
2213 record_node_allocation_statistics (TREE_BINFO, length);
2215 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2217 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2219 TREE_SET_CODE (t, TREE_BINFO);
2221 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2223 return t;
2226 /* Create a CASE_LABEL_EXPR tree node and return it. */
2228 tree
2229 build_case_label (tree low_value, tree high_value, tree label_decl)
2231 tree t = make_node (CASE_LABEL_EXPR);
2233 TREE_TYPE (t) = void_type_node;
2234 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2236 CASE_LOW (t) = low_value;
2237 CASE_HIGH (t) = high_value;
2238 CASE_LABEL (t) = label_decl;
2239 CASE_CHAIN (t) = NULL_TREE;
2241 return t;
2244 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2245 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2246 The latter determines the length of the HOST_WIDE_INT vector. */
2248 tree
2249 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2251 tree t;
2252 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2253 + sizeof (struct tree_int_cst));
2255 gcc_assert (len);
2256 record_node_allocation_statistics (INTEGER_CST, length);
2258 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2260 TREE_SET_CODE (t, INTEGER_CST);
2261 TREE_INT_CST_NUNITS (t) = len;
2262 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2263 /* to_offset can only be applied to trees that are offset_int-sized
2264 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2265 must be exactly the precision of offset_int and so LEN is correct. */
2266 if (ext_len <= OFFSET_INT_ELTS)
2267 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2268 else
2269 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2271 TREE_CONSTANT (t) = 1;
2273 return t;
2276 /* Build a newly constructed TREE_VEC node of length LEN. */
2278 tree
2279 make_tree_vec (int len MEM_STAT_DECL)
2281 tree t;
2282 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2284 record_node_allocation_statistics (TREE_VEC, length);
2286 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2288 TREE_SET_CODE (t, TREE_VEC);
2289 TREE_VEC_LENGTH (t) = len;
2291 return t;
2294 /* Grow a TREE_VEC node to new length LEN. */
2296 tree
2297 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2299 gcc_assert (TREE_CODE (v) == TREE_VEC);
2301 int oldlen = TREE_VEC_LENGTH (v);
2302 gcc_assert (len > oldlen);
2304 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2305 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2307 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2309 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2311 TREE_VEC_LENGTH (v) = len;
2313 return v;
2316 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2317 fixed, and scalar, complex or vector. */
2320 zerop (const_tree expr)
2322 return (integer_zerop (expr)
2323 || real_zerop (expr)
2324 || fixed_zerop (expr));
2327 /* Return 1 if EXPR is the integer constant zero or a complex constant
2328 of zero. */
2331 integer_zerop (const_tree expr)
2333 switch (TREE_CODE (expr))
2335 case INTEGER_CST:
2336 return wi::to_wide (expr) == 0;
2337 case COMPLEX_CST:
2338 return (integer_zerop (TREE_REALPART (expr))
2339 && integer_zerop (TREE_IMAGPART (expr)));
2340 case VECTOR_CST:
2341 return (VECTOR_CST_NPATTERNS (expr) == 1
2342 && VECTOR_CST_DUPLICATE_P (expr)
2343 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2344 default:
2345 return false;
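/* Why checking one encoded element is enough above (sketch): a constant
   vector such as { 0, 0, 0, 0 } is stored as a single duplicated
   pattern, so VECTOR_CST_NPATTERNS is 1, VECTOR_CST_DUPLICATE_P is set,
   and every element equals VECTOR_CST_ENCODED_ELT (expr, 0).  E.g. for

     tree v = build_vector_from_val (v4si_type, integer_zero_node);

   (V4SI_TYPE standing for any integer vector type) integer_zerop (v)
   returns true without walking all the elements.  */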
2349 /* Return 1 if EXPR is the integer constant one or the corresponding
2350 complex constant. */
2353 integer_onep (const_tree expr)
2355 switch (TREE_CODE (expr))
2357 case INTEGER_CST:
2358 return wi::eq_p (wi::to_widest (expr), 1);
2359 case COMPLEX_CST:
2360 return (integer_onep (TREE_REALPART (expr))
2361 && integer_zerop (TREE_IMAGPART (expr)));
2362 case VECTOR_CST:
2363 return (VECTOR_CST_NPATTERNS (expr) == 1
2364 && VECTOR_CST_DUPLICATE_P (expr)
2365 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2366 default:
2367 return false;
2371 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2372 return 1 if every piece is the integer constant one. */
2375 integer_each_onep (const_tree expr)
2377 if (TREE_CODE (expr) == COMPLEX_CST)
2378 return (integer_onep (TREE_REALPART (expr))
2379 && integer_onep (TREE_IMAGPART (expr)));
2380 else
2381 return integer_onep (expr);
2384 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2385 it contains, or a complex or vector whose subparts are such integers. */
2388 integer_all_onesp (const_tree expr)
2390 if (TREE_CODE (expr) == COMPLEX_CST
2391 && integer_all_onesp (TREE_REALPART (expr))
2392 && integer_all_onesp (TREE_IMAGPART (expr)))
2393 return 1;
2395 else if (TREE_CODE (expr) == VECTOR_CST)
2396 return (VECTOR_CST_NPATTERNS (expr) == 1
2397 && VECTOR_CST_DUPLICATE_P (expr)
2398 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2400 else if (TREE_CODE (expr) != INTEGER_CST)
2401 return 0;
2403 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2404 == wi::to_wide (expr));
2407 /* Return 1 if EXPR is the integer constant minus one. */
2410 integer_minus_onep (const_tree expr)
2412 if (TREE_CODE (expr) == COMPLEX_CST)
2413 return (integer_all_onesp (TREE_REALPART (expr))
2414 && integer_zerop (TREE_IMAGPART (expr)));
2415 else
2416 return integer_all_onesp (expr);
2419 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2420 one bit on). */
2423 integer_pow2p (const_tree expr)
2425 if (TREE_CODE (expr) == COMPLEX_CST
2426 && integer_pow2p (TREE_REALPART (expr))
2427 && integer_zerop (TREE_IMAGPART (expr)))
2428 return 1;
2430 if (TREE_CODE (expr) != INTEGER_CST)
2431 return 0;
2433 return wi::popcount (wi::to_wide (expr)) == 1;
2436 /* Return 1 if EXPR is an integer constant other than zero or a
2437 complex constant other than zero. */
2440 integer_nonzerop (const_tree expr)
2442 return ((TREE_CODE (expr) == INTEGER_CST
2443 && wi::to_wide (expr) != 0)
2444 || (TREE_CODE (expr) == COMPLEX_CST
2445 && (integer_nonzerop (TREE_REALPART (expr))
2446 || integer_nonzerop (TREE_IMAGPART (expr)))));
2449 /* Return 1 if EXPR is the integer constant one. For vector,
2450 return 1 if every piece is the integer constant minus one
2451 (representing the value TRUE). */
2454 integer_truep (const_tree expr)
2456 if (TREE_CODE (expr) == VECTOR_CST)
2457 return integer_all_onesp (expr);
2458 return integer_onep (expr);
2461 /* Return 1 if EXPR is the fixed-point constant zero. */
2464 fixed_zerop (const_tree expr)
2466 return (TREE_CODE (expr) == FIXED_CST
2467 && TREE_FIXED_CST (expr).data.is_zero ());
2470 /* Return the power of two represented by a tree node known to be a
2471 power of two. */
2474 tree_log2 (const_tree expr)
2476 if (TREE_CODE (expr) == COMPLEX_CST)
2477 return tree_log2 (TREE_REALPART (expr));
2479 return wi::exact_log2 (wi::to_wide (expr));
2482 /* Similar, but return the largest integer Y such that 2 ** Y is less
2483 than or equal to EXPR. */
2486 tree_floor_log2 (const_tree expr)
2488 if (TREE_CODE (expr) == COMPLEX_CST)
2489 return tree_log2 (TREE_REALPART (expr));
2491 return wi::floor_log2 (wi::to_wide (expr));
2494 /* Return number of known trailing zero bits in EXPR, or, if the value of
2495 EXPR is known to be zero, the precision of its type. */
2497 unsigned int
2498 tree_ctz (const_tree expr)
2500 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2501 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2502 return 0;
2504 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2505 switch (TREE_CODE (expr))
2507 case INTEGER_CST:
2508 ret1 = wi::ctz (wi::to_wide (expr));
2509 return MIN (ret1, prec);
2510 case SSA_NAME:
2511 ret1 = wi::ctz (get_nonzero_bits (expr));
2512 return MIN (ret1, prec);
2513 case PLUS_EXPR:
2514 case MINUS_EXPR:
2515 case BIT_IOR_EXPR:
2516 case BIT_XOR_EXPR:
2517 case MIN_EXPR:
2518 case MAX_EXPR:
2519 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2520 if (ret1 == 0)
2521 return ret1;
2522 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2523 return MIN (ret1, ret2);
2524 case POINTER_PLUS_EXPR:
2525 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2526 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2527 /* The second operand is sizetype, which could in theory be
2528 wider than the pointer's precision. Make sure we never
2529 return more than prec. */
2530 ret2 = MIN (ret2, prec);
2531 return MIN (ret1, ret2);
2532 case BIT_AND_EXPR:
2533 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2534 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2535 return MAX (ret1, ret2);
2536 case MULT_EXPR:
2537 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2538 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2539 return MIN (ret1 + ret2, prec);
2540 case LSHIFT_EXPR:
2541 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2542 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2543 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2545 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2546 return MIN (ret1 + ret2, prec);
2548 return ret1;
2549 case RSHIFT_EXPR:
2550 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2551 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2553 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2554 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2555 if (ret1 > ret2)
2556 return ret1 - ret2;
2558 return 0;
2559 case TRUNC_DIV_EXPR:
2560 case CEIL_DIV_EXPR:
2561 case FLOOR_DIV_EXPR:
2562 case ROUND_DIV_EXPR:
2563 case EXACT_DIV_EXPR:
2564 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2565 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2567 int l = tree_log2 (TREE_OPERAND (expr, 1));
2568 if (l >= 0)
2570 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2571 ret2 = l;
2572 if (ret1 > ret2)
2573 return ret1 - ret2;
2576 return 0;
2577 CASE_CONVERT:
2578 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2579 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2580 ret1 = prec;
2581 return MIN (ret1, prec);
2582 case SAVE_EXPR:
2583 return tree_ctz (TREE_OPERAND (expr, 0));
2584 case COND_EXPR:
2585 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2586 if (ret1 == 0)
2587 return 0;
2588 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2589 return MIN (ret1, ret2);
2590 case COMPOUND_EXPR:
2591 return tree_ctz (TREE_OPERAND (expr, 1));
2592 case ADDR_EXPR:
2593 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2594 if (ret1 > BITS_PER_UNIT)
2596 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2597 return MIN (ret1, prec);
2599 return 0;
2600 default:
2601 return 0;
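/* Worked example (sketch): suppose X is an SSA_NAME whose recorded
   nonzero bits imply 2 trailing zero bits.  For the expression X * 8
   the MULT_EXPR case above computes

     MIN (tree_ctz (X) + tree_ctz (8), prec) = MIN (2 + 3, prec) = 5

   i.e. the product is known to be a multiple of 32.  */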
2605 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2606 decimal float constants, so don't return 1 for them. */
2609 real_zerop (const_tree expr)
2611 switch (TREE_CODE (expr))
2613 case REAL_CST:
2614 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2615 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2616 case COMPLEX_CST:
2617 return real_zerop (TREE_REALPART (expr))
2618 && real_zerop (TREE_IMAGPART (expr));
2619 case VECTOR_CST:
2621 /* Don't simply check for a duplicate because the predicate
2622 accepts both +0.0 and -0.0. */
2623 unsigned count = vector_cst_encoded_nelts (expr);
2624 for (unsigned int i = 0; i < count; ++i)
2625 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
2626 return false;
2627 return true;
2629 default:
2630 return false;
2634 /* Return 1 if EXPR is the real constant one in real or complex form.
2635 Trailing zeroes matter for decimal float constants, so don't return
2636 1 for them. */
2639 real_onep (const_tree expr)
2641 switch (TREE_CODE (expr))
2643 case REAL_CST:
2644 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2645 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2646 case COMPLEX_CST:
2647 return real_onep (TREE_REALPART (expr))
2648 && real_zerop (TREE_IMAGPART (expr));
2649 case VECTOR_CST:
2650 return (VECTOR_CST_NPATTERNS (expr) == 1
2651 && VECTOR_CST_DUPLICATE_P (expr)
2652 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2653 default:
2654 return false;
2658 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2659 matter for decimal float constants, so don't return 1 for them. */
2662 real_minus_onep (const_tree expr)
2664 switch (TREE_CODE (expr))
2666 case REAL_CST:
2667 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2668 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2669 case COMPLEX_CST:
2670 return real_minus_onep (TREE_REALPART (expr))
2671 && real_zerop (TREE_IMAGPART (expr));
2672 case VECTOR_CST:
2673 return (VECTOR_CST_NPATTERNS (expr) == 1
2674 && VECTOR_CST_DUPLICATE_P (expr)
2675 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2676 default:
2677 return false;
2681 /* Nonzero if EXP is a constant or a cast of a constant. */
2684 really_constant_p (const_tree exp)
2686 /* This is not quite the same as STRIP_NOPS. It does more. */
2687 while (CONVERT_EXPR_P (exp)
2688 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2689 exp = TREE_OPERAND (exp, 0);
2690 return TREE_CONSTANT (exp);
2693 /* Return first list element whose TREE_VALUE is ELEM.
2694 Return 0 if ELEM is not in LIST. */
2696 tree
2697 value_member (tree elem, tree list)
2699 while (list)
2701 if (elem == TREE_VALUE (list))
2702 return list;
2703 list = TREE_CHAIN (list);
2705 return NULL_TREE;
2708 /* Return first list element whose TREE_PURPOSE is ELEM.
2709 Return 0 if ELEM is not in LIST. */
2711 tree
2712 purpose_member (const_tree elem, tree list)
2714 while (list)
2716 if (elem == TREE_PURPOSE (list))
2717 return list;
2718 list = TREE_CHAIN (list);
2720 return NULL_TREE;
2723 /* Return true if ELEM is in V. */
2725 bool
2726 vec_member (const_tree elem, vec<tree, va_gc> *v)
2728 unsigned ix;
2729 tree t;
2730 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2731 if (elem == t)
2732 return true;
2733 return false;
2736 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2737 NULL_TREE. */
2739 tree
2740 chain_index (int idx, tree chain)
2742 for (; chain && idx > 0; --idx)
2743 chain = TREE_CHAIN (chain);
2744 return chain;
2747 /* Return nonzero if ELEM is part of the chain CHAIN. */
2750 chain_member (const_tree elem, const_tree chain)
2752 while (chain)
2754 if (elem == chain)
2755 return 1;
2756 chain = DECL_CHAIN (chain);
2759 return 0;
2762 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2763 We expect a null pointer to mark the end of the chain.
2764 This is the Lisp primitive `length'. */
2767 list_length (const_tree t)
2769 const_tree p = t;
2770 #ifdef ENABLE_TREE_CHECKING
2771 const_tree q = t;
2772 #endif
2773 int len = 0;
2775 while (p)
2777 p = TREE_CHAIN (p);
2778 #ifdef ENABLE_TREE_CHECKING
2779 if (len % 2)
2780 q = TREE_CHAIN (q);
2781 gcc_assert (p != q);
2782 #endif
2783 len++;
2786 return len;
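/* The checking code above is a tortoise-and-hare cycle check: Q advances
   one link for every two advances of P, so a circular chain makes the
   two pointers meet and the assert fire instead of looping forever.
   For a well-formed chain, e.g. (sketch, with A and B arbitrary trees)

     tree l = tree_cons (NULL_TREE, a, tree_cons (NULL_TREE, b, NULL_TREE));

   list_length (l) simply returns 2.  */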
2789 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2790 UNION_TYPE TYPE, or NULL_TREE if none. */
2792 tree
2793 first_field (const_tree type)
2795 tree t = TYPE_FIELDS (type);
2796 while (t && TREE_CODE (t) != FIELD_DECL)
2797 t = TREE_CHAIN (t);
2798 return t;
2801 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2802 by modifying the last node in chain 1 to point to chain 2.
2803 This is the Lisp primitive `nconc'. */
2805 tree
2806 chainon (tree op1, tree op2)
2808 tree t1;
2810 if (!op1)
2811 return op2;
2812 if (!op2)
2813 return op1;
2815 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2816 continue;
2817 TREE_CHAIN (t1) = op2;
2819 #ifdef ENABLE_TREE_CHECKING
2821 tree t2;
2822 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2823 gcc_assert (t2 != t1);
2825 #endif
2827 return op1;
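/* Usage sketch: given two TREE_CHAIN'ed lists LIST1 = A -> B and
   LIST2 = C -> D,

     tree all = chainon (list1, list2);

   destructively produces A -> B -> C -> D and returns its head; the
   checking loop above guards against chaining a list onto itself,
   which would create a cycle.  */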
2830 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2832 tree
2833 tree_last (tree chain)
2835 tree next;
2836 if (chain)
2837 while ((next = TREE_CHAIN (chain)))
2838 chain = next;
2839 return chain;
2842 /* Reverse the order of elements in the chain T,
2843 and return the new head of the chain (old last element). */
2845 tree
2846 nreverse (tree t)
2848 tree prev = 0, decl, next;
2849 for (decl = t; decl; decl = next)
2851 /* We shouldn't be using this function to reverse BLOCK chains; we
2852 have blocks_nreverse for that. */
2853 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2854 next = TREE_CHAIN (decl);
2855 TREE_CHAIN (decl) = prev;
2856 prev = decl;
2858 return prev;
2861 /* Return a newly created TREE_LIST node whose
2862 purpose and value fields are PARM and VALUE. */
2864 tree
2865 build_tree_list (tree parm, tree value MEM_STAT_DECL)
2867 tree t = make_node (TREE_LIST PASS_MEM_STAT);
2868 TREE_PURPOSE (t) = parm;
2869 TREE_VALUE (t) = value;
2870 return t;
2873 /* Build a chain of TREE_LIST nodes from a vector. */
2875 tree
2876 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2878 tree ret = NULL_TREE;
2879 tree *pp = &ret;
2880 unsigned int i;
2881 tree t;
2882 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2884 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
2885 pp = &TREE_CHAIN (*pp);
2887 return ret;
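/* Sketch: turning a vector of trees { X, Y } into the TREE_LIST chain
   (NULL, X) -> (NULL, Y), with X and Y standing for arbitrary trees:

     vec<tree, va_gc> *v = NULL;
     vec_safe_push (v, x);
     vec_safe_push (v, y);
     tree l = build_tree_list_vec (v);  */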
2890 /* Return a newly created TREE_LIST node whose
2891 purpose and value fields are PURPOSE and VALUE
2892 and whose TREE_CHAIN is CHAIN. */
2894 tree
2895 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
2897 tree node;
2899 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2900 memset (node, 0, sizeof (struct tree_common));
2902 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2904 TREE_SET_CODE (node, TREE_LIST);
2905 TREE_CHAIN (node) = chain;
2906 TREE_PURPOSE (node) = purpose;
2907 TREE_VALUE (node) = value;
2908 return node;
2911 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2912 trees. */
2914 vec<tree, va_gc> *
2915 ctor_to_vec (tree ctor)
2917 vec<tree, va_gc> *vec;
2918 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2919 unsigned int ix;
2920 tree val;
2922 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2923 vec->quick_push (val);
2925 return vec;
2928 /* Return the size nominally occupied by an object of type TYPE
2929 when it resides in memory. The value is measured in units of bytes,
2930 and its data type is that normally used for type sizes
2931 (which is the first type created by make_signed_type or
2932 make_unsigned_type). */
2934 tree
2935 size_in_bytes_loc (location_t loc, const_tree type)
2937 tree t;
2939 if (type == error_mark_node)
2940 return integer_zero_node;
2942 type = TYPE_MAIN_VARIANT (type);
2943 t = TYPE_SIZE_UNIT (type);
2945 if (t == 0)
2947 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
2948 return size_zero_node;
2951 return t;
2954 /* Return the size of TYPE (in bytes) as a wide integer
2955 or return -1 if the size can vary or is larger than an integer. */
2957 HOST_WIDE_INT
2958 int_size_in_bytes (const_tree type)
2960 tree t;
2962 if (type == error_mark_node)
2963 return 0;
2965 type = TYPE_MAIN_VARIANT (type);
2966 t = TYPE_SIZE_UNIT (type);
2968 if (t && tree_fits_uhwi_p (t))
2969 return TREE_INT_CST_LOW (t);
2970 else
2971 return -1;
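/* Examples (sketch): on a typical LP64 target

     int_size_in_bytes (integer_type_node) == 4
     int_size_in_bytes (double_type_node)  == 8

   whereas a C99 variable-length array type, whose TYPE_SIZE_UNIT is not
   a compile-time constant, yields -1.  */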
2974 /* Return the maximum size of TYPE (in bytes) as a wide integer
2975 or return -1 if the size can vary or is larger than an integer. */
2977 HOST_WIDE_INT
2978 max_int_size_in_bytes (const_tree type)
2980 HOST_WIDE_INT size = -1;
2981 tree size_tree;
2983 /* If this is an array type, check for a possible MAX_SIZE attached. */
2985 if (TREE_CODE (type) == ARRAY_TYPE)
2987 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2989 if (size_tree && tree_fits_uhwi_p (size_tree))
2990 size = tree_to_uhwi (size_tree);
2993 /* If we still haven't been able to get a size, see if the language
2994 can compute a maximum size. */
2996 if (size == -1)
2998 size_tree = lang_hooks.types.max_size (type);
3000 if (size_tree && tree_fits_uhwi_p (size_tree))
3001 size = tree_to_uhwi (size_tree);
3004 return size;
3007 /* Return the bit position of FIELD, in bits from the start of the record.
3008 This is a tree of type bitsizetype. */
3010 tree
3011 bit_position (const_tree field)
3013 return bit_from_pos (DECL_FIELD_OFFSET (field),
3014 DECL_FIELD_BIT_OFFSET (field));
3017 /* Return the byte position of FIELD, in bytes from the start of the record.
3018 This is a tree of type sizetype. */
3020 tree
3021 byte_position (const_tree field)
3023 return byte_from_pos (DECL_FIELD_OFFSET (field),
3024 DECL_FIELD_BIT_OFFSET (field));
3027 /* Likewise, but return as an integer. It must be representable in
3028 that way (since it could be a signed value, we don't have the
3029 option of returning -1 like int_size_in_bytes can). */
3031 HOST_WIDE_INT
3032 int_byte_position (const_tree field)
3034 return tree_to_shwi (byte_position (field));
3037 /* Return the strictest alignment, in bits, that T is known to have. */
3039 unsigned int
3040 expr_align (const_tree t)
3042 unsigned int align0, align1;
3044 switch (TREE_CODE (t))
3046 CASE_CONVERT: case NON_LVALUE_EXPR:
3047 /* If we have conversions, we know that the alignment of the
3048 object must meet each of the alignments of the types. */
3049 align0 = expr_align (TREE_OPERAND (t, 0));
3050 align1 = TYPE_ALIGN (TREE_TYPE (t));
3051 return MAX (align0, align1);
3053 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3054 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3055 case CLEANUP_POINT_EXPR:
3056 /* These don't change the alignment of an object. */
3057 return expr_align (TREE_OPERAND (t, 0));
3059 case COND_EXPR:
3060 /* The best we can do is say that the alignment is the least aligned
3061 of the two arms. */
3062 align0 = expr_align (TREE_OPERAND (t, 1));
3063 align1 = expr_align (TREE_OPERAND (t, 2));
3064 return MIN (align0, align1);
3066 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3067 meaningfully; it's always 1. */
3068 case LABEL_DECL: case CONST_DECL:
3069 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3070 case FUNCTION_DECL:
3071 gcc_assert (DECL_ALIGN (t) != 0);
3072 return DECL_ALIGN (t);
3074 default:
3075 break;
3078 /* Otherwise take the alignment from that of the type. */
3079 return TYPE_ALIGN (TREE_TYPE (t));
3082 /* Return, as a tree node, the number of elements for TYPE (which is an
3083 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3085 tree
3086 array_type_nelts (const_tree type)
3088 tree index_type, min, max;
3090 /* If they did it with unspecified bounds, then we should have already
3091 given an error about it before we got here. */
3092 if (! TYPE_DOMAIN (type))
3093 return error_mark_node;
3095 index_type = TYPE_DOMAIN (type);
3096 min = TYPE_MIN_VALUE (index_type);
3097 max = TYPE_MAX_VALUE (index_type);
3099 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3100 if (!max)
3101 return error_mark_node;
3103 return (integer_zerop (min)
3104 ? max
3105 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
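/* Example (sketch): for the C type int[10] the domain is 0 .. 9, so the
   function returns the INTEGER_CST 9 directly; for a language with a
   1-based domain 1 .. 10 it returns the folded difference 10 - 1 = 9.  */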
3108 /* If arg is static -- a reference to an object in static storage -- then
3109 return the object. This is not the same as the C meaning of `static'.
3110 If arg isn't static, return NULL. */
3112 tree
3113 staticp (tree arg)
3115 switch (TREE_CODE (arg))
3117 case FUNCTION_DECL:
3118 /* Nested functions are static, even though taking their address will
3119 involve a trampoline as we unnest the nested function and create
3120 the trampoline on the tree level. */
3121 return arg;
3123 case VAR_DECL:
3124 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3125 && ! DECL_THREAD_LOCAL_P (arg)
3126 && ! DECL_DLLIMPORT_P (arg)
3127 ? arg : NULL);
3129 case CONST_DECL:
3130 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3131 ? arg : NULL);
3133 case CONSTRUCTOR:
3134 return TREE_STATIC (arg) ? arg : NULL;
3136 case LABEL_DECL:
3137 case STRING_CST:
3138 return arg;
3140 case COMPONENT_REF:
3141 /* If the thing being referenced is not a field, then it is
3142 something language specific. */
3143 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3145 /* If we are referencing a bitfield, we can't evaluate an
3146 ADDR_EXPR at compile time and so it isn't a constant. */
3147 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3148 return NULL;
3150 return staticp (TREE_OPERAND (arg, 0));
3152 case BIT_FIELD_REF:
3153 return NULL;
3155 case INDIRECT_REF:
3156 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3158 case ARRAY_REF:
3159 case ARRAY_RANGE_REF:
3160 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3161 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3162 return staticp (TREE_OPERAND (arg, 0));
3163 else
3164 return NULL;
3166 case COMPOUND_LITERAL_EXPR:
3167 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3169 default:
3170 return NULL;
3177 /* Return whether OP is a DECL whose address is function-invariant. */
3179 bool
3180 decl_address_invariant_p (const_tree op)
3182 /* The conditions below are slightly less strict than the ones in
3183 staticp. */
3185 switch (TREE_CODE (op))
3187 case PARM_DECL:
3188 case RESULT_DECL:
3189 case LABEL_DECL:
3190 case FUNCTION_DECL:
3191 return true;
3193 case VAR_DECL:
3194 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3195 || DECL_THREAD_LOCAL_P (op)
3196 || DECL_CONTEXT (op) == current_function_decl
3197 || decl_function_context (op) == current_function_decl)
3198 return true;
3199 break;
3201 case CONST_DECL:
3202 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3203 || decl_function_context (op) == current_function_decl)
3204 return true;
3205 break;
3207 default:
3208 break;
3211 return false;
3214 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3216 bool
3217 decl_address_ip_invariant_p (const_tree op)
3219 /* The conditions below are slightly less strict than the ones in
3220 staticp. */
3222 switch (TREE_CODE (op))
3224 case LABEL_DECL:
3225 case FUNCTION_DECL:
3226 case STRING_CST:
3227 return true;
3229 case VAR_DECL:
3230 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3231 && !DECL_DLLIMPORT_P (op))
3232 || DECL_THREAD_LOCAL_P (op))
3233 return true;
3234 break;
3236 case CONST_DECL:
3237 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3238 return true;
3239 break;
3241 default:
3242 break;
3245 return false;
3249 /* Return true if T is function-invariant (internal function, does
3250 not handle arithmetic; that's handled in skip_simple_arithmetic and
3251 tree_invariant_p). */
3253 static bool
3254 tree_invariant_p_1 (tree t)
3256 tree op;
3258 if (TREE_CONSTANT (t)
3259 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3260 return true;
3262 switch (TREE_CODE (t))
3264 case SAVE_EXPR:
3265 return true;
3267 case ADDR_EXPR:
3268 op = TREE_OPERAND (t, 0);
3269 while (handled_component_p (op))
3271 switch (TREE_CODE (op))
3273 case ARRAY_REF:
3274 case ARRAY_RANGE_REF:
3275 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3276 || TREE_OPERAND (op, 2) != NULL_TREE
3277 || TREE_OPERAND (op, 3) != NULL_TREE)
3278 return false;
3279 break;
3281 case COMPONENT_REF:
3282 if (TREE_OPERAND (op, 2) != NULL_TREE)
3283 return false;
3284 break;
3286 default:;
3288 op = TREE_OPERAND (op, 0);
3291 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3293 default:
3294 break;
3297 return false;
3300 /* Return true if T is function-invariant. */
3302 bool
3303 tree_invariant_p (tree t)
3305 tree inner = skip_simple_arithmetic (t);
3306 return tree_invariant_p_1 (inner);
3309 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3310 Do this to any expression which may be used in more than one place,
3311 but must be evaluated only once.
3313 Normally, expand_expr would reevaluate the expression each time.
3314 Calling save_expr produces something that is evaluated and recorded
3315 the first time expand_expr is called on it. Subsequent calls to
3316 expand_expr just reuse the recorded value.
3318 The call to expand_expr that generates code that actually computes
3319 the value is the first call *at compile time*. Subsequent calls
3320 *at compile time* generate code to use the saved value.
3321 This produces the correct result provided that *at run time* control
3322 always flows through the insns made by the first expand_expr
3323 before reaching the other places where the save_expr was evaluated.
3324 You, the caller of save_expr, must make sure this is so.
3326 Constants, and certain read-only nodes, are returned with no
3327 SAVE_EXPR because that is safe. Expressions containing placeholders
3328 are not touched; see tree.def for an explanation of what these
3329 are used for. */
3331 tree
3332 save_expr (tree expr)
3334 tree inner;
3336 /* If the tree evaluates to a constant, then we don't want to hide that
3337 fact (i.e. this allows further folding, and direct checks for constants).
3338 However, a read-only object that has side effects cannot be bypassed.
3339 Since it is no problem to reevaluate literals, we just return the
3340 literal node. */
3341 inner = skip_simple_arithmetic (expr);
3342 if (TREE_CODE (inner) == ERROR_MARK)
3343 return inner;
3345 if (tree_invariant_p_1 (inner))
3346 return expr;
3348 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3349 it means that the size or offset of some field of an object depends on
3350 the value within another field.
3352 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3353 and some variable since it would then need to be both evaluated once and
3354 evaluated more than once. Front-ends must assure this case cannot
3355 happen by surrounding any such subexpressions in their own SAVE_EXPR
3356 and forcing evaluation at the proper time. */
3357 if (contains_placeholder_p (inner))
3358 return expr;
3360 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3362 /* This expression might be placed ahead of a jump to ensure that the
3363 value was computed on both sides of the jump. So make sure it isn't
3364 eliminated as dead. */
3365 TREE_SIDE_EFFECTS (expr) = 1;
3366 return expr;
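/* Typical use (sketch): when an operand E that may have side effects is
   needed twice, e.g. to square it,

     tree t = save_expr (e);
     tree sq = build2 (MULT_EXPR, TREE_TYPE (t), t, t);

   so that E is evaluated once at run time and both operands of the
   MULT_EXPR reuse the recorded value.  */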
3369 /* Look inside EXPR into any simple arithmetic operations. Return the
3370 outermost non-arithmetic or non-invariant node. */
3372 tree
3373 skip_simple_arithmetic (tree expr)
3375 /* We don't care about whether this can be used as an lvalue in this
3376 context. */
3377 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3378 expr = TREE_OPERAND (expr, 0);
3380 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3381 a constant, it will be more efficient to not make another SAVE_EXPR since
3382 it will allow better simplification and GCSE will be able to merge the
3383 computations if they actually occur. */
3384 while (true)
3386 if (UNARY_CLASS_P (expr))
3387 expr = TREE_OPERAND (expr, 0);
3388 else if (BINARY_CLASS_P (expr))
3390 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3391 expr = TREE_OPERAND (expr, 0);
3392 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3393 expr = TREE_OPERAND (expr, 1);
3394 else
3395 break;
3397 else
3398 break;
3401 return expr;
3404 /* Look inside EXPR into simple arithmetic operations involving constants.
3405 Return the outermost non-arithmetic or non-constant node. */
3407 tree
3408 skip_simple_constant_arithmetic (tree expr)
3410 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3411 expr = TREE_OPERAND (expr, 0);
3413 while (true)
3415 if (UNARY_CLASS_P (expr))
3416 expr = TREE_OPERAND (expr, 0);
3417 else if (BINARY_CLASS_P (expr))
3419 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3420 expr = TREE_OPERAND (expr, 0);
3421 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3422 expr = TREE_OPERAND (expr, 1);
3423 else
3424 break;
3426 else
3427 break;
3430 return expr;
3433 /* Return which tree structure is used by T. */
3435 enum tree_node_structure_enum
3436 tree_node_structure (const_tree t)
3438 const enum tree_code code = TREE_CODE (t);
3439 return tree_node_structure_for_code (code);
3442 /* Set various status flags when building a CALL_EXPR object T. */
3444 static void
3445 process_call_operands (tree t)
3447 bool side_effects = TREE_SIDE_EFFECTS (t);
3448 bool read_only = false;
3449 int i = call_expr_flags (t);
3451 /* Calls have side-effects, except those to const or pure functions. */
3452 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3453 side_effects = true;
3454 /* Propagate TREE_READONLY of arguments for const functions. */
3455 if (i & ECF_CONST)
3456 read_only = true;
3458 if (!side_effects || read_only)
3459 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3461 tree op = TREE_OPERAND (t, i);
3462 if (op && TREE_SIDE_EFFECTS (op))
3463 side_effects = true;
3464 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3465 read_only = false;
3468 TREE_SIDE_EFFECTS (t) = side_effects;
3469 TREE_READONLY (t) = read_only;
3472 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3473 size or offset that depends on a field within a record. */
3475 bool
3476 contains_placeholder_p (const_tree exp)
3478 enum tree_code code;
3480 if (!exp)
3481 return 0;
3483 code = TREE_CODE (exp);
3484 if (code == PLACEHOLDER_EXPR)
3485 return 1;
3487 switch (TREE_CODE_CLASS (code))
3489 case tcc_reference:
3490 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3491 position computations since they will be converted into a
3492 WITH_RECORD_EXPR involving the reference, which we assume
3493 here will be valid. */
3494 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3496 case tcc_exceptional:
3497 if (code == TREE_LIST)
3498 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3499 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3500 break;
3502 case tcc_unary:
3503 case tcc_binary:
3504 case tcc_comparison:
3505 case tcc_expression:
3506 switch (code)
3508 case COMPOUND_EXPR:
3509 /* Ignoring the first operand isn't quite right, but works best. */
3510 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3512 case COND_EXPR:
3513 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3514 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3515 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3517 case SAVE_EXPR:
3518 /* The save_expr function never wraps anything containing
3519 a PLACEHOLDER_EXPR. */
3520 return 0;
3522 default:
3523 break;
3526 switch (TREE_CODE_LENGTH (code))
3528 case 1:
3529 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3530 case 2:
3531 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3532 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3533 default:
3534 return 0;
3537 case tcc_vl_exp:
3538 switch (code)
3540 case CALL_EXPR:
3542 const_tree arg;
3543 const_call_expr_arg_iterator iter;
3544 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3545 if (CONTAINS_PLACEHOLDER_P (arg))
3546 return 1;
3547 return 0;
3549 default:
3550 return 0;
3553 default:
3554 return 0;
3556 return 0;
3559 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3560 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3561 field positions. */
3563 static bool
3564 type_contains_placeholder_1 (const_tree type)
3566 /* If the size contains a placeholder or the parent type (component type in
3567 the case of arrays) type involves a placeholder, this type does. */
3568 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3569 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3570 || (!POINTER_TYPE_P (type)
3571 && TREE_TYPE (type)
3572 && type_contains_placeholder_p (TREE_TYPE (type))))
3573 return true;
3575 /* Now do type-specific checks. Note that the last part of the check above
3576 greatly limits what we have to do below. */
3577 switch (TREE_CODE (type))
3579 case VOID_TYPE:
3580 case POINTER_BOUNDS_TYPE:
3581 case COMPLEX_TYPE:
3582 case ENUMERAL_TYPE:
3583 case BOOLEAN_TYPE:
3584 case POINTER_TYPE:
3585 case OFFSET_TYPE:
3586 case REFERENCE_TYPE:
3587 case METHOD_TYPE:
3588 case FUNCTION_TYPE:
3589 case VECTOR_TYPE:
3590 case NULLPTR_TYPE:
3591 return false;
3593 case INTEGER_TYPE:
3594 case REAL_TYPE:
3595 case FIXED_POINT_TYPE:
3596 /* Here we just check the bounds. */
3597 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3598 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3600 case ARRAY_TYPE:
3601 /* We have already checked the component type above, so just check
3602 the domain type. Flexible array members have a null domain. */
3603 return TYPE_DOMAIN (type) ?
3604 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3606 case RECORD_TYPE:
3607 case UNION_TYPE:
3608 case QUAL_UNION_TYPE:
3610 tree field;
3612 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3613 if (TREE_CODE (field) == FIELD_DECL
3614 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3615 || (TREE_CODE (type) == QUAL_UNION_TYPE
3616 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3617 || type_contains_placeholder_p (TREE_TYPE (field))))
3618 return true;
3620 return false;
3623 default:
3624 gcc_unreachable ();
3628 /* Wrapper around above function used to cache its result. */
3630 bool
3631 type_contains_placeholder_p (tree type)
3633 bool result;
3635 /* If the contains_placeholder_bits field has been initialized,
3636 then we know the answer. */
3637 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3638 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3640 /* Indicate that we've seen this type node, and the answer is false.
3641 This is what we want to return if we run into recursion via fields. */
3642 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3644 /* Compute the real value. */
3645 result = type_contains_placeholder_1 (type);
3647 /* Store the real value. */
3648 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3650 return result;
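/* Note on the cache above: TYPE_CONTAINS_PLACEHOLDER_INTERNAL stores
   the answer plus one, so 0 means "not yet computed", 1 means false and
   2 means true; that is why the early return hands back the stored
   value minus 1, and why recursion through fields sees a provisional
   "false".  */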
3653 /* Push tree EXP onto vector QUEUE if it is not already present. */
3655 static void
3656 push_without_duplicates (tree exp, vec<tree> *queue)
3658 unsigned int i;
3659 tree iter;
3661 FOR_EACH_VEC_ELT (*queue, i, iter)
3662 if (simple_cst_equal (iter, exp) == 1)
3663 break;
3665 if (!iter)
3666 queue->safe_push (exp);
3669 /* Given a tree EXP, find all occurrences of references to fields
3670 in a PLACEHOLDER_EXPR and place them in vector REFS without
3671 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3672 we assume here that EXP contains only arithmetic expressions
3673 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3674 argument list. */
3676 void
3677 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3679 enum tree_code code = TREE_CODE (exp);
3680 tree inner;
3681 int i;
3683 /* We handle TREE_LIST and COMPONENT_REF separately. */
3684 if (code == TREE_LIST)
3686 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3687 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3689 else if (code == COMPONENT_REF)
3691 for (inner = TREE_OPERAND (exp, 0);
3692 REFERENCE_CLASS_P (inner);
3693 inner = TREE_OPERAND (inner, 0))
3696 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3697 push_without_duplicates (exp, refs);
3698 else
3699 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3701 else
3702 switch (TREE_CODE_CLASS (code))
3704 case tcc_constant:
3705 break;
3707 case tcc_declaration:
3708 /* Variables allocated to static storage can stay. */
3709 if (!TREE_STATIC (exp))
3710 push_without_duplicates (exp, refs);
3711 break;
3713 case tcc_expression:
3714 /* This is the pattern built in ada/make_aligning_type. */
3715 if (code == ADDR_EXPR
3716 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3718 push_without_duplicates (exp, refs);
3719 break;
3722 /* Fall through. */
3724 case tcc_exceptional:
3725 case tcc_unary:
3726 case tcc_binary:
3727 case tcc_comparison:
3728 case tcc_reference:
3729 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3730 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3731 break;
3733 case tcc_vl_exp:
3734 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3735 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3736 break;
3738 default:
3739 gcc_unreachable ();
3743 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3744 return a tree with all occurrences of references to F in a
3745 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3746 CONST_DECLs. Note that we assume here that EXP contains only
3747 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3748 occurring only in their argument list. */
3750 tree
3751 substitute_in_expr (tree exp, tree f, tree r)
3753 enum tree_code code = TREE_CODE (exp);
3754 tree op0, op1, op2, op3;
3755 tree new_tree;
3757 /* We handle TREE_LIST and COMPONENT_REF separately. */
3758 if (code == TREE_LIST)
3760 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3761 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3762 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3763 return exp;
3765 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3767 else if (code == COMPONENT_REF)
3769 tree inner;
3771 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3772 and it is the right field, replace it with R. */
3773 for (inner = TREE_OPERAND (exp, 0);
3774 REFERENCE_CLASS_P (inner);
3775 inner = TREE_OPERAND (inner, 0))
3778 /* The field. */
3779 op1 = TREE_OPERAND (exp, 1);
3781 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3782 return r;
3784 /* If this expression hasn't been completed yet, leave it alone. */
3785 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3786 return exp;
3788 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3789 if (op0 == TREE_OPERAND (exp, 0))
3790 return exp;
3792 new_tree
3793 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3795 else
3796 switch (TREE_CODE_CLASS (code))
3798 case tcc_constant:
3799 return exp;
3801 case tcc_declaration:
3802 if (exp == f)
3803 return r;
3804 else
3805 return exp;
3807 case tcc_expression:
3808 if (exp == f)
3809 return r;
3811 /* Fall through. */
3813 case tcc_exceptional:
3814 case tcc_unary:
3815 case tcc_binary:
3816 case tcc_comparison:
3817 case tcc_reference:
3818 switch (TREE_CODE_LENGTH (code))
3820 case 0:
3821 return exp;
3823 case 1:
3824 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3825 if (op0 == TREE_OPERAND (exp, 0))
3826 return exp;
3828 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3829 break;
3831 case 2:
3832 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3833 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3835 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3836 return exp;
3838 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3839 break;
3841 case 3:
3842 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3843 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3844 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3846 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3847 && op2 == TREE_OPERAND (exp, 2))
3848 return exp;
3850 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3851 break;
3853 case 4:
3854 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3855 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3856 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3857 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3859 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3860 && op2 == TREE_OPERAND (exp, 2)
3861 && op3 == TREE_OPERAND (exp, 3))
3862 return exp;
3864 new_tree
3865 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3866 break;
3868 default:
3869 gcc_unreachable ();
3871 break;
3873 case tcc_vl_exp:
3875 int i;
3877 new_tree = NULL_TREE;
3879 /* If we are trying to replace F with a constant or with another
3880 instance of one of the arguments of the call, inline back
3881 functions which do nothing else than computing a value from
3882 the arguments they are passed. This makes it possible to
3883 fold partially or entirely the replacement expression. */
3884 if (code == CALL_EXPR)
3886 bool maybe_inline = false;
3887 if (CONSTANT_CLASS_P (r))
3888 maybe_inline = true;
3889 else
3890 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
3891 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
3893 maybe_inline = true;
3894 break;
3896 if (maybe_inline)
3898 tree t = maybe_inline_call_in_expr (exp);
3899 if (t)
3900 return SUBSTITUTE_IN_EXPR (t, f, r);
3904 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3906 tree op = TREE_OPERAND (exp, i);
3907 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3908 if (new_op != op)
3910 if (!new_tree)
3911 new_tree = copy_node (exp);
3912 TREE_OPERAND (new_tree, i) = new_op;
3916 if (new_tree)
3918 new_tree = fold (new_tree);
3919 if (TREE_CODE (new_tree) == CALL_EXPR)
3920 process_call_operands (new_tree);
3922 else
3923 return exp;
3925 break;
3927 default:
3928 gcc_unreachable ();
3931 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3933 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3934 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3936 return new_tree;
3939 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3940 for it within OBJ, a tree that is an object or a chain of references. */
3942 tree
3943 substitute_placeholder_in_expr (tree exp, tree obj)
3945 enum tree_code code = TREE_CODE (exp);
3946 tree op0, op1, op2, op3;
3947 tree new_tree;
3949 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3950 in the chain of OBJ. */
3951 if (code == PLACEHOLDER_EXPR)
3953 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3954 tree elt;
3956 for (elt = obj; elt != 0;
3957 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3958 || TREE_CODE (elt) == COND_EXPR)
3959 ? TREE_OPERAND (elt, 1)
3960 : (REFERENCE_CLASS_P (elt)
3961 || UNARY_CLASS_P (elt)
3962 || BINARY_CLASS_P (elt)
3963 || VL_EXP_CLASS_P (elt)
3964 || EXPRESSION_CLASS_P (elt))
3965 ? TREE_OPERAND (elt, 0) : 0))
3966 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3967 return elt;
3969 for (elt = obj; elt != 0;
3970 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3971 || TREE_CODE (elt) == COND_EXPR)
3972 ? TREE_OPERAND (elt, 1)
3973 : (REFERENCE_CLASS_P (elt)
3974 || UNARY_CLASS_P (elt)
3975 || BINARY_CLASS_P (elt)
3976 || VL_EXP_CLASS_P (elt)
3977 || EXPRESSION_CLASS_P (elt))
3978 ? TREE_OPERAND (elt, 0) : 0))
3979 if (POINTER_TYPE_P (TREE_TYPE (elt))
3980 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3981 == need_type))
3982 return fold_build1 (INDIRECT_REF, need_type, elt);
3984 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3985 survives until RTL generation, there will be an error. */
3986 return exp;
3989 /* TREE_LIST is special because we need to look at TREE_VALUE
3990 and TREE_CHAIN, not TREE_OPERANDS. */
3991 else if (code == TREE_LIST)
3993 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3994 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3995 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3996 return exp;
3998 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4000 else
4001 switch (TREE_CODE_CLASS (code))
4003 case tcc_constant:
4004 case tcc_declaration:
4005 return exp;
4007 case tcc_exceptional:
4008 case tcc_unary:
4009 case tcc_binary:
4010 case tcc_comparison:
4011 case tcc_expression:
4012 case tcc_reference:
4013 case tcc_statement:
4014 switch (TREE_CODE_LENGTH (code))
4016 case 0:
4017 return exp;
4019 case 1:
4020 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4021 if (op0 == TREE_OPERAND (exp, 0))
4022 return exp;
4024 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4025 break;
4027 case 2:
4028 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4029 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4031 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4032 return exp;
4034 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4035 break;
4037 case 3:
4038 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4039 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4040 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4042 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4043 && op2 == TREE_OPERAND (exp, 2))
4044 return exp;
4046 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4047 break;
4049 case 4:
4050 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4051 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4052 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4053 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4055 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4056 && op2 == TREE_OPERAND (exp, 2)
4057 && op3 == TREE_OPERAND (exp, 3))
4058 return exp;
4060 new_tree
4061 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4062 break;
4064 default:
4065 gcc_unreachable ();
4067 break;
4069 case tcc_vl_exp:
4071 int i;
4073 new_tree = NULL_TREE;
4075 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4077 tree op = TREE_OPERAND (exp, i);
4078 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4079 if (new_op != op)
4081 if (!new_tree)
4082 new_tree = copy_node (exp);
4083 TREE_OPERAND (new_tree, i) = new_op;
4087 if (new_tree)
4089 new_tree = fold (new_tree);
4090 if (TREE_CODE (new_tree) == CALL_EXPR)
4091 process_call_operands (new_tree);
4093 else
4094 return exp;
4096 break;
4098 default:
4099 gcc_unreachable ();
4102 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4104 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4105 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4107 return new_tree;
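/* Usage sketch: languages such as Ada build types whose size expressions
   refer to a PLACEHOLDER_EXPR standing for "the object being referenced".
   Given such a size expression SIZE and a concrete object OBJ,

     tree s = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, obj);

   resolves the placeholder against OBJ directly, or against *OBJ when
   only a pointer of the right type is found, as implemented above.  */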
4111 /* Subroutine of stabilize_reference; this is called for subtrees of
4112 references. Any expression with side-effects must be put in a SAVE_EXPR
4113 to ensure that it is only evaluated once.
4115 We don't put SAVE_EXPR nodes around everything, because assigning very
4116 simple expressions to temporaries causes us to miss good opportunities
4117 for optimizations. Among other things, the opportunity to fold in the
4118 addition of a constant into an addressing mode often gets lost, e.g.
4119 "y[i+1] += x;". In general, we take the approach that we should not make
4120 an assignment unless we are forced into it - i.e., that any non-side effect
4121 operator should be allowed, and that cse should take care of coalescing
4122 multiple utterances of the same expression should that prove fruitful. */
4124 static tree
4125 stabilize_reference_1 (tree e)
4127 tree result;
4128 enum tree_code code = TREE_CODE (e);
4130 /* We cannot ignore const expressions because it might be a reference
4131 to a const array whose index contains side-effects. But we can
4132 ignore things that are actual constants or that have already been
4133 handled by this function. */
4135 if (tree_invariant_p (e))
4136 return e;
4138 switch (TREE_CODE_CLASS (code))
4140 case tcc_exceptional:
4141 case tcc_type:
4142 case tcc_declaration:
4143 case tcc_comparison:
4144 case tcc_statement:
4145 case tcc_expression:
4146 case tcc_reference:
4147 case tcc_vl_exp:
4148 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4149 so that it will only be evaluated once. */
4150 /* The reference (r) and comparison (<) classes could be handled as
4151 below, but it is generally faster to only evaluate them once. */
4152 if (TREE_SIDE_EFFECTS (e))
4153 return save_expr (e);
4154 return e;
4156 case tcc_constant:
4157 /* Constants need no processing. In fact, we should never reach
4158 here. */
4159 return e;
4161 case tcc_binary:
4162 /* Division is slow and tends to be compiled with jumps,
4163 especially the division by powers of 2 that is often
4164 found inside of an array reference. So do it just once. */
4165 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4166 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4167 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4168 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4169 return save_expr (e);
4170 /* Recursively stabilize each operand. */
4171 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4172 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4173 break;
4175 case tcc_unary:
4176 /* Recursively stabilize each operand. */
4177 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4178 break;
4180 default:
4181 gcc_unreachable ();
4184 TREE_TYPE (result) = TREE_TYPE (e);
4185 TREE_READONLY (result) = TREE_READONLY (e);
4186 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4187 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4189 return result;
4192 /* Stabilize a reference so that we can use it any number of times
4193 without causing its operands to be evaluated more than once.
4194 Returns the stabilized reference. This works by means of save_expr,
4195 so see the caveats in the comments about save_expr.
4197 Also allows conversion expressions whose operands are references.
4198 Any other kind of expression is returned unchanged. */
4200 tree
4201 stabilize_reference (tree ref)
4203 tree result;
4204 enum tree_code code = TREE_CODE (ref);
4206 switch (code)
4208 case VAR_DECL:
4209 case PARM_DECL:
4210 case RESULT_DECL:
4211 /* No action is needed in this case. */
4212 return ref;
4214 CASE_CONVERT:
4215 case FLOAT_EXPR:
4216 case FIX_TRUNC_EXPR:
4217 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4218 break;
4220 case INDIRECT_REF:
4221 result = build_nt (INDIRECT_REF,
4222 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4223 break;
4225 case COMPONENT_REF:
4226 result = build_nt (COMPONENT_REF,
4227 stabilize_reference (TREE_OPERAND (ref, 0)),
4228 TREE_OPERAND (ref, 1), NULL_TREE);
4229 break;
4231 case BIT_FIELD_REF:
4232 result = build_nt (BIT_FIELD_REF,
4233 stabilize_reference (TREE_OPERAND (ref, 0)),
4234 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4235 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4236 break;
4238 case ARRAY_REF:
4239 result = build_nt (ARRAY_REF,
4240 stabilize_reference (TREE_OPERAND (ref, 0)),
4241 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4242 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4243 break;
4245 case ARRAY_RANGE_REF:
4246 result = build_nt (ARRAY_RANGE_REF,
4247 stabilize_reference (TREE_OPERAND (ref, 0)),
4248 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4249 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4250 break;
4252 case COMPOUND_EXPR:
4253 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4254 it wouldn't be ignored. This matters when dealing with
4255 volatiles. */
4256 return stabilize_reference_1 (ref);
4258 /* If arg isn't a kind of lvalue we recognize, make no change.
4259 Caller should recognize the error for an invalid lvalue. */
4260 default:
4261 return ref;
4263 case ERROR_MARK:
4264 return error_mark_node;
4267 TREE_TYPE (result) = TREE_TYPE (ref);
4268 TREE_READONLY (result) = TREE_READONLY (ref);
4269 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4270 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4272 return result;
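/* For illustration (a sketch; REF is assumed to be an ARRAY_REF tree built
   elsewhere whose index may have side effects):

     tree stable = stabilize_reference (ref);
     tree rhs = build2 (PLUS_EXPR, TREE_TYPE (stable), stable,
                        build_int_cst (TREE_TYPE (stable), 1));
     tree asgn = build2 (MODIFY_EXPR, TREE_TYPE (stable), stable, rhs);

   Any SAVE_EXPRs created for the index are then shared by both uses of
   STABLE, so the index is evaluated only once.  */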
4275 /* Low-level constructors for expressions. */
4277 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4278 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4280 void
4281 recompute_tree_invariant_for_addr_expr (tree t)
4283 tree node;
4284 bool tc = true, se = false;
4286 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4288 /* We started out assuming this address is both invariant and constant, but
4289 does not have side effects. Now go down any handled components and see if
4290 any of them involve offsets that are either non-constant or non-invariant.
4291 Also check for side-effects.
4293 ??? Note that this code makes no attempt to deal with the case where
4294 taking the address of something causes a copy due to misalignment. */
4296 #define UPDATE_FLAGS(NODE) \
4297 do { tree _node = (NODE); \
4298 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4299 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4301 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4302 node = TREE_OPERAND (node, 0))
4304 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4305 array reference (probably made temporarily by the G++ front end),
4306 so ignore all the operands. */
4307 if ((TREE_CODE (node) == ARRAY_REF
4308 || TREE_CODE (node) == ARRAY_RANGE_REF)
4309 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4311 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4312 if (TREE_OPERAND (node, 2))
4313 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4314 if (TREE_OPERAND (node, 3))
4315 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4317 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4318 FIELD_DECL, apparently. The G++ front end can put something else
4319 there, at least temporarily. */
4320 else if (TREE_CODE (node) == COMPONENT_REF
4321 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4323 if (TREE_OPERAND (node, 2))
4324 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4328 node = lang_hooks.expr_to_decl (node, &tc, &se);
4330 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4331 the address, since &(*a)->b is a form of addition. If it's a constant, the
4332 address is constant too. If it's a decl, its address is constant if the
4333 decl is static. Everything else is not constant and, furthermore,
4334 taking the address of a volatile variable is not volatile. */
4335 if (TREE_CODE (node) == INDIRECT_REF
4336 || TREE_CODE (node) == MEM_REF)
4337 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4338 else if (CONSTANT_CLASS_P (node))
4340 else if (DECL_P (node))
4341 tc &= (staticp (node) != NULL_TREE);
4342 else
4344 tc = false;
4345 se |= TREE_SIDE_EFFECTS (node);
4349 TREE_CONSTANT (t) = tc;
4350 TREE_SIDE_EFFECTS (t) = se;
4351 #undef UPDATE_FLAGS
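/* As an example of the resulting flags (a sketch; VAR is assumed to be a
   static VAR_DECL of type int):

     tree addr = build1 (ADDR_EXPR,
                         build_pointer_type (TREE_TYPE (var)), var);

   build1 calls recompute_tree_invariant_for_addr_expr for ADDR_EXPR, and
   because VAR is static (staticp returns non-NULL) the ADDR_EXPR ends up
   with TREE_CONSTANT set and TREE_SIDE_EFFECTS clear.  */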
4354 /* Build an expression of code CODE, data type TYPE, and operands as
4355 specified. Expressions and reference nodes can be created this way.
4356 Constants, decls, types and misc nodes cannot be.
4358 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4359 enough for all extant tree codes. */
4361 tree
4362 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4364 tree t;
4366 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4368 t = make_node (code PASS_MEM_STAT);
4369 TREE_TYPE (t) = tt;
4371 return t;
4374 tree
4375 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4377 int length = sizeof (struct tree_exp);
4378 tree t;
4380 record_node_allocation_statistics (code, length);
4382 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4384 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4386 memset (t, 0, sizeof (struct tree_common));
4388 TREE_SET_CODE (t, code);
4390 TREE_TYPE (t) = type;
4391 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4392 TREE_OPERAND (t, 0) = node;
4393 if (node && !TYPE_P (node))
4395 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4396 TREE_READONLY (t) = TREE_READONLY (node);
4399 if (TREE_CODE_CLASS (code) == tcc_statement)
4400 TREE_SIDE_EFFECTS (t) = 1;
4401 else switch (code)
4403 case VA_ARG_EXPR:
4404 /* All of these have side-effects, no matter what their
4405 operands are. */
4406 TREE_SIDE_EFFECTS (t) = 1;
4407 TREE_READONLY (t) = 0;
4408 break;
4410 case INDIRECT_REF:
4411 /* Whether a dereference is readonly has nothing to do with whether
4412 its operand is readonly. */
4413 TREE_READONLY (t) = 0;
4414 break;
4416 case ADDR_EXPR:
4417 if (node)
4418 recompute_tree_invariant_for_addr_expr (t);
4419 break;
4421 default:
4422 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4423 && node && !TYPE_P (node)
4424 && TREE_CONSTANT (node))
4425 TREE_CONSTANT (t) = 1;
4426 if (TREE_CODE_CLASS (code) == tcc_reference
4427 && node && TREE_THIS_VOLATILE (node))
4428 TREE_THIS_VOLATILE (t) = 1;
4429 break;
4432 return t;
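/* For illustration (a sketch; OP is assumed to be a tree of type
   integer_type_node with TREE_CONSTANT set):

     tree neg = build1 (NEGATE_EXPR, integer_type_node, op);

   Since NEGATE_EXPR is tcc_unary and OP is constant, the default case
   above marks the result TREE_CONSTANT as well.  */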
4435 #define PROCESS_ARG(N) \
4436 do { \
4437 TREE_OPERAND (t, N) = arg##N; \
4438 if (arg##N &&!TYPE_P (arg##N)) \
4440 if (TREE_SIDE_EFFECTS (arg##N)) \
4441 side_effects = 1; \
4442 if (!TREE_READONLY (arg##N) \
4443 && !CONSTANT_CLASS_P (arg##N)) \
4444 (void) (read_only = 0); \
4445 if (!TREE_CONSTANT (arg##N)) \
4446 (void) (constant = 0); \
4448 } while (0)
4450 tree
4451 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4453 bool constant, read_only, side_effects, div_by_zero;
4454 tree t;
4456 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4458 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4459 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4460 /* When sizetype precision doesn't match that of pointers
4461 we need to be able to build explicit extensions or truncations
4462 of the offset argument. */
4463 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4464 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4465 && TREE_CODE (arg1) == INTEGER_CST);
4467 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4468 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4469 && ptrofftype_p (TREE_TYPE (arg1)));
4471 t = make_node (code PASS_MEM_STAT);
4472 TREE_TYPE (t) = tt;
4474 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4475 result based on those same flags for the arguments. But if the
4476 arguments aren't really even `tree' expressions, we shouldn't be trying
4477 to do this. */
4479 /* Expressions without side effects may be constant if their
4480 arguments are as well. */
4481 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4482 || TREE_CODE_CLASS (code) == tcc_binary);
4483 read_only = 1;
4484 side_effects = TREE_SIDE_EFFECTS (t);
4486 switch (code)
4488 case TRUNC_DIV_EXPR:
4489 case CEIL_DIV_EXPR:
4490 case FLOOR_DIV_EXPR:
4491 case ROUND_DIV_EXPR:
4492 case EXACT_DIV_EXPR:
4493 case CEIL_MOD_EXPR:
4494 case FLOOR_MOD_EXPR:
4495 case ROUND_MOD_EXPR:
4496 case TRUNC_MOD_EXPR:
4497 div_by_zero = integer_zerop (arg1);
4498 break;
4499 default:
4500 div_by_zero = false;
4503 PROCESS_ARG (0);
4504 PROCESS_ARG (1);
4506 TREE_SIDE_EFFECTS (t) = side_effects;
4507 if (code == MEM_REF)
4509 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4511 tree o = TREE_OPERAND (arg0, 0);
4512 TREE_READONLY (t) = TREE_READONLY (o);
4513 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4516 else
4518 TREE_READONLY (t) = read_only;
4519 /* Don't mark X / 0 as constant. */
4520 TREE_CONSTANT (t) = constant && !div_by_zero;
4521 TREE_THIS_VOLATILE (t)
4522 = (TREE_CODE_CLASS (code) == tcc_reference
4523 && arg0 && TREE_THIS_VOLATILE (arg0));
4526 return t;
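/* For illustration (a sketch; PTR is assumed to be a tree of pointer type):

     tree off = build_int_cst (sizetype, 8);
     tree p2 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);

   The assertion above requires the second operand of a POINTER_PLUS_EXPR
   to have a ptrofftype_p type, which sizetype satisfies.  */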
4530 tree
4531 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
4532 tree arg2 MEM_STAT_DECL)
4534 bool constant, read_only, side_effects;
4535 tree t;
4537 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4538 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4540 t = make_node (code PASS_MEM_STAT);
4541 TREE_TYPE (t) = tt;
4543 read_only = 1;
4545 /* As a special exception, if COND_EXPR has NULL branches, we
4546 assume that it is a gimple statement and always consider
4547 it to have side effects. */
4548 if (code == COND_EXPR
4549 && tt == void_type_node
4550 && arg1 == NULL_TREE
4551 && arg2 == NULL_TREE)
4552 side_effects = true;
4553 else
4554 side_effects = TREE_SIDE_EFFECTS (t);
4556 PROCESS_ARG (0);
4557 PROCESS_ARG (1);
4558 PROCESS_ARG (2);
4560 if (code == COND_EXPR)
4561 TREE_READONLY (t) = read_only;
4563 TREE_SIDE_EFFECTS (t) = side_effects;
4564 TREE_THIS_VOLATILE (t)
4565 = (TREE_CODE_CLASS (code) == tcc_reference
4566 && arg0 && TREE_THIS_VOLATILE (arg0));
4568 return t;
4571 tree
4572 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
4573 tree arg2, tree arg3 MEM_STAT_DECL)
4575 bool constant, read_only, side_effects;
4576 tree t;
4578 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4580 t = make_node (code PASS_MEM_STAT);
4581 TREE_TYPE (t) = tt;
4583 side_effects = TREE_SIDE_EFFECTS (t);
4585 PROCESS_ARG (0);
4586 PROCESS_ARG (1);
4587 PROCESS_ARG (2);
4588 PROCESS_ARG (3);
4590 TREE_SIDE_EFFECTS (t) = side_effects;
4591 TREE_THIS_VOLATILE (t)
4592 = (TREE_CODE_CLASS (code) == tcc_reference
4593 && arg0 && TREE_THIS_VOLATILE (arg0));
4595 return t;
4598 tree
4599 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
4600 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4602 bool constant, read_only, side_effects;
4603 tree t;
4605 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4607 t = make_node (code PASS_MEM_STAT);
4608 TREE_TYPE (t) = tt;
4610 side_effects = TREE_SIDE_EFFECTS (t);
4612 PROCESS_ARG (0);
4613 PROCESS_ARG (1);
4614 PROCESS_ARG (2);
4615 PROCESS_ARG (3);
4616 PROCESS_ARG (4);
4618 TREE_SIDE_EFFECTS (t) = side_effects;
4619 if (code == TARGET_MEM_REF)
4621 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4623 tree o = TREE_OPERAND (arg0, 0);
4624 TREE_READONLY (t) = TREE_READONLY (o);
4625 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4628 else
4629 TREE_THIS_VOLATILE (t)
4630 = (TREE_CODE_CLASS (code) == tcc_reference
4631 && arg0 && TREE_THIS_VOLATILE (arg0));
4633 return t;
4636 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4637 on the pointer PTR. */
4639 tree
4640 build_simple_mem_ref_loc (location_t loc, tree ptr)
4642 HOST_WIDE_INT offset = 0;
4643 tree ptype = TREE_TYPE (ptr);
4644 tree tem;
4645 /* For convenience allow addresses that collapse to a simple base
4646 and offset. */
4647 if (TREE_CODE (ptr) == ADDR_EXPR
4648 && (handled_component_p (TREE_OPERAND (ptr, 0))
4649 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4651 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4652 gcc_assert (ptr);
4653 if (TREE_CODE (ptr) == MEM_REF)
4655 offset += mem_ref_offset (ptr).to_short_addr ();
4656 ptr = TREE_OPERAND (ptr, 0);
4658 else
4659 ptr = build_fold_addr_expr (ptr);
4660 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4662 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4663 ptr, build_int_cst (ptype, offset));
4664 SET_EXPR_LOCATION (tem, loc);
4665 return tem;
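/* For illustration (a sketch; PTR is assumed to be an SSA name or
   invariant of type int *):

     tree deref = build_simple_mem_ref_loc (UNKNOWN_LOCATION, ptr);

   DEREF is a MEM_REF of type int with a zero offset, the equivalent of
   the old-style INDIRECT_REF on PTR; the build_simple_mem_ref wrapper
   passes UNKNOWN_LOCATION here.  */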
4668 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4670 offset_int
4671 mem_ref_offset (const_tree t)
4673 return offset_int::from (wi::to_wide (TREE_OPERAND (t, 1)), SIGNED);
4676 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4677 offsetted by OFFSET units. */
4679 tree
4680 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4682 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4683 build_fold_addr_expr (base),
4684 build_int_cst (ptr_type_node, offset));
4685 tree addr = build1 (ADDR_EXPR, type, ref);
4686 recompute_tree_invariant_for_addr_expr (addr);
4687 return addr;
4690 /* Similar except don't specify the TREE_TYPE
4691 and leave the TREE_SIDE_EFFECTS as 0.
4692 It is permissible for arguments to be null,
4693 or even garbage if their values do not matter. */
4695 tree
4696 build_nt (enum tree_code code, ...)
4698 tree t;
4699 int length;
4700 int i;
4701 va_list p;
4703 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4705 va_start (p, code);
4707 t = make_node (code);
4708 length = TREE_CODE_LENGTH (code);
4710 for (i = 0; i < length; i++)
4711 TREE_OPERAND (t, i) = va_arg (p, tree);
4713 va_end (p);
4714 return t;
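/* For illustration (a sketch; OBJECT and FIELD are assumed to be trees
   built elsewhere):

     tree ref = build_nt (COMPONENT_REF, object, field, NULL_TREE);

   Unlike build3, build_nt leaves TREE_TYPE and the TREE_SIDE_EFFECTS /
   TREE_CONSTANT flags at zero; stabilize_reference above uses it for
   exactly that reason and copies the flags afterwards.  */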
4717 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4718 tree vec. */
4720 tree
4721 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4723 tree ret, t;
4724 unsigned int ix;
4726 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4727 CALL_EXPR_FN (ret) = fn;
4728 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4729 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4730 CALL_EXPR_ARG (ret, ix) = t;
4731 return ret;
4734 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4735 We do NOT enter this node in any sort of symbol table.
4737 LOC is the location of the decl.
4739 layout_decl is used to set up the decl's storage layout.
4740 Other slots are initialized to 0 or null pointers. */
4742 tree
4743 build_decl (location_t loc, enum tree_code code, tree name,
4744 tree type MEM_STAT_DECL)
4746 tree t;
4748 t = make_node (code PASS_MEM_STAT);
4749 DECL_SOURCE_LOCATION (t) = loc;
4751 /* if (type == error_mark_node)
4752 type = integer_type_node; */
4753 /* That is not done, deliberately, so that having error_mark_node
4754 as the type can suppress useless errors in the use of this variable. */
4756 DECL_NAME (t) = name;
4757 TREE_TYPE (t) = type;
4759 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4760 layout_decl (t, 0);
4762 return t;
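/* For illustration (a sketch; the decl name is hypothetical):

     tree var = build_decl (input_location, VAR_DECL,
                            get_identifier ("tmp"), integer_type_node);
     TREE_STATIC (var) = 1;

   Because the code is VAR_DECL, layout_decl has already set the decl's
   size, mode and alignment from integer_type_node.  */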
4765 /* Builds and returns function declaration with NAME and TYPE. */
4767 tree
4768 build_fn_decl (const char *name, tree type)
4770 tree id = get_identifier (name);
4771 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4773 DECL_EXTERNAL (decl) = 1;
4774 TREE_PUBLIC (decl) = 1;
4775 DECL_ARTIFICIAL (decl) = 1;
4776 TREE_NOTHROW (decl) = 1;
4778 return decl;
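/* For illustration (a sketch; the function name and signature are
   hypothetical):

     tree fntype = build_function_type_list (void_type_node,
                                             ptr_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__my_runtime_hook", fntype);

   The result is an external, public, artificial, nothrow FUNCTION_DECL
   suitable for emitting calls to a runtime library routine.  */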
4781 vec<tree, va_gc> *all_translation_units;
4783 /* Builds a new translation-unit decl with name NAME, queues it in the
4784 global list of translation-unit decls and returns it. */
4786 tree
4787 build_translation_unit_decl (tree name)
4789 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4790 name, NULL_TREE);
4791 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4792 vec_safe_push (all_translation_units, tu);
4793 return tu;
4797 /* BLOCK nodes are used to represent the structure of binding contours
4798 and declarations, once those contours have been exited and their contents
4799 compiled. This information is used for outputting debugging info. */
4801 tree
4802 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4804 tree block = make_node (BLOCK);
4806 BLOCK_VARS (block) = vars;
4807 BLOCK_SUBBLOCKS (block) = subblocks;
4808 BLOCK_SUPERCONTEXT (block) = supercontext;
4809 BLOCK_CHAIN (block) = chain;
4810 return block;
4814 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4816 LOC is the location to use in tree T. */
4818 void
4819 protected_set_expr_location (tree t, location_t loc)
4821 if (CAN_HAVE_LOCATION_P (t))
4822 SET_EXPR_LOCATION (t, loc);
4825 /* Reset the expression *EXPR_P, a size or position.
4827 ??? We could reset all non-constant sizes or positions. But it's cheap
4828 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4830 We need to reset self-referential sizes or positions because they cannot
4831 be gimplified and thus can contain a CALL_EXPR after the gimplification
4832 is finished, which will run afoul of LTO streaming. And they need to be
4833 reset to something essentially dummy but not constant, so as to preserve
4834 the properties of the object they are attached to. */
4836 static inline void
4837 free_lang_data_in_one_sizepos (tree *expr_p)
4839 tree expr = *expr_p;
4840 if (CONTAINS_PLACEHOLDER_P (expr))
4841 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4845 /* Reset all the fields in a binfo node BINFO. We only keep
4846 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4848 static void
4849 free_lang_data_in_binfo (tree binfo)
4851 unsigned i;
4852 tree t;
4854 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4856 BINFO_VIRTUALS (binfo) = NULL_TREE;
4857 BINFO_BASE_ACCESSES (binfo) = NULL;
4858 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4859 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4861 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4862 free_lang_data_in_binfo (t);
4866 /* Reset all language specific information still present in TYPE. */
4868 static void
4869 free_lang_data_in_type (tree type)
4871 gcc_assert (TYPE_P (type));
4873 /* Give the FE a chance to remove its own data first. */
4874 lang_hooks.free_lang_data (type);
4876 TREE_LANG_FLAG_0 (type) = 0;
4877 TREE_LANG_FLAG_1 (type) = 0;
4878 TREE_LANG_FLAG_2 (type) = 0;
4879 TREE_LANG_FLAG_3 (type) = 0;
4880 TREE_LANG_FLAG_4 (type) = 0;
4881 TREE_LANG_FLAG_5 (type) = 0;
4882 TREE_LANG_FLAG_6 (type) = 0;
4884 if (TREE_CODE (type) == FUNCTION_TYPE)
4886 /* Remove the const and volatile qualifiers from arguments. The
4887 C++ front end removes them, but the C front end does not,
4888 leading to false ODR violation errors when merging two
4889 instances of the same function signature compiled by
4890 different front ends. */
4891 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4893 tree arg_type = TREE_VALUE (p);
4895 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4897 int quals = TYPE_QUALS (arg_type)
4898 & ~TYPE_QUAL_CONST
4899 & ~TYPE_QUAL_VOLATILE;
4900 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4901 free_lang_data_in_type (TREE_VALUE (p));
4903 /* C++ FE uses TREE_PURPOSE to store initial values. */
4904 TREE_PURPOSE (p) = NULL;
4907 else if (TREE_CODE (type) == METHOD_TYPE)
4908 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4909 /* C++ FE uses TREE_PURPOSE to store initial values. */
4910 TREE_PURPOSE (p) = NULL;
4911 else if (RECORD_OR_UNION_TYPE_P (type))
4913 /* Remove members that are not FIELD_DECLs (and maybe
4914 TYPE_DECLs) from the field list of an aggregate. These occur
4915 in C++. */
4916 for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
4917 if (TREE_CODE (member) == FIELD_DECL
4918 || (TREE_CODE (member) == TYPE_DECL
4919 && !DECL_IGNORED_P (member)
4920 && debug_info_level > DINFO_LEVEL_TERSE
4921 && !is_redundant_typedef (member)))
4922 prev = &DECL_CHAIN (member);
4923 else
4924 *prev = DECL_CHAIN (member);
4926 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
4927 and dangle the pointer from time to time. */
4928 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
4929 TYPE_VFIELD (type) = NULL_TREE;
4931 if (TYPE_BINFO (type))
4933 free_lang_data_in_binfo (TYPE_BINFO (type));
4934 /* We need to preserve the link to bases and the virtual table for all
4935 polymorphic types to keep the devirtualization machinery working.
4936 Debug output cares only about bases, but we also output the
4937 virtual table pointers so that merging -fdevirtualize and
4938 -fno-devirtualize units is easier. */
4939 if ((!BINFO_VTABLE (TYPE_BINFO (type))
4940 || !flag_devirtualize)
4941 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
4942 && !BINFO_VTABLE (TYPE_BINFO (type)))
4943 || debug_info_level != DINFO_LEVEL_NONE))
4944 TYPE_BINFO (type) = NULL;
4947 else if (INTEGRAL_TYPE_P (type)
4948 || SCALAR_FLOAT_TYPE_P (type)
4949 || FIXED_POINT_TYPE_P (type))
4951 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4952 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4955 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4957 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4958 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4960 if (TYPE_CONTEXT (type)
4961 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4963 tree ctx = TYPE_CONTEXT (type);
4966 ctx = BLOCK_SUPERCONTEXT (ctx);
4968 while (ctx && TREE_CODE (ctx) == BLOCK);
4969 TYPE_CONTEXT (type) = ctx;
4974 /* Return true if DECL may need an assembler name to be set. */
4976 static inline bool
4977 need_assembler_name_p (tree decl)
4979 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
4980 Rule merging. This makes type_odr_p return true on those types during
4981 LTO, and by comparing the mangled names we can say which types are intended
4982 to be equivalent across compilation units.
4984 We do not store names of type_in_anonymous_namespace_p.
4986 Record, union and enumeration types have linkage that allows us
4987 to check type_in_anonymous_namespace_p. We do not mangle compound types
4988 that can always be compared structurally.
4990 Similarly for builtin types, we compare properties of their main variant.
4991 A special case is integer types, where mangling does make a difference
4992 between char/signed char/unsigned char etc. Storing names for these lets
4993 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
4994 See cp/mangle.c:write_builtin_type for details. */
4996 if (flag_lto_odr_type_mering
4997 && TREE_CODE (decl) == TYPE_DECL
4998 && DECL_NAME (decl)
4999 && decl == TYPE_NAME (TREE_TYPE (decl))
5000 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5001 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5002 && (type_with_linkage_p (TREE_TYPE (decl))
5003 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5004 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5005 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5006 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5007 if (!VAR_OR_FUNCTION_DECL_P (decl))
5008 return false;
5010 /* If DECL already has its assembler name set, it does not need a
5011 new one. */
5012 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5013 || DECL_ASSEMBLER_NAME_SET_P (decl))
5014 return false;
5016 /* Abstract decls do not need an assembler name. */
5017 if (DECL_ABSTRACT_P (decl))
5018 return false;
5020 /* For VAR_DECLs, only static, public and external symbols need an
5021 assembler name. */
5022 if (VAR_P (decl)
5023 && !TREE_STATIC (decl)
5024 && !TREE_PUBLIC (decl)
5025 && !DECL_EXTERNAL (decl))
5026 return false;
5028 if (TREE_CODE (decl) == FUNCTION_DECL)
5030 /* Do not set assembler name on builtins. Allow RTL expansion to
5031 decide whether to expand inline or via a regular call. */
5032 if (DECL_BUILT_IN (decl)
5033 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5034 return false;
5036 /* Functions represented in the callgraph need an assembler name. */
5037 if (cgraph_node::get (decl) != NULL)
5038 return true;
5040 /* Unused and not public functions don't need an assembler name. */
5041 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5042 return false;
5045 return true;
5049 /* Reset all language specific information still present in symbol
5050 DECL. */
5052 static void
5053 free_lang_data_in_decl (tree decl)
5055 gcc_assert (DECL_P (decl));
5057 /* Give the FE a chance to remove its own data first. */
5058 lang_hooks.free_lang_data (decl);
5060 TREE_LANG_FLAG_0 (decl) = 0;
5061 TREE_LANG_FLAG_1 (decl) = 0;
5062 TREE_LANG_FLAG_2 (decl) = 0;
5063 TREE_LANG_FLAG_3 (decl) = 0;
5064 TREE_LANG_FLAG_4 (decl) = 0;
5065 TREE_LANG_FLAG_5 (decl) = 0;
5066 TREE_LANG_FLAG_6 (decl) = 0;
5068 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5069 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5070 if (TREE_CODE (decl) == FIELD_DECL)
5072 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5073 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5074 DECL_QUALIFIER (decl) = NULL_TREE;
5077 if (TREE_CODE (decl) == FUNCTION_DECL)
5079 struct cgraph_node *node;
5080 if (!(node = cgraph_node::get (decl))
5081 || (!node->definition && !node->clones))
5083 if (node)
5084 node->release_body ();
5085 else
5087 release_function_body (decl);
5088 DECL_ARGUMENTS (decl) = NULL;
5089 DECL_RESULT (decl) = NULL;
5090 DECL_INITIAL (decl) = error_mark_node;
5093 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5095 tree t;
5097 /* If DECL has a gimple body, then the context for its
5098 arguments must be DECL. Otherwise, it doesn't really
5099 matter, as we will not be emitting any code for DECL. In
5100 general, there may be other instances of DECL created by
5101 the front end and since PARM_DECLs are generally shared,
5102 their DECL_CONTEXT changes as the replicas of DECL are
5103 created. The only time where DECL_CONTEXT is important
5104 is for the FUNCTION_DECLs that have a gimple body (since
5105 the PARM_DECL will be used in the function's body). */
5106 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5107 DECL_CONTEXT (t) = decl;
5108 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5109 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5110 = target_option_default_node;
5111 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5112 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5113 = optimization_default_node;
5116 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5117 At this point, it is not needed anymore. */
5118 DECL_SAVED_TREE (decl) = NULL_TREE;
5120 /* Clear the abstract origin if it refers to a method.
5121 Otherwise dwarf2out.c will ICE as we splice functions out of
5122 TYPE_FIELDS and thus the origin will not be output
5123 correctly. */
5124 if (DECL_ABSTRACT_ORIGIN (decl)
5125 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5126 && RECORD_OR_UNION_TYPE_P
5127 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5128 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5130 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5131 DECL_VINDEX referring to itself into a vtable slot number as it
5132 should. Happens with functions that are copied and then forgotten
5133 about. Just clear it, it won't matter anymore. */
5134 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5135 DECL_VINDEX (decl) = NULL_TREE;
5137 else if (VAR_P (decl))
5139 if ((DECL_EXTERNAL (decl)
5140 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5141 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5142 DECL_INITIAL (decl) = NULL_TREE;
5144 else if (TREE_CODE (decl) == TYPE_DECL)
5146 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5147 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5148 DECL_INITIAL (decl) = NULL_TREE;
5150 else if (TREE_CODE (decl) == FIELD_DECL)
5151 DECL_INITIAL (decl) = NULL_TREE;
5152 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5153 && DECL_INITIAL (decl)
5154 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5156 /* Strip builtins from the translation-unit BLOCK. We still have targets
5157 without builtin_decl_explicit support and also builtins are shared
5158 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5159 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5160 while (*nextp)
5162 tree var = *nextp;
5163 if (TREE_CODE (var) == FUNCTION_DECL
5164 && DECL_BUILT_IN (var))
5165 *nextp = TREE_CHAIN (var);
5166 else
5167 nextp = &TREE_CHAIN (var);
5173 /* Data used when collecting DECLs and TYPEs for language data removal. */
5175 struct free_lang_data_d
5177 free_lang_data_d () : decls (100), types (100) {}
5179 /* Worklist to avoid excessive recursion. */
5180 auto_vec<tree> worklist;
5182 /* Set of traversed objects. Used to avoid duplicate visits. */
5183 hash_set<tree> pset;
5185 /* Array of symbols to process with free_lang_data_in_decl. */
5186 auto_vec<tree> decls;
5188 /* Array of types to process with free_lang_data_in_type. */
5189 auto_vec<tree> types;
5193 /* Save all language fields needed to generate proper debug information
5194 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5196 static void
5197 save_debug_info_for_decl (tree t)
5199 /*struct saved_debug_info_d *sdi;*/
5201 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5203 /* FIXME. Partial implementation for saving debug info removed. */
5207 /* Save all language fields needed to generate proper debug information
5208 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5210 static void
5211 save_debug_info_for_type (tree t)
5213 /*struct saved_debug_info_d *sdi;*/
5215 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5217 /* FIXME. Partial implementation for saving debug info removed. */
5221 /* Add type or decl T to one of the list of tree nodes that need their
5222 language data removed. The lists are held inside FLD. */
5224 static void
5225 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5227 if (DECL_P (t))
5229 fld->decls.safe_push (t);
5230 if (debug_info_level > DINFO_LEVEL_TERSE)
5231 save_debug_info_for_decl (t);
5233 else if (TYPE_P (t))
5235 fld->types.safe_push (t);
5236 if (debug_info_level > DINFO_LEVEL_TERSE)
5237 save_debug_info_for_type (t);
5239 else
5240 gcc_unreachable ();
5243 /* Push tree node T into FLD->WORKLIST. */
5245 static inline void
5246 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5248 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5249 fld->worklist.safe_push ((t));
5253 /* Operand callback helper for free_lang_data_in_node. *TP is the
5254 subtree operand being considered. */
5256 static tree
5257 find_decls_types_r (tree *tp, int *ws, void *data)
5259 tree t = *tp;
5260 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5262 if (TREE_CODE (t) == TREE_LIST)
5263 return NULL_TREE;
5265 /* Language specific nodes will be removed, so there is no need
5266 to gather anything under them. */
5267 if (is_lang_specific (t))
5269 *ws = 0;
5270 return NULL_TREE;
5273 if (DECL_P (t))
5275 /* Note that walk_tree does not traverse every possible field in
5276 decls, so we have to do our own traversals here. */
5277 add_tree_to_fld_list (t, fld);
5279 fld_worklist_push (DECL_NAME (t), fld);
5280 fld_worklist_push (DECL_CONTEXT (t), fld);
5281 fld_worklist_push (DECL_SIZE (t), fld);
5282 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5284 /* We are going to remove everything under DECL_INITIAL for
5285 TYPE_DECLs. No point walking them. */
5286 if (TREE_CODE (t) != TYPE_DECL)
5287 fld_worklist_push (DECL_INITIAL (t), fld);
5289 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5290 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5292 if (TREE_CODE (t) == FUNCTION_DECL)
5294 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5295 fld_worklist_push (DECL_RESULT (t), fld);
5297 else if (TREE_CODE (t) == TYPE_DECL)
5299 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5301 else if (TREE_CODE (t) == FIELD_DECL)
5303 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5304 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5305 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5306 fld_worklist_push (DECL_FCONTEXT (t), fld);
5309 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
5310 && DECL_HAS_VALUE_EXPR_P (t))
5311 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5313 if (TREE_CODE (t) != FIELD_DECL
5314 && TREE_CODE (t) != TYPE_DECL)
5315 fld_worklist_push (TREE_CHAIN (t), fld);
5316 *ws = 0;
5318 else if (TYPE_P (t))
5320 /* Note that walk_tree does not traverse every possible field in
5321 types, so we have to do our own traversals here. */
5322 add_tree_to_fld_list (t, fld);
5324 if (!RECORD_OR_UNION_TYPE_P (t))
5325 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5326 fld_worklist_push (TYPE_SIZE (t), fld);
5327 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5328 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5329 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5330 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5331 fld_worklist_push (TYPE_NAME (t), fld);
5332 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5333 them and thus do not want to reach unused pointer types
5334 this way. */
5335 if (!POINTER_TYPE_P (t))
5336 fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
5337 /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types. */
5338 if (!RECORD_OR_UNION_TYPE_P (t))
5339 fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
5340 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5341 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5342 do not want to reach unused variants this way. */
5343 if (TYPE_CONTEXT (t))
5345 tree ctx = TYPE_CONTEXT (t);
5346 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5347 So push that instead. */
5348 while (ctx && TREE_CODE (ctx) == BLOCK)
5349 ctx = BLOCK_SUPERCONTEXT (ctx);
5350 fld_worklist_push (ctx, fld);
5352 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5353 want to reach unused types this way. */
5355 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5357 unsigned i;
5358 tree tem;
5359 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5360 fld_worklist_push (TREE_TYPE (tem), fld);
5361 fld_worklist_push (BINFO_VIRTUALS (TYPE_BINFO (t)), fld);
5363 if (RECORD_OR_UNION_TYPE_P (t))
5365 tree tem;
5366 /* Push all TYPE_FIELDS - there can be interleaving interesting
5367 and non-interesting things. */
5368 tem = TYPE_FIELDS (t);
5369 while (tem)
5371 if (TREE_CODE (tem) == FIELD_DECL
5372 || (TREE_CODE (tem) == TYPE_DECL
5373 && !DECL_IGNORED_P (tem)
5374 && debug_info_level > DINFO_LEVEL_TERSE
5375 && !is_redundant_typedef (tem)))
5376 fld_worklist_push (tem, fld);
5377 tem = TREE_CHAIN (tem);
5381 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5382 *ws = 0;
5384 else if (TREE_CODE (t) == BLOCK)
5386 tree tem;
5387 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5388 fld_worklist_push (tem, fld);
5389 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5390 fld_worklist_push (tem, fld);
5391 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5394 if (TREE_CODE (t) != IDENTIFIER_NODE
5395 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5396 fld_worklist_push (TREE_TYPE (t), fld);
5398 return NULL_TREE;
5402 /* Find decls and types in T. */
5404 static void
5405 find_decls_types (tree t, struct free_lang_data_d *fld)
5407 while (1)
5409 if (!fld->pset.contains (t))
5410 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
5411 if (fld->worklist.is_empty ())
5412 break;
5413 t = fld->worklist.pop ();
5417 /* Translate all the types in LIST with the corresponding runtime
5418 types. */
5420 static tree
5421 get_eh_types_for_runtime (tree list)
5423 tree head, prev;
5425 if (list == NULL_TREE)
5426 return NULL_TREE;
5428 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5429 prev = head;
5430 list = TREE_CHAIN (list);
5431 while (list)
5433 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5434 TREE_CHAIN (prev) = n;
5435 prev = TREE_CHAIN (prev);
5436 list = TREE_CHAIN (list);
5439 return head;
5443 /* Find decls and types referenced in EH region R and store them in
5444 FLD->DECLS and FLD->TYPES. */
5446 static void
5447 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5449 switch (r->type)
5451 case ERT_CLEANUP:
5452 break;
5454 case ERT_TRY:
5456 eh_catch c;
5458 /* The types referenced in each catch must first be changed to the
5459 EH types used at runtime. This removes references to FE types
5460 in the region. */
5461 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5463 c->type_list = get_eh_types_for_runtime (c->type_list);
5464 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
5467 break;
5469 case ERT_ALLOWED_EXCEPTIONS:
5470 r->u.allowed.type_list
5471 = get_eh_types_for_runtime (r->u.allowed.type_list);
5472 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
5473 break;
5475 case ERT_MUST_NOT_THROW:
5476 walk_tree (&r->u.must_not_throw.failure_decl,
5477 find_decls_types_r, fld, &fld->pset);
5478 break;
5483 /* Find decls and types referenced in cgraph node N and store them in
5484 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5485 look for *every* kind of DECL and TYPE node reachable from N,
5486 including those embedded inside types and decls (i.e., TYPE_DECLs,
5487 NAMESPACE_DECLs, etc). */
5489 static void
5490 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5492 basic_block bb;
5493 struct function *fn;
5494 unsigned ix;
5495 tree t;
5497 find_decls_types (n->decl, fld);
5499 if (!gimple_has_body_p (n->decl))
5500 return;
5502 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5504 fn = DECL_STRUCT_FUNCTION (n->decl);
5506 /* Traverse locals. */
5507 FOR_EACH_LOCAL_DECL (fn, ix, t)
5508 find_decls_types (t, fld);
5510 /* Traverse EH regions in FN. */
5512 eh_region r;
5513 FOR_ALL_EH_REGION_FN (r, fn)
5514 find_decls_types_in_eh_region (r, fld);
5517 /* Traverse every statement in FN. */
5518 FOR_EACH_BB_FN (bb, fn)
5520 gphi_iterator psi;
5521 gimple_stmt_iterator si;
5522 unsigned i;
5524 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5526 gphi *phi = psi.phi ();
5528 for (i = 0; i < gimple_phi_num_args (phi); i++)
5530 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5531 find_decls_types (*arg_p, fld);
5535 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5537 gimple *stmt = gsi_stmt (si);
5539 if (is_gimple_call (stmt))
5540 find_decls_types (gimple_call_fntype (stmt), fld);
5542 for (i = 0; i < gimple_num_ops (stmt); i++)
5544 tree arg = gimple_op (stmt, i);
5545 find_decls_types (arg, fld);
5552 /* Find decls and types referenced in varpool node N and store them in
5553 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5554 look for *every* kind of DECL and TYPE node reachable from N,
5555 including those embedded inside types and decls (i.e., TYPE_DECLs,
5556 NAMESPACE_DECLs, etc). */
5558 static void
5559 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5561 find_decls_types (v->decl, fld);
5564 /* If T needs an assembler name, have one created for it. */
5566 void
5567 assign_assembler_name_if_needed (tree t)
5569 if (need_assembler_name_p (t))
5571 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5572 diagnostics that use input_location to show locus
5573 information. The problem here is that, at this point,
5574 input_location is generally anchored to the end of the file
5575 (since the parser is long gone), so we don't have a good
5576 position to pin it to.
5578 To alleviate this problem, this uses the location of T's
5579 declaration. Examples of this are
5580 testsuite/g++.dg/template/cond2.C and
5581 testsuite/g++.dg/template/pr35240.C. */
5582 location_t saved_location = input_location;
5583 input_location = DECL_SOURCE_LOCATION (t);
5585 decl_assembler_name (t);
5587 input_location = saved_location;
5592 /* Free language specific information for every operand and expression
5593 in every node of the call graph. This process operates in three stages:
5595 1- Every callgraph node and varpool node is traversed looking for
5596 decls and types embedded in them. This is a more exhaustive
5597 search than that done by find_referenced_vars, because it will
5598 also collect individual fields, decls embedded in types, etc.
5600 2- All the decls found are sent to free_lang_data_in_decl.
5602 3- All the types found are sent to free_lang_data_in_type.
5604 The ordering between decls and types is important because
5605 free_lang_data_in_decl sets assembler names, which includes
5606 mangling. So types cannot be freed up until assembler names have
5607 been set up. */
5609 static void
5610 free_lang_data_in_cgraph (void)
5612 struct cgraph_node *n;
5613 varpool_node *v;
5614 struct free_lang_data_d fld;
5615 tree t;
5616 unsigned i;
5617 alias_pair *p;
5619 /* Find decls and types in the body of every function in the callgraph. */
5620 FOR_EACH_FUNCTION (n)
5621 find_decls_types_in_node (n, &fld);
5623 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5624 find_decls_types (p->decl, &fld);
5626 /* Find decls and types in every varpool symbol. */
5627 FOR_EACH_VARIABLE (v)
5628 find_decls_types_in_var (v, &fld);
5630 /* Set the assembler name on every decl found. We need to do this
5631 now because free_lang_data_in_decl will invalidate data needed
5632 for mangling. This breaks mangling on interdependent decls. */
5633 FOR_EACH_VEC_ELT (fld.decls, i, t)
5634 assign_assembler_name_if_needed (t);
5636 /* Traverse every decl found freeing its language data. */
5637 FOR_EACH_VEC_ELT (fld.decls, i, t)
5638 free_lang_data_in_decl (t);
5640 /* Traverse every type found freeing its language data. */
5641 FOR_EACH_VEC_ELT (fld.types, i, t)
5642 free_lang_data_in_type (t);
5643 if (flag_checking)
5645 FOR_EACH_VEC_ELT (fld.types, i, t)
5646 verify_type (t);
5651 /* Free resources that are used by FE but are not needed once they are done. */
5653 static unsigned
5654 free_lang_data (void)
5656 unsigned i;
5658 /* If we are the LTO frontend we have freed lang-specific data already. */
5659 if (in_lto_p
5660 || (!flag_generate_lto && !flag_generate_offload))
5661 return 0;
5663 /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one. */
5664 if (vec_safe_is_empty (all_translation_units))
5665 build_translation_unit_decl (NULL_TREE);
5667 /* Allocate and assign alias sets to the standard integer types
5668 while the slots are still set up the way the frontends generated them. */
5669 for (i = 0; i < itk_none; ++i)
5670 if (integer_types[i])
5671 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5673 /* Traverse the IL resetting language specific information for
5674 operands, expressions, etc. */
5675 free_lang_data_in_cgraph ();
5677 /* Create gimple variants for common types. */
5678 for (unsigned i = 0;
5679 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
5680 ++i)
5681 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
5683 /* Reset some langhooks. Do not reset types_compatible_p, it may
5684 still be used indirectly via the get_alias_set langhook. */
5685 lang_hooks.dwarf_name = lhd_dwarf_name;
5686 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5687 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5689 /* We do not want the default decl_assembler_name implementation;
5690 rather, once everything is fixed up we want a wrapper around it
5691 that asserts all non-local symbols already got their assembler
5692 names and only produces assembler names for local symbols. Or,
5693 better, make sure we never call decl_assembler_name on local symbols
5694 and devise a separate, middle-end private scheme for it. */
5696 /* Reset diagnostic machinery. */
5697 tree_diagnostics_defaults (global_dc);
5699 return 0;
5703 namespace {
5705 const pass_data pass_data_ipa_free_lang_data =
5707 SIMPLE_IPA_PASS, /* type */
5708 "*free_lang_data", /* name */
5709 OPTGROUP_NONE, /* optinfo_flags */
5710 TV_IPA_FREE_LANG_DATA, /* tv_id */
5711 0, /* properties_required */
5712 0, /* properties_provided */
5713 0, /* properties_destroyed */
5714 0, /* todo_flags_start */
5715 0, /* todo_flags_finish */
5718 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5720 public:
5721 pass_ipa_free_lang_data (gcc::context *ctxt)
5722 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5725 /* opt_pass methods: */
5726 virtual unsigned int execute (function *) { return free_lang_data (); }
5728 }; // class pass_ipa_free_lang_data
5730 } // anon namespace
5732 simple_ipa_opt_pass *
5733 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5735 return new pass_ipa_free_lang_data (ctxt);
5738 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5739 of the various TYPE_QUAL values. */
5741 static void
5742 set_type_quals (tree type, int type_quals)
5744 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5745 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5746 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5747 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5748 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
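/* For illustration, TYPE_QUALS values are bitmasks, so a const volatile
   variant is requested as (sketch):

     set_type_quals (t, TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   Callers normally go through build_qualified_type below rather than
   using this helper directly.  */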
5751 /* Returns true iff CAND and BASE have equivalent language-specific
5752 qualifiers. */
5754 bool
5755 check_lang_type (const_tree cand, const_tree base)
5757 if (lang_hooks.types.type_hash_eq == NULL)
5758 return true;
5759 /* type_hash_eq currently only applies to these types. */
5760 if (TREE_CODE (cand) != FUNCTION_TYPE
5761 && TREE_CODE (cand) != METHOD_TYPE)
5762 return true;
5763 return lang_hooks.types.type_hash_eq (cand, base);
5766 /* Returns true iff unqualified CAND and BASE are equivalent. */
5768 bool
5769 check_base_type (const_tree cand, const_tree base)
5771 return (TYPE_NAME (cand) == TYPE_NAME (base)
5772 /* Apparently this is needed for Objective-C. */
5773 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5774 /* Check alignment. */
5775 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5776 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5777 TYPE_ATTRIBUTES (base)));
5780 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5782 bool
5783 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5785 return (TYPE_QUALS (cand) == type_quals
5786 && check_base_type (cand, base)
5787 && check_lang_type (cand, base));
5790 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5792 static bool
5793 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5795 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5796 && TYPE_NAME (cand) == TYPE_NAME (base)
5797 /* Apparently this is needed for Objective-C. */
5798 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5799 /* Check alignment. */
5800 && TYPE_ALIGN (cand) == align
5801 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5802 TYPE_ATTRIBUTES (base))
5803 && check_lang_type (cand, base));
5806 /* This function checks to see if TYPE matches the size of one of the
5807 built-in atomic types, and returns that core atomic type. */
5809 static tree
5810 find_atomic_core_type (tree type)
5812 tree base_atomic_type;
5814 /* Only handle complete types. */
5815 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5816 return NULL_TREE;
5818 switch (tree_to_uhwi (TYPE_SIZE (type)))
5820 case 8:
5821 base_atomic_type = atomicQI_type_node;
5822 break;
5824 case 16:
5825 base_atomic_type = atomicHI_type_node;
5826 break;
5828 case 32:
5829 base_atomic_type = atomicSI_type_node;
5830 break;
5832 case 64:
5833 base_atomic_type = atomicDI_type_node;
5834 break;
5836 case 128:
5837 base_atomic_type = atomicTI_type_node;
5838 break;
5840 default:
5841 base_atomic_type = NULL_TREE;
5844 return base_atomic_type;
5847 /* Return a version of the TYPE, qualified as indicated by the
5848 TYPE_QUALS, if one exists. If no qualified version exists yet,
5849 return NULL_TREE. */
5851 tree
5852 get_qualified_type (tree type, int type_quals)
5854 tree t;
5856 if (TYPE_QUALS (type) == type_quals)
5857 return type;
5859 /* Search the chain of variants to see if there is already one there just
5860 like the one we need to have. If so, use that existing one. We must
5861 preserve the TYPE_NAME, since there is code that depends on this. */
5862 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5863 if (check_qualified_type (t, type, type_quals))
5864 return t;
5866 return NULL_TREE;
5869 /* Like get_qualified_type, but creates the type if it does not
5870 exist. This function never returns NULL_TREE. */
5872 tree
5873 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5875 tree t;
5877 /* See if we already have the appropriate qualified variant. */
5878 t = get_qualified_type (type, type_quals);
5880 /* If not, build it. */
5881 if (!t)
5883 t = build_variant_type_copy (type PASS_MEM_STAT);
5884 set_type_quals (t, type_quals);
5886 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5888 /* See if this object can map to a basic atomic type. */
5889 tree atomic_type = find_atomic_core_type (type);
5890 if (atomic_type)
5892 /* Ensure the alignment of this type is compatible with
5893 the required alignment of the atomic type. */
5894 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5895 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5899 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5900 /* Propagate structural equality. */
5901 SET_TYPE_STRUCTURAL_EQUALITY (t);
5902 else if (TYPE_CANONICAL (type) != type)
5903 /* Build the underlying canonical type, since it is different
5904 from TYPE. */
5906 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5907 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5909 else
5910 /* T is its own canonical type. */
5911 TYPE_CANONICAL (t) = t;
5915 return t;
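/* For illustration (a sketch):

     tree cc = build_qualified_type (char_type_node, TYPE_QUAL_CONST);

   If a const variant of char already exists on the variant chain it is
   returned; otherwise a new variant is created with TYPE_READONLY set and
   its TYPE_CANONICAL derived as above.  */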
5918 /* Create a variant of type T with alignment ALIGN. */
5920 tree
5921 build_aligned_type (tree type, unsigned int align)
5923 tree t;
5925 if (TYPE_PACKED (type)
5926 || TYPE_ALIGN (type) == align)
5927 return type;
5929 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5930 if (check_aligned_type (t, type, align))
5931 return t;
5933 t = build_variant_type_copy (type);
5934 SET_TYPE_ALIGN (t, align);
5935 TYPE_USER_ALIGN (t) = 1;
5937 return t;
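/* For illustration (a sketch):

     tree aligned16 = build_aligned_type (intDI_type_node, 128);

   ALIGN is measured in bits, so this requests a 16-byte-aligned variant;
   TYPE_USER_ALIGN is set on the result.  */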
5940 /* Create a new distinct copy of TYPE. The new type is made its own
5941 MAIN_VARIANT. If TYPE requires structural equality checks, the
5942 resulting type requires structural equality checks; otherwise, its
5943 TYPE_CANONICAL points to itself. */
5945 tree
5946 build_distinct_type_copy (tree type MEM_STAT_DECL)
5948 tree t = copy_node (type PASS_MEM_STAT);
5950 TYPE_POINTER_TO (t) = 0;
5951 TYPE_REFERENCE_TO (t) = 0;
5953 /* Set the canonical type either to a new equivalence class, or
5954 propagate the need for structural equality checks. */
5955 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5956 SET_TYPE_STRUCTURAL_EQUALITY (t);
5957 else
5958 TYPE_CANONICAL (t) = t;
5960 /* Make it its own variant. */
5961 TYPE_MAIN_VARIANT (t) = t;
5962 TYPE_NEXT_VARIANT (t) = 0;
5964 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5965 whose TREE_TYPE is not t. This can also happen in the Ada
5966 frontend when using subtypes. */
5968 return t;
5971 /* Create a new variant of TYPE, equivalent but distinct. This is so
5972 the caller can modify it. TYPE_CANONICAL for the return type will
5973 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5974 are considered equal by the language itself (or that both types
5975 require structural equality checks). */
5977 tree
5978 build_variant_type_copy (tree type MEM_STAT_DECL)
5980 tree t, m = TYPE_MAIN_VARIANT (type);
5982 t = build_distinct_type_copy (type PASS_MEM_STAT);
5984 /* Since we're building a variant, assume that it is a non-semantic
5985 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5986 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5987 /* Type variants have no alias set defined. */
5988 TYPE_ALIAS_SET (t) = -1;
5990 /* Add the new type to the chain of variants of TYPE. */
5991 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5992 TYPE_NEXT_VARIANT (m) = t;
5993 TYPE_MAIN_VARIANT (t) = m;
5995 return t;
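/* For illustration of the difference from build_distinct_type_copy
   (a sketch):

     tree v = build_variant_type_copy (type);
     tree d = build_distinct_type_copy (type);

   V shares TYPE's canonical type and stays on its variant chain, while D
   becomes its own main variant and (unless structural equality is in
   force) its own canonical type.  */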
5998 /* Return true if the from tree in both tree maps are equal. */
6001 tree_map_base_eq (const void *va, const void *vb)
6003 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6004 *const b = (const struct tree_map_base *) vb;
6005 return (a->from == b->from);
6008 /* Hash a from tree in a tree_base_map. */
6010 unsigned int
6011 tree_map_base_hash (const void *item)
6013 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6016 /* Return true if this tree map structure is marked for garbage collection
6017 purposes. We simply return true if the from tree is marked, so that this
6018 structure goes away when the from tree goes away. */
6021 tree_map_base_marked_p (const void *p)
6023 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6026 /* Hash a from tree in a tree_map. */
6028 unsigned int
6029 tree_map_hash (const void *item)
6031 return (((const struct tree_map *) item)->hash);
6034 /* Hash a from tree in a tree_decl_map. */
6036 unsigned int
6037 tree_decl_map_hash (const void *item)
6039 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6042 /* Return the initialization priority for DECL. */
6044 priority_type
6045 decl_init_priority_lookup (tree decl)
6047 symtab_node *snode = symtab_node::get (decl);
6049 if (!snode)
6050 return DEFAULT_INIT_PRIORITY;
6051 return
6052 snode->get_init_priority ();
6055 /* Return the finalization priority for DECL. */
6057 priority_type
6058 decl_fini_priority_lookup (tree decl)
6060 cgraph_node *node = cgraph_node::get (decl);
6062 if (!node)
6063 return DEFAULT_INIT_PRIORITY;
6064 return
6065 node->get_fini_priority ();
6068 /* Set the initialization priority for DECL to PRIORITY. */
6070 void
6071 decl_init_priority_insert (tree decl, priority_type priority)
6073 struct symtab_node *snode;
6075 if (priority == DEFAULT_INIT_PRIORITY)
6077 snode = symtab_node::get (decl);
6078 if (!snode)
6079 return;
6081 else if (VAR_P (decl))
6082 snode = varpool_node::get_create (decl);
6083 else
6084 snode = cgraph_node::get_create (decl);
6085 snode->set_init_priority (priority);
6088 /* Set the finalization priority for DECL to PRIORITY. */
6090 void
6091 decl_fini_priority_insert (tree decl, priority_type priority)
6093 struct cgraph_node *node;
6095 if (priority == DEFAULT_INIT_PRIORITY)
6097 node = cgraph_node::get (decl);
6098 if (!node)
6099 return;
6101 else
6102 node = cgraph_node::get_create (decl);
6103 node->set_fini_priority (priority);
6106 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6108 static void
6109 print_debug_expr_statistics (void)
6111 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6112 (long) debug_expr_for_decl->size (),
6113 (long) debug_expr_for_decl->elements (),
6114 debug_expr_for_decl->collisions ());
6117 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6119 static void
6120 print_value_expr_statistics (void)
6122 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6123 (long) value_expr_for_decl->size (),
6124 (long) value_expr_for_decl->elements (),
6125 value_expr_for_decl->collisions ());
6128 /* Lookup a debug expression for FROM, and return it if we find one. */
6130 tree
6131 decl_debug_expr_lookup (tree from)
6133 struct tree_decl_map *h, in;
6134 in.base.from = from;
6136 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6137 if (h)
6138 return h->to;
6139 return NULL_TREE;
6142 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6144 void
6145 decl_debug_expr_insert (tree from, tree to)
6147 struct tree_decl_map *h;
6149 h = ggc_alloc<tree_decl_map> ();
6150 h->base.from = from;
6151 h->to = to;
6152 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6155 /* Lookup a value expression for FROM, and return it if we find one. */
6157 tree
6158 decl_value_expr_lookup (tree from)
6160 struct tree_decl_map *h, in;
6161 in.base.from = from;
6163 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6164 if (h)
6165 return h->to;
6166 return NULL_TREE;
6169 /* Insert a mapping FROM->TO in the value expression hashtable. */
6171 void
6172 decl_value_expr_insert (tree from, tree to)
6174 struct tree_decl_map *h;
6176 h = ggc_alloc<tree_decl_map> ();
6177 h->base.from = from;
6178 h->to = to;
6179 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6182 /* Lookup a vector of debug arguments for FROM, and return it if we
6183 find one. */
6185 vec<tree, va_gc> **
6186 decl_debug_args_lookup (tree from)
6188 struct tree_vec_map *h, in;
6190 if (!DECL_HAS_DEBUG_ARGS_P (from))
6191 return NULL;
6192 gcc_checking_assert (debug_args_for_decl != NULL);
6193 in.base.from = from;
6194 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6195 if (h)
6196 return &h->to;
6197 return NULL;
6200 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6201 arguments hashtable. */
6203 vec<tree, va_gc> **
6204 decl_debug_args_insert (tree from)
6206 struct tree_vec_map *h;
6207 tree_vec_map **loc;
6209 if (DECL_HAS_DEBUG_ARGS_P (from))
6210 return decl_debug_args_lookup (from);
6211 if (debug_args_for_decl == NULL)
6212 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6213 h = ggc_alloc<tree_vec_map> ();
6214 h->base.from = from;
6215 h->to = NULL;
6216 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6217 *loc = h;
6218 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6219 return &h->to;
6222 /* Hashing of types so that we don't make duplicates.
6223 The entry point is `type_hash_canon'. */
6225 /* Generate the default hash code for TYPE. This is designed for
6226 speed, rather than maximum entropy. */
6228 hashval_t
6229 type_hash_canon_hash (tree type)
6231 inchash::hash hstate;
6233 hstate.add_int (TREE_CODE (type));
6235 if (TREE_TYPE (type))
6236 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6238 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6239 /* Just the identifier is adequate to distinguish. */
6240 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6242 switch (TREE_CODE (type))
6244 case METHOD_TYPE:
6245 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6246 /* FALLTHROUGH. */
6247 case FUNCTION_TYPE:
6248 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6249 if (TREE_VALUE (t) != error_mark_node)
6250 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6251 break;
6253 case OFFSET_TYPE:
6254 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6255 break;
6257 case ARRAY_TYPE:
6259 if (TYPE_DOMAIN (type))
6260 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6261 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6263 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6264 hstate.add_object (typeless);
6267 break;
6269 case INTEGER_TYPE:
6271 tree t = TYPE_MAX_VALUE (type);
6272 if (!t)
6273 t = TYPE_MIN_VALUE (type);
6274 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6275 hstate.add_object (TREE_INT_CST_ELT (t, i));
6276 break;
6279 case REAL_TYPE:
6280 case FIXED_POINT_TYPE:
6282 unsigned prec = TYPE_PRECISION (type);
6283 hstate.add_object (prec);
6284 break;
6287 case VECTOR_TYPE:
6289 unsigned nunits = TYPE_VECTOR_SUBPARTS (type);
6290 hstate.add_object (nunits);
6291 break;
6294 default:
6295 break;
6298 return hstate.end ();
6301 /* These are the Hashtable callback functions. */
6303 /* Returns true iff the types are equivalent. */
6305 bool
6306 type_cache_hasher::equal (type_hash *a, type_hash *b)
6308 /* First test the things that are the same for all types. */
6309 if (a->hash != b->hash
6310 || TREE_CODE (a->type) != TREE_CODE (b->type)
6311 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6312 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6313 TYPE_ATTRIBUTES (b->type))
6314 || (TREE_CODE (a->type) != COMPLEX_TYPE
6315 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6316 return 0;
6318 /* Be careful about comparing arrays before and after the element type
6319 has been completed; don't compare TYPE_ALIGN unless both types are
6320 complete. */
6321 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6322 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6323 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6324 return 0;
6326 switch (TREE_CODE (a->type))
6328 case VOID_TYPE:
6329 case COMPLEX_TYPE:
6330 case POINTER_TYPE:
6331 case REFERENCE_TYPE:
6332 case NULLPTR_TYPE:
6333 return 1;
6335 case VECTOR_TYPE:
6336 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6338 case ENUMERAL_TYPE:
6339 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6340 && !(TYPE_VALUES (a->type)
6341 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6342 && TYPE_VALUES (b->type)
6343 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6344 && type_list_equal (TYPE_VALUES (a->type),
6345 TYPE_VALUES (b->type))))
6346 return 0;
6348 /* fall through */
6350 case INTEGER_TYPE:
6351 case REAL_TYPE:
6352 case BOOLEAN_TYPE:
6353 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6354 return false;
6355 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6356 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6357 TYPE_MAX_VALUE (b->type)))
6358 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6359 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6360 TYPE_MIN_VALUE (b->type))));
6362 case FIXED_POINT_TYPE:
6363 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6365 case OFFSET_TYPE:
6366 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6368 case METHOD_TYPE:
6369 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6370 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6371 || (TYPE_ARG_TYPES (a->type)
6372 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6373 && TYPE_ARG_TYPES (b->type)
6374 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6375 && type_list_equal (TYPE_ARG_TYPES (a->type),
6376 TYPE_ARG_TYPES (b->type)))))
6377 break;
6378 return 0;
6379 case ARRAY_TYPE:
6380 /* Don't compare the TYPE_TYPELESS_STORAGE flag on aggregates,
6381 where the flag should be inherited from the element type
6382 and can change after ARRAY_TYPEs are created; on non-aggregates
6383 compare and hash it, since scalars never have that flag set
6384 and we need to distinguish arrays created by different
6385 front ends from middle-end-created arrays. */
6386 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6387 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6388 || (TYPE_TYPELESS_STORAGE (a->type)
6389 == TYPE_TYPELESS_STORAGE (b->type))));
6391 case RECORD_TYPE:
6392 case UNION_TYPE:
6393 case QUAL_UNION_TYPE:
6394 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6395 || (TYPE_FIELDS (a->type)
6396 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6397 && TYPE_FIELDS (b->type)
6398 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6399 && type_list_equal (TYPE_FIELDS (a->type),
6400 TYPE_FIELDS (b->type))));
6402 case FUNCTION_TYPE:
6403 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6404 || (TYPE_ARG_TYPES (a->type)
6405 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6406 && TYPE_ARG_TYPES (b->type)
6407 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6408 && type_list_equal (TYPE_ARG_TYPES (a->type),
6409 TYPE_ARG_TYPES (b->type))))
6410 break;
6411 return 0;
6413 default:
6414 return 0;
6417 if (lang_hooks.types.type_hash_eq != NULL)
6418 return lang_hooks.types.type_hash_eq (a->type, b->type);
6420 return 1;
6423 /* Given TYPE, and HASHCODE its hash code, return the canonical
6424 object for an identical type if one already exists.
6425 Otherwise, return TYPE, and record it as the canonical object.
6427 To use this function, first create a type of the sort you want.
6428 Then compute its hash code from the fields of the type that
6429 make it different from other similar types.
6430 Then call this function and use the value. */
6432 tree
6433 type_hash_canon (unsigned int hashcode, tree type)
6435 type_hash in;
6436 type_hash **loc;
6438 /* The hash table only contains main variants, so ensure that's what we're
6439 being passed. */
6440 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6442 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6443 must call that routine before comparing TYPE_ALIGNs. */
6444 layout_type (type);
6446 in.hash = hashcode;
6447 in.type = type;
6449 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6450 if (*loc)
6452 tree t1 = ((type_hash *) *loc)->type;
6453 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6454 if (TYPE_UID (type) + 1 == next_type_uid)
6455 --next_type_uid;
6456 /* Also free the min/max values and the cache for integer
6457 types. This can't be done in free_node, as LTO frees
6458 those on its own. */
6459 if (TREE_CODE (type) == INTEGER_TYPE)
6461 if (TYPE_MIN_VALUE (type)
6462 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6464 /* Zero is always in TYPE_CACHED_VALUES. */
6465 if (! TYPE_UNSIGNED (type))
6466 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6467 ggc_free (TYPE_MIN_VALUE (type));
6469 if (TYPE_MAX_VALUE (type)
6470 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6472 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6473 ggc_free (TYPE_MAX_VALUE (type));
6475 if (TYPE_CACHED_VALUES_P (type))
6476 ggc_free (TYPE_CACHED_VALUES (type));
6478 free_node (type);
6479 return t1;
6481 else
6483 struct type_hash *h;
6485 h = ggc_alloc<type_hash> ();
6486 h->hash = hashcode;
6487 h->type = type;
6488 *loc = h;
6490 return type;
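/* A sketch of the intended calling sequence, assuming NEW_TYPE is a
   hypothetical, freshly built main-variant type:

     hashval_t hash = type_hash_canon_hash (new_type);
     new_type = type_hash_canon (hash, new_type);

   If an identical type is already registered, the new node is freed and
   the existing canonical node is returned; otherwise NEW_TYPE itself is
   recorded and returned.  */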
6494 static void
6495 print_type_hash_statistics (void)
6497 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6498 (long) type_hash_table->size (),
6499 (long) type_hash_table->elements (),
6500 type_hash_table->collisions ());
6503 /* Given two lists of types
6504 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6505 return 1 if the lists contain the same types in the same order.
6506 Also, the TREE_PURPOSEs must match. */
6509 type_list_equal (const_tree l1, const_tree l2)
6511 const_tree t1, t2;
6513 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6514 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6515 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6516 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6517 && (TREE_TYPE (TREE_PURPOSE (t1))
6518 == TREE_TYPE (TREE_PURPOSE (t2))))))
6519 return 0;
6521 return t1 == t2;
6524 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6525 given by TYPE. If the argument list accepts variable arguments,
6526 then this function counts only the ordinary arguments. */
6529 type_num_arguments (const_tree type)
6531 int i = 0;
6532 tree t;
6534 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6535 /* If the function does not take a variable number of arguments,
6536 the last element in the list will have type `void'. */
6537 if (VOID_TYPE_P (TREE_VALUE (t)))
6538 break;
6539 else
6540 ++i;
6542 return i;
6545 /* Nonzero if integer constants T1 and T2
6546 represent the same constant value. */
6549 tree_int_cst_equal (const_tree t1, const_tree t2)
6551 if (t1 == t2)
6552 return 1;
6554 if (t1 == 0 || t2 == 0)
6555 return 0;
6557 if (TREE_CODE (t1) == INTEGER_CST
6558 && TREE_CODE (t2) == INTEGER_CST
6559 && wi::to_widest (t1) == wi::to_widest (t2))
6560 return 1;
6562 return 0;
6565 /* Return true if T is an INTEGER_CST whose numerical value (extended
6566 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6568 bool
6569 tree_fits_shwi_p (const_tree t)
6571 return (t != NULL_TREE
6572 && TREE_CODE (t) == INTEGER_CST
6573 && wi::fits_shwi_p (wi::to_widest (t)));
6576 /* Return true if T is an INTEGER_CST whose numerical value (extended
6577 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6579 bool
6580 tree_fits_uhwi_p (const_tree t)
6582 return (t != NULL_TREE
6583 && TREE_CODE (t) == INTEGER_CST
6584 && wi::fits_uhwi_p (wi::to_widest (t)));
6587 /* T is an INTEGER_CST whose numerical value (extended according to
6588 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6589 HOST_WIDE_INT. */
6591 HOST_WIDE_INT
6592 tree_to_shwi (const_tree t)
6594 gcc_assert (tree_fits_shwi_p (t));
6595 return TREE_INT_CST_LOW (t);
6598 /* T is an INTEGER_CST whose numerical value (extended according to
6599 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6600 HOST_WIDE_INT. */
6602 unsigned HOST_WIDE_INT
6603 tree_to_uhwi (const_tree t)
6605 gcc_assert (tree_fits_uhwi_p (t));
6606 return TREE_INT_CST_LOW (t);
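/* Typical guarded use, assuming SIZE_TREE is some INTEGER_CST picked up
   from a tree:

     if (tree_fits_uhwi_p (size_tree))
       {
         unsigned HOST_WIDE_INT size = tree_to_uhwi (size_tree);
         ...
       }

   Calling tree_to_uhwi or tree_to_shwi without the corresponding check
   trips the asserts above.  */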
6609 /* Return the most significant (sign) bit of T. */
6612 tree_int_cst_sign_bit (const_tree t)
6614 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6616 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6619 /* Return an indication of the sign of the integer constant T.
6620 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6621 Note that -1 will never be returned if T's type is unsigned. */
6624 tree_int_cst_sgn (const_tree t)
6626 if (wi::to_wide (t) == 0)
6627 return 0;
6628 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6629 return 1;
6630 else if (wi::neg_p (wi::to_wide (t)))
6631 return -1;
6632 else
6633 return 1;
6636 /* Return the minimum number of bits needed to represent VALUE in a
6637 signed or unsigned type; SGN says which. */
6639 unsigned int
6640 tree_int_cst_min_precision (tree value, signop sgn)
6642 /* If the value is negative, compute its negative minus 1. The latter
6643 adjustment is because the absolute value of the largest negative value
6644 is one larger than the largest positive value. This is equivalent to
6645 a bit-wise negation, so use that operation instead. */
6647 if (tree_int_cst_sgn (value) < 0)
6648 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6650 /* Return the number of bits needed, taking into account the fact
6651 that we need one more bit for a signed than unsigned type.
6652 If value is 0 or -1, the minimum precision is 1 no matter
6653 whether SGN is SIGNED or UNSIGNED. */
6655 if (integer_zerop (value))
6656 return 1;
6657 else
6658 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
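/* Worked example: for VALUE == 5, tree_floor_log2 returns 2, giving a
   minimum precision of 3 bits for UNSIGNED and 4 bits for SIGNED; for
   VALUE == -5, the bit-wise negation yields 4 and the result is 4 bits
   for SIGNED (range -8 .. 7).  */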
6661 /* Return truthvalue of whether T1 is the same tree structure as T2.
6662 Return 1 if they are the same.
6663 Return 0 if they are understandably different.
6664 Return -1 if either contains tree structure not understood by
6665 this function. */
6668 simple_cst_equal (const_tree t1, const_tree t2)
6670 enum tree_code code1, code2;
6671 int cmp;
6672 int i;
6674 if (t1 == t2)
6675 return 1;
6676 if (t1 == 0 || t2 == 0)
6677 return 0;
6679 code1 = TREE_CODE (t1);
6680 code2 = TREE_CODE (t2);
6682 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6684 if (CONVERT_EXPR_CODE_P (code2)
6685 || code2 == NON_LVALUE_EXPR)
6686 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6687 else
6688 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6691 else if (CONVERT_EXPR_CODE_P (code2)
6692 || code2 == NON_LVALUE_EXPR)
6693 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6695 if (code1 != code2)
6696 return 0;
6698 switch (code1)
6700 case INTEGER_CST:
6701 return wi::to_widest (t1) == wi::to_widest (t2);
6703 case REAL_CST:
6704 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6706 case FIXED_CST:
6707 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6709 case STRING_CST:
6710 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6711 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6712 TREE_STRING_LENGTH (t1)));
6714 case CONSTRUCTOR:
6716 unsigned HOST_WIDE_INT idx;
6717 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6718 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6720 if (vec_safe_length (v1) != vec_safe_length (v2))
6721 return false;
6723 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6724 /* ??? Should we handle also fields here? */
6725 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6726 return false;
6727 return true;
6730 case SAVE_EXPR:
6731 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6733 case CALL_EXPR:
6734 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6735 if (cmp <= 0)
6736 return cmp;
6737 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6738 return 0;
6740 const_tree arg1, arg2;
6741 const_call_expr_arg_iterator iter1, iter2;
6742 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6743 arg2 = first_const_call_expr_arg (t2, &iter2);
6744 arg1 && arg2;
6745 arg1 = next_const_call_expr_arg (&iter1),
6746 arg2 = next_const_call_expr_arg (&iter2))
6748 cmp = simple_cst_equal (arg1, arg2);
6749 if (cmp <= 0)
6750 return cmp;
6752 return arg1 == arg2;
6755 case TARGET_EXPR:
6756 /* Special case: if either target is an unallocated VAR_DECL,
6757 it means that it's going to be unified with whatever the
6758 TARGET_EXPR is really supposed to initialize, so treat it
6759 as being equivalent to anything. */
6760 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6761 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6762 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6763 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6764 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6765 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6766 cmp = 1;
6767 else
6768 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6770 if (cmp <= 0)
6771 return cmp;
6773 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6775 case WITH_CLEANUP_EXPR:
6776 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6777 if (cmp <= 0)
6778 return cmp;
6780 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6782 case COMPONENT_REF:
6783 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6784 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6786 return 0;
6788 case VAR_DECL:
6789 case PARM_DECL:
6790 case CONST_DECL:
6791 case FUNCTION_DECL:
6792 return 0;
6794 default:
6795 break;
6798 /* This general rule works for most tree codes. All exceptions should be
6799 handled above. If this is a language-specific tree code, we can't
6800 trust what might be in the operand, so say we don't know
6801 the situation. */
6802 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6803 return -1;
6805 switch (TREE_CODE_CLASS (code1))
6807 case tcc_unary:
6808 case tcc_binary:
6809 case tcc_comparison:
6810 case tcc_expression:
6811 case tcc_reference:
6812 case tcc_statement:
6813 cmp = 1;
6814 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6816 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6817 if (cmp <= 0)
6818 return cmp;
6821 return cmp;
6823 default:
6824 return -1;
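/* For instance, simple_cst_equal (size_int (4), size_int (4)) is 1,
   simple_cst_equal (size_int (4), size_int (5)) is 0, and a comparison
   involving a language-specific tree code yields -1.  */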
6828 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6829 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6830 than U, respectively. */
6833 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6835 if (tree_int_cst_sgn (t) < 0)
6836 return -1;
6837 else if (!tree_fits_uhwi_p (t))
6838 return 1;
6839 else if (TREE_INT_CST_LOW (t) == u)
6840 return 0;
6841 else if (TREE_INT_CST_LOW (t) < u)
6842 return -1;
6843 else
6844 return 1;
6847 /* Return true if SIZE represents a constant size that is in bounds of
6848 what the middle-end and the backend accept (covering no more than
6849 half of the address space). */
6851 bool
6852 valid_constant_size_p (const_tree size)
6854 if (! tree_fits_uhwi_p (size)
6855 || TREE_OVERFLOW (size)
6856 || tree_int_cst_sign_bit (size) != 0)
6857 return false;
6858 return true;
6861 /* Return the precision of the type, or for a complex or vector type the
6862 precision of the type of its elements. */
6864 unsigned int
6865 element_precision (const_tree type)
6867 if (!TYPE_P (type))
6868 type = TREE_TYPE (type);
6869 enum tree_code code = TREE_CODE (type);
6870 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6871 type = TREE_TYPE (type);
6873 return TYPE_PRECISION (type);
6876 /* Return true if CODE represents an associative tree code. Otherwise
6877 return false. */
6878 bool
6879 associative_tree_code (enum tree_code code)
6881 switch (code)
6883 case BIT_IOR_EXPR:
6884 case BIT_AND_EXPR:
6885 case BIT_XOR_EXPR:
6886 case PLUS_EXPR:
6887 case MULT_EXPR:
6888 case MIN_EXPR:
6889 case MAX_EXPR:
6890 return true;
6892 default:
6893 break;
6895 return false;
6898 /* Return true if CODE represents a commutative tree code. Otherwise
6899 return false. */
6900 bool
6901 commutative_tree_code (enum tree_code code)
6903 switch (code)
6905 case PLUS_EXPR:
6906 case MULT_EXPR:
6907 case MULT_HIGHPART_EXPR:
6908 case MIN_EXPR:
6909 case MAX_EXPR:
6910 case BIT_IOR_EXPR:
6911 case BIT_XOR_EXPR:
6912 case BIT_AND_EXPR:
6913 case NE_EXPR:
6914 case EQ_EXPR:
6915 case UNORDERED_EXPR:
6916 case ORDERED_EXPR:
6917 case UNEQ_EXPR:
6918 case LTGT_EXPR:
6919 case TRUTH_AND_EXPR:
6920 case TRUTH_XOR_EXPR:
6921 case TRUTH_OR_EXPR:
6922 case WIDEN_MULT_EXPR:
6923 case VEC_WIDEN_MULT_HI_EXPR:
6924 case VEC_WIDEN_MULT_LO_EXPR:
6925 case VEC_WIDEN_MULT_EVEN_EXPR:
6926 case VEC_WIDEN_MULT_ODD_EXPR:
6927 return true;
6929 default:
6930 break;
6932 return false;
6935 /* Return true if CODE represents a ternary tree code for which the
6936 first two operands are commutative. Otherwise return false. */
6937 bool
6938 commutative_ternary_tree_code (enum tree_code code)
6940 switch (code)
6942 case WIDEN_MULT_PLUS_EXPR:
6943 case WIDEN_MULT_MINUS_EXPR:
6944 case DOT_PROD_EXPR:
6945 case FMA_EXPR:
6946 return true;
6948 default:
6949 break;
6951 return false;
6954 /* Returns true if CODE can overflow. */
6956 bool
6957 operation_can_overflow (enum tree_code code)
6959 switch (code)
6961 case PLUS_EXPR:
6962 case MINUS_EXPR:
6963 case MULT_EXPR:
6964 case LSHIFT_EXPR:
6965 /* Can overflow in various ways. */
6966 return true;
6967 case TRUNC_DIV_EXPR:
6968 case EXACT_DIV_EXPR:
6969 case FLOOR_DIV_EXPR:
6970 case CEIL_DIV_EXPR:
6971 /* For INT_MIN / -1. */
6972 return true;
6973 case NEGATE_EXPR:
6974 case ABS_EXPR:
6975 /* For -INT_MIN. */
6976 return true;
6977 default:
6978 /* These operators cannot overflow. */
6979 return false;
6983 /* Returns true if CODE operating on operands of type TYPE cannot overflow,
6984 or if -ftrapv does not generate trapping insns for CODE. */
6986 bool
6987 operation_no_trapping_overflow (tree type, enum tree_code code)
6989 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6991 /* We don't generate instructions that trap on overflow for complex or vector
6992 types. */
6993 if (!INTEGRAL_TYPE_P (type))
6994 return true;
6996 if (!TYPE_OVERFLOW_TRAPS (type))
6997 return true;
6999 switch (code)
7001 case PLUS_EXPR:
7002 case MINUS_EXPR:
7003 case MULT_EXPR:
7004 case NEGATE_EXPR:
7005 case ABS_EXPR:
7006 /* These operators can overflow, and -ftrapv generates trapping code for
7007 these. */
7008 return false;
7009 case TRUNC_DIV_EXPR:
7010 case EXACT_DIV_EXPR:
7011 case FLOOR_DIV_EXPR:
7012 case CEIL_DIV_EXPR:
7013 case LSHIFT_EXPR:
7014 /* These operators can overflow, but -ftrapv does not generate trapping
7015 code for these. */
7016 return true;
7017 default:
7018 /* These operators cannot overflow. */
7019 return true;
7023 namespace inchash
7026 /* Generate a hash value for an expression. This can be used iteratively
7027 by passing a previous result as the HSTATE argument.
7029 This function is intended to produce the same hash for expressions which
7030 would compare equal using operand_equal_p. */
7031 void
7032 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
7034 int i;
7035 enum tree_code code;
7036 enum tree_code_class tclass;
7038 if (t == NULL_TREE || t == error_mark_node)
7040 hstate.merge_hash (0);
7041 return;
7044 if (!(flags & OEP_ADDRESS_OF))
7045 STRIP_NOPS (t);
7047 code = TREE_CODE (t);
7049 switch (code)
7051 /* Alas, constants aren't shared, so we can't rely on pointer
7052 identity. */
7053 case VOID_CST:
7054 hstate.merge_hash (0);
7055 return;
7056 case INTEGER_CST:
7057 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7058 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
7059 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
7060 return;
7061 case REAL_CST:
7063 unsigned int val2;
7064 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
7065 val2 = rvc_zero;
7066 else
7067 val2 = real_hash (TREE_REAL_CST_PTR (t));
7068 hstate.merge_hash (val2);
7069 return;
7071 case FIXED_CST:
7073 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7074 hstate.merge_hash (val2);
7075 return;
7077 case STRING_CST:
7078 hstate.add ((const void *) TREE_STRING_POINTER (t),
7079 TREE_STRING_LENGTH (t));
7080 return;
7081 case COMPLEX_CST:
7082 inchash::add_expr (TREE_REALPART (t), hstate, flags);
7083 inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
7084 return;
7085 case VECTOR_CST:
7087 hstate.add_int (VECTOR_CST_NPATTERNS (t));
7088 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
7089 unsigned int count = vector_cst_encoded_nelts (t);
7090 for (unsigned int i = 0; i < count; ++i)
7091 inchash::add_expr (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
7092 return;
7094 case SSA_NAME:
7095 /* We can just compare by pointer. */
7096 hstate.add_hwi (SSA_NAME_VERSION (t));
7097 return;
7098 case PLACEHOLDER_EXPR:
7099 /* The node itself doesn't matter. */
7100 return;
7101 case BLOCK:
7102 case OMP_CLAUSE:
7103 /* Ignore. */
7104 return;
7105 case TREE_LIST:
7106 /* A list of expressions, for a CALL_EXPR or as the elements of a
7107 VECTOR_CST. */
7108 for (; t; t = TREE_CHAIN (t))
7109 inchash::add_expr (TREE_VALUE (t), hstate, flags);
7110 return;
7111 case CONSTRUCTOR:
7113 unsigned HOST_WIDE_INT idx;
7114 tree field, value;
7115 flags &= ~OEP_ADDRESS_OF;
7116 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7118 inchash::add_expr (field, hstate, flags);
7119 inchash::add_expr (value, hstate, flags);
7121 return;
7123 case STATEMENT_LIST:
7125 tree_stmt_iterator i;
7126 for (i = tsi_start (CONST_CAST_TREE (t));
7127 !tsi_end_p (i); tsi_next (&i))
7128 inchash::add_expr (tsi_stmt (i), hstate, flags);
7129 return;
7131 case TREE_VEC:
7132 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
7133 inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags);
7134 return;
7135 case FUNCTION_DECL:
7136 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7137 Otherwise nodes that compare equal according to operand_equal_p might
7138 get different hash codes. However, don't do this for machine specific
7139 or front end builtins, since the function code is overloaded in those
7140 cases. */
7141 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7142 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7144 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7145 code = TREE_CODE (t);
7147 /* FALL THROUGH */
7148 default:
7149 tclass = TREE_CODE_CLASS (code);
7151 if (tclass == tcc_declaration)
7153 /* DECL's have a unique ID */
7154 hstate.add_hwi (DECL_UID (t));
7156 else if (tclass == tcc_comparison && !commutative_tree_code (code))
7158 /* For comparisons that can be swapped, use the lower
7159 tree code. */
7160 enum tree_code ccode = swap_tree_comparison (code);
7161 if (code < ccode)
7162 ccode = code;
7163 hstate.add_object (ccode);
7164 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
7165 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
7167 else if (CONVERT_EXPR_CODE_P (code))
7169 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7170 operand_equal_p. */
7171 enum tree_code ccode = NOP_EXPR;
7172 hstate.add_object (ccode);
7174 /* Don't hash the type, that can lead to having nodes which
7175 compare equal according to operand_equal_p, but which
7176 have different hash codes. Make sure to include signedness
7177 in the hash computation. */
7178 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7179 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7181 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
7182 else if (code == MEM_REF
7183 && (flags & OEP_ADDRESS_OF) != 0
7184 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
7185 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
7186 && integer_zerop (TREE_OPERAND (t, 1)))
7187 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
7188 hstate, flags);
7189 /* Don't ICE on FE specific trees, or their arguments etc.
7190 during operand_equal_p hash verification. */
7191 else if (!IS_EXPR_CODE_CLASS (tclass))
7192 gcc_assert (flags & OEP_HASH_CHECK);
7193 else
7195 unsigned int sflags = flags;
7197 hstate.add_object (code);
7199 switch (code)
7201 case ADDR_EXPR:
7202 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7203 flags |= OEP_ADDRESS_OF;
7204 sflags = flags;
7205 break;
7207 case INDIRECT_REF:
7208 case MEM_REF:
7209 case TARGET_MEM_REF:
7210 flags &= ~OEP_ADDRESS_OF;
7211 sflags = flags;
7212 break;
7214 case ARRAY_REF:
7215 case ARRAY_RANGE_REF:
7216 case COMPONENT_REF:
7217 case BIT_FIELD_REF:
7218 sflags &= ~OEP_ADDRESS_OF;
7219 break;
7221 case COND_EXPR:
7222 flags &= ~OEP_ADDRESS_OF;
7223 break;
7225 case FMA_EXPR:
7226 case WIDEN_MULT_PLUS_EXPR:
7227 case WIDEN_MULT_MINUS_EXPR:
7229 /* The multiplication operands are commutative. */
7230 inchash::hash one, two;
7231 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7232 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7233 hstate.add_commutative (one, two);
7234 inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
7235 return;
7238 case CALL_EXPR:
7239 if (CALL_EXPR_FN (t) == NULL_TREE)
7240 hstate.add_int (CALL_EXPR_IFN (t));
7241 break;
7243 case TARGET_EXPR:
7244 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
7245 Usually different TARGET_EXPRs should just use
7246 different temporaries in their slots. */
7247 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
7248 return;
7250 default:
7251 break;
7254 /* Don't hash the type, that can lead to having nodes which
7255 compare equal according to operand_equal_p, but which
7256 have different hash codes. */
7257 if (code == NON_LVALUE_EXPR)
7259 /* Make sure to include signedness in the hash computation. */
7260 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7261 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7264 else if (commutative_tree_code (code))
7266 /* It's a commutative expression. We want to hash it the same
7267 however it appears. We do this by first hashing both operands
7268 and then rehashing based on the order of their independent
7269 hashes. */
7270 inchash::hash one, two;
7271 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7272 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7273 hstate.add_commutative (one, two);
7275 else
7276 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7277 inchash::add_expr (TREE_OPERAND (t, i), hstate,
7278 i == 0 ? flags : sflags);
7280 return;
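/* A minimal sketch of driving this hasher, assuming EXPR is some tree
   expression:

     inchash::hash hstate;
     inchash::add_expr (expr, hstate);
     hashval_t h = hstate.end ();

   Two expressions that compare equal under operand_equal_p are intended
   to produce the same H.  */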
7286 /* Constructors for pointer, array and function types.
7287 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7288 constructed by language-dependent code, not here.) */
7290 /* Construct, lay out and return the type of pointers to TO_TYPE with
7291 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7292 reference all of memory. If such a type has already been
7293 constructed, reuse it. */
7295 tree
7296 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7297 bool can_alias_all)
7299 tree t;
7300 bool could_alias = can_alias_all;
7302 if (to_type == error_mark_node)
7303 return error_mark_node;
7305 /* If the pointed-to type has the may_alias attribute set, force
7306 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7307 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7308 can_alias_all = true;
7310 /* In some cases, languages will have things that aren't a POINTER_TYPE
7311 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7312 In that case, return that type without regard to the rest of our
7313 operands.
7315 ??? This is a kludge, but consistent with the way this function has
7316 always operated and there doesn't seem to be a good way to avoid this
7317 at the moment. */
7318 if (TYPE_POINTER_TO (to_type) != 0
7319 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7320 return TYPE_POINTER_TO (to_type);
7322 /* First, if we already have a type for pointers to TO_TYPE and it's
7323 the proper mode, use it. */
7324 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7325 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7326 return t;
7328 t = make_node (POINTER_TYPE);
7330 TREE_TYPE (t) = to_type;
7331 SET_TYPE_MODE (t, mode);
7332 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7333 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7334 TYPE_POINTER_TO (to_type) = t;
7336 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7337 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7338 SET_TYPE_STRUCTURAL_EQUALITY (t);
7339 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7340 TYPE_CANONICAL (t)
7341 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7342 mode, false);
7344 /* Lay out the type. This function has many callers that are concerned
7345 with expression-construction, and this simplifies them all. */
7346 layout_type (t);
7348 return t;
7351 /* By default build pointers in ptr_mode. */
7353 tree
7354 build_pointer_type (tree to_type)
7356 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7357 : TYPE_ADDR_SPACE (to_type);
7358 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7359 return build_pointer_type_for_mode (to_type, pointer_mode, false);
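/* For example, build_pointer_type (integer_type_node) yields the node
   for "int *" in ptr_mode, reusing an existing node if one is already
   chained off TYPE_POINTER_TO (integer_type_node).  */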
7362 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7364 tree
7365 build_reference_type_for_mode (tree to_type, machine_mode mode,
7366 bool can_alias_all)
7368 tree t;
7369 bool could_alias = can_alias_all;
7371 if (to_type == error_mark_node)
7372 return error_mark_node;
7374 /* If the pointed-to type has the may_alias attribute set, force
7375 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7376 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7377 can_alias_all = true;
7379 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7380 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7381 In that case, return that type without regard to the rest of our
7382 operands.
7384 ??? This is a kludge, but consistent with the way this function has
7385 always operated and there doesn't seem to be a good way to avoid this
7386 at the moment. */
7387 if (TYPE_REFERENCE_TO (to_type) != 0
7388 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7389 return TYPE_REFERENCE_TO (to_type);
7391 /* First, if we already have a type for pointers to TO_TYPE and it's
7392 the proper mode, use it. */
7393 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7394 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7395 return t;
7397 t = make_node (REFERENCE_TYPE);
7399 TREE_TYPE (t) = to_type;
7400 SET_TYPE_MODE (t, mode);
7401 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7402 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7403 TYPE_REFERENCE_TO (to_type) = t;
7405 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7406 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7407 SET_TYPE_STRUCTURAL_EQUALITY (t);
7408 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7409 TYPE_CANONICAL (t)
7410 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7411 mode, false);
7413 layout_type (t);
7415 return t;
7419 /* Build the node for the type of references-to-TO_TYPE by default
7420 in ptr_mode. */
7422 tree
7423 build_reference_type (tree to_type)
7425 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7426 : TYPE_ADDR_SPACE (to_type);
7427 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7428 return build_reference_type_for_mode (to_type, pointer_mode, false);
7431 #define MAX_INT_CACHED_PREC \
7432 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7433 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7435 /* Builds a signed or unsigned integer type of precision PRECISION.
7436 Used for C bitfields whose precision does not match that of
7437 built-in target types. */
7438 tree
7439 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7440 int unsignedp)
7442 tree itype, ret;
7444 if (unsignedp)
7445 unsignedp = MAX_INT_CACHED_PREC + 1;
7447 if (precision <= MAX_INT_CACHED_PREC)
7449 itype = nonstandard_integer_type_cache[precision + unsignedp];
7450 if (itype)
7451 return itype;
7454 itype = make_node (INTEGER_TYPE);
7455 TYPE_PRECISION (itype) = precision;
7457 if (unsignedp)
7458 fixup_unsigned_type (itype);
7459 else
7460 fixup_signed_type (itype);
7462 ret = itype;
7464 inchash::hash hstate;
7465 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7466 ret = type_hash_canon (hstate.end (), itype);
7467 if (precision <= MAX_INT_CACHED_PREC)
7468 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7470 return ret;
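/* For example, build_nonstandard_integer_type (24, 1) returns a shared
   unsigned 24-bit INTEGER_TYPE; repeated calls with the same arguments
   hit nonstandard_integer_type_cache and return the same node.  */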
7473 #define MAX_BOOL_CACHED_PREC \
7474 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7475 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7477 /* Builds a boolean type of precision PRECISION.
7478 Used for boolean vectors to choose proper vector element size. */
7479 tree
7480 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7482 tree type;
7484 if (precision <= MAX_BOOL_CACHED_PREC)
7486 type = nonstandard_boolean_type_cache[precision];
7487 if (type)
7488 return type;
7491 type = make_node (BOOLEAN_TYPE);
7492 TYPE_PRECISION (type) = precision;
7493 fixup_signed_type (type);
7495 if (precision <= MAX_BOOL_CACHED_PREC)
7496 nonstandard_boolean_type_cache[precision] = type;
7498 return type;
7501 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7502 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7503 is true, reuse such a type that has already been constructed. */
7505 static tree
7506 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7508 tree itype = make_node (INTEGER_TYPE);
7510 TREE_TYPE (itype) = type;
7512 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7513 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7515 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7516 SET_TYPE_MODE (itype, TYPE_MODE (type));
7517 TYPE_SIZE (itype) = TYPE_SIZE (type);
7518 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7519 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7520 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7521 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7523 if (!shared)
7524 return itype;
7526 if ((TYPE_MIN_VALUE (itype)
7527 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7528 || (TYPE_MAX_VALUE (itype)
7529 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7531 /* Since we cannot reliably merge this type, we need to compare it using
7532 structural equality checks. */
7533 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7534 return itype;
7537 hashval_t hash = type_hash_canon_hash (itype);
7538 itype = type_hash_canon (hash, itype);
7540 return itype;
7543 /* Wrapper around build_range_type_1 with SHARED set to true. */
7545 tree
7546 build_range_type (tree type, tree lowval, tree highval)
7548 return build_range_type_1 (type, lowval, highval, true);
7551 /* Wrapper around build_range_type_1 with SHARED set to false. */
7553 tree
7554 build_nonshared_range_type (tree type, tree lowval, tree highval)
7556 return build_range_type_1 (type, lowval, highval, false);
7559 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7560 MAXVAL should be the maximum value in the domain
7561 (one less than the length of the array).
7563 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7564 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7565 The limit exists because the result is a signed type and we don't handle
7566 sizes that use more than one HOST_WIDE_INT. */
7568 tree
7569 build_index_type (tree maxval)
7571 return build_range_type (sizetype, size_zero_node, maxval);
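/* For example, build_index_type (size_int (9)) produces the sizetype
   range [0, 9], suitable as the TYPE_DOMAIN of a ten-element array.  */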
7574 /* Return true if the debug information for TYPE, a subtype, should be emitted
7575 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7576 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7577 debug info and doesn't reflect the source code. */
7579 bool
7580 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7582 tree base_type = TREE_TYPE (type), low, high;
7584 /* Subrange types have a base type which is an integral type. */
7585 if (!INTEGRAL_TYPE_P (base_type))
7586 return false;
7588 /* Get the real bounds of the subtype. */
7589 if (lang_hooks.types.get_subrange_bounds)
7590 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7591 else
7593 low = TYPE_MIN_VALUE (type);
7594 high = TYPE_MAX_VALUE (type);
7597 /* If the type and its base type have the same representation and the same
7598 name, then the type is not a subrange but a copy of the base type. */
7599 if ((TREE_CODE (base_type) == INTEGER_TYPE
7600 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7601 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7602 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7603 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7604 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7605 return false;
7607 if (lowval)
7608 *lowval = low;
7609 if (highval)
7610 *highval = high;
7611 return true;
7614 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7615 and number of elements specified by the range of values of INDEX_TYPE.
7616 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7617 If SHARED is true, reuse such a type that has already been constructed. */
7619 static tree
7620 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7621 bool shared)
7623 tree t;
7625 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7627 error ("arrays of functions are not meaningful");
7628 elt_type = integer_type_node;
7631 t = make_node (ARRAY_TYPE);
7632 TREE_TYPE (t) = elt_type;
7633 TYPE_DOMAIN (t) = index_type;
7634 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7635 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7636 layout_type (t);
7638 /* If the element type is incomplete at this point we get marked for
7639 structural equality. Do not record these types in the canonical
7640 type hashtable. */
7641 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7642 return t;
7644 if (shared)
7646 hashval_t hash = type_hash_canon_hash (t);
7647 t = type_hash_canon (hash, t);
7650 if (TYPE_CANONICAL (t) == t)
7652 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7653 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7654 || in_lto_p)
7655 SET_TYPE_STRUCTURAL_EQUALITY (t);
7656 else if (TYPE_CANONICAL (elt_type) != elt_type
7657 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7658 TYPE_CANONICAL (t)
7659 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7660 index_type
7661 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7662 typeless_storage, shared);
7665 return t;
7668 /* Wrapper around build_array_type_1 with SHARED set to true. */
7670 tree
7671 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7673 return build_array_type_1 (elt_type, index_type, typeless_storage, true);
7676 /* Wrapper around build_array_type_1 with SHARED set to false. */
7678 tree
7679 build_nonshared_array_type (tree elt_type, tree index_type)
7681 return build_array_type_1 (elt_type, index_type, false, false);
7684 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7685 sizetype. */
7687 tree
7688 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7690 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
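/* For example, build_array_type_nelts (char_type_node, 16) is the
   middle-end representation of "char[16]": element type char with
   domain [0, 15].  */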
7693 /* Recursively examines the array elements of TYPE, until a non-array
7694 element type is found. */
7696 tree
7697 strip_array_types (tree type)
7699 while (TREE_CODE (type) == ARRAY_TYPE)
7700 type = TREE_TYPE (type);
7702 return type;
7705 /* Computes the canonical argument types from the argument type list
7706 ARGTYPES.
7708 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7709 on entry to this function, or if any of the ARGTYPES are
7710 structural.
7712 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7713 true on entry to this function, or if any of the ARGTYPES are
7714 non-canonical.
7716 Returns a canonical argument list, which may be ARGTYPES when the
7717 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7718 true) or would not differ from ARGTYPES. */
7720 static tree
7721 maybe_canonicalize_argtypes (tree argtypes,
7722 bool *any_structural_p,
7723 bool *any_noncanonical_p)
7725 tree arg;
7726 bool any_noncanonical_argtypes_p = false;
7728 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7730 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7731 /* Fail gracefully by stating that the type is structural. */
7732 *any_structural_p = true;
7733 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7734 *any_structural_p = true;
7735 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7736 || TREE_PURPOSE (arg))
7737 /* If the argument has a default argument, we consider it
7738 non-canonical even though the type itself is canonical.
7739 That way, different variants of function and method types
7740 with default arguments will all point to the variant with
7741 no defaults as their canonical type. */
7742 any_noncanonical_argtypes_p = true;
7745 if (*any_structural_p)
7746 return argtypes;
7748 if (any_noncanonical_argtypes_p)
7750 /* Build the canonical list of argument types. */
7751 tree canon_argtypes = NULL_TREE;
7752 bool is_void = false;
7754 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7756 if (arg == void_list_node)
7757 is_void = true;
7758 else
7759 canon_argtypes = tree_cons (NULL_TREE,
7760 TYPE_CANONICAL (TREE_VALUE (arg)),
7761 canon_argtypes);
7764 canon_argtypes = nreverse (canon_argtypes);
7765 if (is_void)
7766 canon_argtypes = chainon (canon_argtypes, void_list_node);
7768 /* There is a non-canonical type. */
7769 *any_noncanonical_p = true;
7770 return canon_argtypes;
7773 /* The canonical argument types are the same as ARGTYPES. */
7774 return argtypes;
7777 /* Construct, lay out and return
7778 the type of functions returning type VALUE_TYPE
7779 given arguments of types ARG_TYPES.
7780 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7781 are data type nodes for the arguments of the function.
7782 If such a type has already been constructed, reuse it. */
7784 tree
7785 build_function_type (tree value_type, tree arg_types)
7787 tree t;
7788 inchash::hash hstate;
7789 bool any_structural_p, any_noncanonical_p;
7790 tree canon_argtypes;
7792 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7794 error ("function return type cannot be function");
7795 value_type = integer_type_node;
7798 /* Make a node of the sort we want. */
7799 t = make_node (FUNCTION_TYPE);
7800 TREE_TYPE (t) = value_type;
7801 TYPE_ARG_TYPES (t) = arg_types;
7803 /* If we already have such a type, use the old one. */
7804 hashval_t hash = type_hash_canon_hash (t);
7805 t = type_hash_canon (hash, t);
7807 /* Set up the canonical type. */
7808 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7809 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7810 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7811 &any_structural_p,
7812 &any_noncanonical_p);
7813 if (any_structural_p)
7814 SET_TYPE_STRUCTURAL_EQUALITY (t);
7815 else if (any_noncanonical_p)
7816 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7817 canon_argtypes);
7819 if (!COMPLETE_TYPE_P (t))
7820 layout_type (t);
7821 return t;
7824 /* Build a function type. The RETURN_TYPE is the type returned by the
7825 function. If VAARGS is set, no void_type_node is appended to the
7826 list. ARGP must always be terminated by a NULL_TREE. */
7828 static tree
7829 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7831 tree t, args, last;
7833 t = va_arg (argp, tree);
7834 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7835 args = tree_cons (NULL_TREE, t, args);
7837 if (vaargs)
7839 last = args;
7840 if (args != NULL_TREE)
7841 args = nreverse (args);
7842 gcc_assert (last != void_list_node);
7844 else if (args == NULL_TREE)
7845 args = void_list_node;
7846 else
7848 last = args;
7849 args = nreverse (args);
7850 TREE_CHAIN (last) = void_list_node;
7852 args = build_function_type (return_type, args);
7854 return args;
7857 /* Build a function type. The RETURN_TYPE is the type returned by the
7858 function. If additional arguments are provided, they are
7859 additional argument types. The list of argument types must always
7860 be terminated by NULL_TREE. */
7862 tree
7863 build_function_type_list (tree return_type, ...)
7865 tree args;
7866 va_list p;
7868 va_start (p, return_type);
7869 args = build_function_type_list_1 (false, return_type, p);
7870 va_end (p);
7871 return args;
7874 /* Build a variable argument function type. The RETURN_TYPE is the
7875 type returned by the function. If additional arguments are provided,
7876 they are additional argument types. The list of argument types must
7877 always be terminated by NULL_TREE. */
7879 tree
7880 build_varargs_function_type_list (tree return_type, ...)
7882 tree args;
7883 va_list p;
7885 va_start (p, return_type);
7886 args = build_function_type_list_1 (true, return_type, p);
7887 va_end (p);
7889 return args;
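/* Hypothetical sketches of the two entry points:

     tree fntype = build_function_type_list (integer_type_node,
                                             ptr_type_node, NULL_TREE);
     tree vatype = build_varargs_function_type_list (integer_type_node,
                                                     ptr_type_node, NULL_TREE);

   FNTYPE is "int (void *)" (the argument list is closed with
   void_list_node), while VATYPE is the varargs type "int (void *, ...)".  */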
7892 /* Build a function type. RETURN_TYPE is the type returned by the
7893 function; VAARGS indicates whether the function takes varargs. The
7894 function takes N named arguments, the types of which are provided in
7895 ARG_TYPES. */
7897 static tree
7898 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7899 tree *arg_types)
7901 int i;
7902 tree t = vaargs ? NULL_TREE : void_list_node;
7904 for (i = n - 1; i >= 0; i--)
7905 t = tree_cons (NULL_TREE, arg_types[i], t);
7907 return build_function_type (return_type, t);
7910 /* Build a function type. RETURN_TYPE is the type returned by the
7911 function. The function takes N named arguments, the types of which
7912 are provided in ARG_TYPES. */
7914 tree
7915 build_function_type_array (tree return_type, int n, tree *arg_types)
7917 return build_function_type_array_1 (false, return_type, n, arg_types);
7920 /* Build a variable argument function type. RETURN_TYPE is the type
7921 returned by the function. The function takes N named arguments, the
7922 types of which are provided in ARG_TYPES. */
7924 tree
7925 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7927 return build_function_type_array_1 (true, return_type, n, arg_types);
7930 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7931 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7932 for the method. An implicit additional parameter (of type
7933 pointer-to-BASETYPE) is added to the ARGTYPES. */
7935 tree
7936 build_method_type_directly (tree basetype,
7937 tree rettype,
7938 tree argtypes)
7940 tree t;
7941 tree ptype;
7942 bool any_structural_p, any_noncanonical_p;
7943 tree canon_argtypes;
7945 /* Make a node of the sort we want. */
7946 t = make_node (METHOD_TYPE);
7948 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7949 TREE_TYPE (t) = rettype;
7950 ptype = build_pointer_type (basetype);
7952 /* The actual arglist for this function includes a "hidden" argument
7953 which is "this". Put it into the list of argument types. */
7954 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7955 TYPE_ARG_TYPES (t) = argtypes;
7957 /* If we already have such a type, use the old one. */
7958 hashval_t hash = type_hash_canon_hash (t);
7959 t = type_hash_canon (hash, t);
7961 /* Set up the canonical type. */
7962 any_structural_p
7963 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7964 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7965 any_noncanonical_p
7966 = (TYPE_CANONICAL (basetype) != basetype
7967 || TYPE_CANONICAL (rettype) != rettype);
7968 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7969 &any_structural_p,
7970 &any_noncanonical_p);
7971 if (any_structural_p)
7972 SET_TYPE_STRUCTURAL_EQUALITY (t);
7973 else if (any_noncanonical_p)
7974 TYPE_CANONICAL (t)
7975 = build_method_type_directly (TYPE_CANONICAL (basetype),
7976 TYPE_CANONICAL (rettype),
7977 canon_argtypes);
7978 if (!COMPLETE_TYPE_P (t))
7979 layout_type (t);
7981 return t;
7984 /* Construct, lay out and return the type of methods belonging to class
7985 BASETYPE and whose arguments and values are described by TYPE.
7986 If that type exists already, reuse it.
7987 TYPE must be a FUNCTION_TYPE node. */
7989 tree
7990 build_method_type (tree basetype, tree type)
7992 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7994 return build_method_type_directly (basetype,
7995 TREE_TYPE (type),
7996 TYPE_ARG_TYPES (type));
7999 /* Construct, lay out and return the type of offsets to a value
8000 of type TYPE, within an object of type BASETYPE.
8001 If a suitable offset type exists already, reuse it. */
8003 tree
8004 build_offset_type (tree basetype, tree type)
8006 tree t;
8008 /* Make a node of the sort we want. */
8009 t = make_node (OFFSET_TYPE);
8011 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8012 TREE_TYPE (t) = type;
8014 /* If we already have such a type, use the old one. */
8015 hashval_t hash = type_hash_canon_hash (t);
8016 t = type_hash_canon (hash, t);
8018 if (!COMPLETE_TYPE_P (t))
8019 layout_type (t);
8021 if (TYPE_CANONICAL (t) == t)
8023 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8024 || TYPE_STRUCTURAL_EQUALITY_P (type))
8025 SET_TYPE_STRUCTURAL_EQUALITY (t);
8026 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8027 || TYPE_CANONICAL (type) != type)
8028 TYPE_CANONICAL (t)
8029 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8030 TYPE_CANONICAL (type));
8033 return t;
8036 /* Create a complex type whose components are COMPONENT_TYPE.
8038 If NAMED is true, the type is given a TYPE_NAME. We do not always
8039 do so because this creates a DECL node and thus makes the DECL_UIDs
8040 dependent on the type canonicalization hashtable, which is GC-ed,
8041 so the DECL_UIDs would not be stable wrt garbage collection. */
8043 tree
8044 build_complex_type (tree component_type, bool named)
8046 gcc_assert (INTEGRAL_TYPE_P (component_type)
8047 || SCALAR_FLOAT_TYPE_P (component_type)
8048 || FIXED_POINT_TYPE_P (component_type));
8050 /* Make a node of the sort we want. */
8051 tree probe = make_node (COMPLEX_TYPE);
8053 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
8055 /* If we already have such a type, use the old one. */
8056 hashval_t hash = type_hash_canon_hash (probe);
8057 tree t = type_hash_canon (hash, probe);
8059 if (t == probe)
8061 /* We created a new type. The hash insertion will have laid
8062 out the type. We need to check the canonicalization and
8063 maybe set the name. */
8064 gcc_checking_assert (COMPLETE_TYPE_P (t)
8065 && !TYPE_NAME (t)
8066 && TYPE_CANONICAL (t) == t);
8068 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
8069 SET_TYPE_STRUCTURAL_EQUALITY (t);
8070 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
8071 TYPE_CANONICAL (t)
8072 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
8074 /* We need to create a name, since complex is a fundamental type. */
8075 if (named)
8077 const char *name = NULL;
8079 if (TREE_TYPE (t) == char_type_node)
8080 name = "complex char";
8081 else if (TREE_TYPE (t) == signed_char_type_node)
8082 name = "complex signed char";
8083 else if (TREE_TYPE (t) == unsigned_char_type_node)
8084 name = "complex unsigned char";
8085 else if (TREE_TYPE (t) == short_integer_type_node)
8086 name = "complex short int";
8087 else if (TREE_TYPE (t) == short_unsigned_type_node)
8088 name = "complex short unsigned int";
8089 else if (TREE_TYPE (t) == integer_type_node)
8090 name = "complex int";
8091 else if (TREE_TYPE (t) == unsigned_type_node)
8092 name = "complex unsigned int";
8093 else if (TREE_TYPE (t) == long_integer_type_node)
8094 name = "complex long int";
8095 else if (TREE_TYPE (t) == long_unsigned_type_node)
8096 name = "complex long unsigned int";
8097 else if (TREE_TYPE (t) == long_long_integer_type_node)
8098 name = "complex long long int";
8099 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
8100 name = "complex long long unsigned int";
8102 if (name != NULL)
8103 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8104 get_identifier (name), t);
8108 return build_qualified_type (t, TYPE_QUALS (component_type));
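/* For example, build_complex_type (integer_type_node, true) returns the
   shared COMPLEX_TYPE whose parts are "int" and, since that is one of the
   fundamental component types listed above, attaches the TYPE_DECL name
   "complex int".  */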
8111 /* If TYPE is a real or complex floating-point type and the target
8112 does not directly support arithmetic on TYPE then return the wider
8113 type to be used for arithmetic on TYPE. Otherwise, return
8114 NULL_TREE. */
8116 tree
8117 excess_precision_type (tree type)
8119 /* The target can give two different responses to the question of
8120 which excess precision mode it would like depending on whether we
8121 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8123 enum excess_precision_type requested_type
8124 = (flag_excess_precision == EXCESS_PRECISION_FAST
8125 ? EXCESS_PRECISION_TYPE_FAST
8126 : EXCESS_PRECISION_TYPE_STANDARD);
8128 enum flt_eval_method target_flt_eval_method
8129 = targetm.c.excess_precision (requested_type);
8131 /* The target should not ask for unpredictable float evaluation (though
8132 it might implicitly advertise that the evaluation is unpredictable;
8133 we don't care about that here, as it will have been reported
8134 elsewhere). If it does ask for unpredictable evaluation, we have
8135 nothing to do here. */
8136 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8138 /* Nothing to do. The target has asked for all types we know about
8139 to be computed with their native precision and range. */
8140 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8141 return NULL_TREE;
8143 /* The target will promote this type in a target-dependent way, so excess
8144 precision ought to leave it alone. */
8145 if (targetm.promoted_type (type) != NULL_TREE)
8146 return NULL_TREE;
8148 machine_mode float16_type_mode = (float16_type_node
8149 ? TYPE_MODE (float16_type_node)
8150 : VOIDmode);
8151 machine_mode float_type_mode = TYPE_MODE (float_type_node);
8152 machine_mode double_type_mode = TYPE_MODE (double_type_node);
8154 switch (TREE_CODE (type))
8156 case REAL_TYPE:
8158 machine_mode type_mode = TYPE_MODE (type);
8159 switch (target_flt_eval_method)
8161 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8162 if (type_mode == float16_type_mode)
8163 return float_type_node;
8164 break;
8165 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8166 if (type_mode == float16_type_mode
8167 || type_mode == float_type_mode)
8168 return double_type_node;
8169 break;
8170 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8171 if (type_mode == float16_type_mode
8172 || type_mode == float_type_mode
8173 || type_mode == double_type_mode)
8174 return long_double_type_node;
8175 break;
8176 default:
8177 gcc_unreachable ();
8179 break;
8181 case COMPLEX_TYPE:
8183 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8184 return NULL_TREE;
8185 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8186 switch (target_flt_eval_method)
8188 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8189 if (type_mode == float16_type_mode)
8190 return complex_float_type_node;
8191 break;
8192 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8193 if (type_mode == float16_type_mode
8194 || type_mode == float_type_mode)
8195 return complex_double_type_node;
8196 break;
8197 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8198 if (type_mode == float16_type_mode
8199 || type_mode == float_type_mode
8200 || type_mode == double_type_mode)
8201 return complex_long_double_type_node;
8202 break;
8203 default:
8204 gcc_unreachable ();
8206 break;
8208 default:
8209 break;
8212 return NULL_TREE;
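/* Illustrative sketch only (an editorial aside, not part of the original
   source and not called anywhere): whichever promotion the target requests,
   the type returned for float is never narrower than float itself.  */

static void ATTRIBUTE_UNUSED
excess_precision_type_example (void)
{
  tree wider = excess_precision_type (float_type_node);
  if (wider != NULL_TREE)
    gcc_checking_assert (SCALAR_FLOAT_TYPE_P (wider)
                         && (TYPE_PRECISION (wider)
                             >= TYPE_PRECISION (float_type_node)));
}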
8215 /* Return OP, stripped of any conversions to wider types as much as is safe.
8216 Converting the value back to OP's type makes a value equivalent to OP.
8218 If FOR_TYPE is nonzero, we return a value which, if converted to
8219 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8221 OP must have integer, real or enumeral type. Pointers are not allowed!
8223 There are some cases where the obvious value we could return
8224 would regenerate to OP if converted to OP's type,
8225 but would not extend like OP to wider types.
8226 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8227 For example, if OP is (unsigned short)(signed char)-1,
8228 we avoid returning (signed char)-1 if FOR_TYPE is int,
8229 even though extending that to an unsigned short would regenerate OP,
8230 since the result of extending (signed char)-1 to (int)
8231 is different from (int) OP. */
8233 tree
8234 get_unwidened (tree op, tree for_type)
8236 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8237 tree type = TREE_TYPE (op);
8238 unsigned final_prec
8239 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8240 int uns
8241 = (for_type != 0 && for_type != type
8242 && final_prec > TYPE_PRECISION (type)
8243 && TYPE_UNSIGNED (type));
8244 tree win = op;
8246 while (CONVERT_EXPR_P (op))
8248 int bitschange;
8250 /* TYPE_PRECISION on vector types has different meaning
8251 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8252 so avoid them here. */
8253 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8254 break;
8256 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8257 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8259 /* Truncations are many-one so cannot be removed.
8260 Unless we are later going to truncate down even further. */
8261 if (bitschange < 0
8262 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8263 break;
8265 /* See what's inside this conversion. If we decide to strip it,
8266 we will set WIN. */
8267 op = TREE_OPERAND (op, 0);
8269 /* If we have not stripped any zero-extensions (uns is 0),
8270 we can strip any kind of extension.
8271 If we have previously stripped a zero-extension,
8272 only zero-extensions can safely be stripped.
8273 Any extension can be stripped if the bits it would produce
8274 are all going to be discarded later by truncating to FOR_TYPE. */
8276 if (bitschange > 0)
8278 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8279 win = op;
8280 /* TYPE_UNSIGNED says whether this is a zero-extension.
8281 Let's avoid computing it if it does not affect WIN
8282 and if UNS will not be needed again. */
8283 if ((uns
8284 || CONVERT_EXPR_P (op))
8285 && TYPE_UNSIGNED (TREE_TYPE (op)))
8287 uns = 1;
8288 win = op;
8293 /* If we finally reach a constant, see if it fits in something smaller
8294 and in that case convert it. */
8295 if (TREE_CODE (win) == INTEGER_CST)
8297 tree wtype = TREE_TYPE (win);
8298 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8299 if (for_type)
8300 prec = MAX (prec, final_prec);
8301 if (prec < TYPE_PRECISION (wtype))
8303 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8304 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8305 win = fold_convert (t, win);
8309 return win;
8312 /* Return OP or a simpler expression for a narrower value
8313 which can be sign-extended or zero-extended to give back OP.
8314 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8315 or 0 if the value should be sign-extended. */
8317 tree
8318 get_narrower (tree op, int *unsignedp_ptr)
8320 int uns = 0;
8321 int first = 1;
8322 tree win = op;
8323 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8325 while (TREE_CODE (op) == NOP_EXPR)
8327 int bitschange
8328 = (TYPE_PRECISION (TREE_TYPE (op))
8329 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8331 /* Truncations are many-one so cannot be removed. */
8332 if (bitschange < 0)
8333 break;
8335 /* See what's inside this conversion. If we decide to strip it,
8336 we will set WIN. */
8338 if (bitschange > 0)
8340 op = TREE_OPERAND (op, 0);
8341 /* An extension: the outermost one can be stripped,
8342 but remember whether it is zero or sign extension. */
8343 if (first)
8344 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8345 /* Otherwise, if a sign extension has been stripped,
8346 only sign extensions can now be stripped;
8347 if a zero extension has been stripped, only zero-extensions. */
8348 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8349 break;
8350 first = 0;
8352 else /* bitschange == 0 */
8354 /* A change in nominal type can always be stripped, but we must
8355 preserve the unsignedness. */
8356 if (first)
8357 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8358 first = 0;
8359 op = TREE_OPERAND (op, 0);
8360 /* Keep trying to narrow, but don't assign op to win if it
8361 would turn an integral type into something else. */
8362 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8363 continue;
8366 win = op;
8369 if (TREE_CODE (op) == COMPONENT_REF
8370 /* Since type_for_size always gives an integer type. */
8371 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8372 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8373 /* Ensure field is laid out already. */
8374 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8375 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8377 unsigned HOST_WIDE_INT innerprec
8378 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8379 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8380 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8381 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8383 /* We can get this structure field in a narrower type that fits it,
8384 but the resulting extension to its nominal type (a fullword type)
8385 must satisfy the same conditions as for other extensions.
8387 Do this only for fields that are aligned (not bit-fields),
8388 because when bit-field insns are going to be used there is no
8389 advantage in doing this. */
8391 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8392 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8393 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8394 && type != 0)
8396 if (first)
8397 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8398 win = fold_convert (type, op);
8402 *unsignedp_ptr = uns;
8403 return win;
8406 /* Return true if integer constant C has a value that is permissible
8407 for TYPE, an integral type. */
8409 bool
8410 int_fits_type_p (const_tree c, const_tree type)
8412 tree type_low_bound, type_high_bound;
8413 bool ok_for_low_bound, ok_for_high_bound;
8414 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8416 /* Non-standard boolean types can have arbitrary precision but various
8417 transformations assume that they can only take values 0 and +/-1. */
8418 if (TREE_CODE (type) == BOOLEAN_TYPE)
8419 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8421 retry:
8422 type_low_bound = TYPE_MIN_VALUE (type);
8423 type_high_bound = TYPE_MAX_VALUE (type);
8425 /* If at least one bound of the type is a constant integer, we can check
8426 ourselves and maybe make a decision. If no such decision is possible, but
8427 this type is a subtype, try checking against that. Otherwise, use
8428 fits_to_tree_p, which checks against the precision.
8430 Compute the status for each possibly constant bound, and return if we see
8431 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8432 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8433 for "constant known to fit". */
8435 /* Check if c >= type_low_bound. */
8436 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8438 if (tree_int_cst_lt (c, type_low_bound))
8439 return false;
8440 ok_for_low_bound = true;
8442 else
8443 ok_for_low_bound = false;
8445 /* Check if c <= type_high_bound. */
8446 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8448 if (tree_int_cst_lt (type_high_bound, c))
8449 return false;
8450 ok_for_high_bound = true;
8452 else
8453 ok_for_high_bound = false;
8455 /* If the constant fits both bounds, the result is known. */
8456 if (ok_for_low_bound && ok_for_high_bound)
8457 return true;
8459 /* Perform some generic filtering which may allow making a decision
8460 even if the bounds are not constant. First, negative integers
8461 never fit in unsigned types. */
8462 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8463 return false;
8465 /* Second, narrower types always fit in wider ones. */
8466 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8467 return true;
8469 /* Third, unsigned integers with top bit set never fit signed types. */
8470 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8472 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8473 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8475 /* When a tree_cst is converted to a wide-int, the precision
8476 is taken from the type. However, if the precision of the
8477 mode underneath the type is smaller than that, it is
8478 possible that the value will not fit. The test below
8479 fails if any bit is set between the sign bit of the
8480 underlying mode and the top bit of the type. */
8481 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8482 return false;
8484 else if (wi::neg_p (wi::to_wide (c)))
8485 return false;
8488 /* If we haven't been able to decide at this point, there is nothing more
8489 we can check ourselves here. Look at the base type if we have one and it
8490 has the same precision. */
8491 if (TREE_CODE (type) == INTEGER_TYPE
8492 && TREE_TYPE (type) != 0
8493 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8495 type = TREE_TYPE (type);
8496 goto retry;
8499 /* Or to fits_to_tree_p, if nothing else. */
8500 return wi::fits_to_tree_p (wi::to_wide (c), type);
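/* Illustrative sketch only (not part of the original source): a couple of
   concrete answers int_fits_type_p gives for small constants, assuming the
   usual 8-bit char.  */

static void ATTRIBUTE_UNUSED
int_fits_type_p_example (void)
{
  /* 255 fits in unsigned char (0..255) but not in signed char (-128..127).  */
  gcc_checking_assert (int_fits_type_p (build_int_cst (integer_type_node, 255),
                                        unsigned_char_type_node));
  gcc_checking_assert (!int_fits_type_p (build_int_cst (integer_type_node, 255),
                                         signed_char_type_node));
  /* A negative value never fits an unsigned type.  */
  gcc_checking_assert (!int_fits_type_p (build_int_cst (integer_type_node, -1),
                                         unsigned_char_type_node));
}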
8503 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8504 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8505 represented (assuming two's-complement arithmetic) within the bit
8506 precision of the type are returned instead. */
8508 void
8509 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8511 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8512 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8513 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8514 else
8516 if (TYPE_UNSIGNED (type))
8517 mpz_set_ui (min, 0);
8518 else
8520 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8521 wi::to_mpz (mn, min, SIGNED);
8525 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8526 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8527 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8528 else
8530 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8531 wi::to_mpz (mn, max, TYPE_SIGN (type));
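/* Illustrative sketch only (not part of the original source): for a type
   with constant bounds the mpz results are simply those bounds, e.g. 0 and
   255 for an 8-bit unsigned char.  */

static void ATTRIBUTE_UNUSED
get_type_static_bounds_example (void)
{
  mpz_t lo, hi;
  mpz_init (lo);
  mpz_init (hi);
  get_type_static_bounds (unsigned_char_type_node, lo, hi);
  gcc_checking_assert (mpz_cmp_ui (lo, 0) == 0 && mpz_cmp_ui (hi, 255) == 0);
  mpz_clear (lo);
  mpz_clear (hi);
}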
8535 /* Return true if VAR is an automatic variable defined in function FN. */
8537 bool
8538 auto_var_in_fn_p (const_tree var, const_tree fn)
8540 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8541 && ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8542 || TREE_CODE (var) == PARM_DECL)
8543 && ! TREE_STATIC (var))
8544 || TREE_CODE (var) == LABEL_DECL
8545 || TREE_CODE (var) == RESULT_DECL));
8548 /* Subprogram of following function. Called by walk_tree.
8550 Return *TP if it is an automatic variable or parameter of the
8551 function passed in as DATA. */
8553 static tree
8554 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8556 tree fn = (tree) data;
8558 if (TYPE_P (*tp))
8559 *walk_subtrees = 0;
8561 else if (DECL_P (*tp)
8562 && auto_var_in_fn_p (*tp, fn))
8563 return *tp;
8565 return NULL_TREE;
8568 /* Returns true if T is, contains, or refers to a type with variable
8569 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8570 arguments, but not the return type. If FN is nonzero, only return
8571 true if a modifier of the type or position of FN is a variable or
8572 parameter inside FN.
8574 This concept is more general than that of C99 'variably modified types':
8575 in C99, a struct type is never variably modified because a VLA may not
8576 appear as a structure member. However, in GNU C, code like:
8578 struct S { int i[f()]; };
8580 is valid, and other languages may define similar constructs. */
8582 bool
8583 variably_modified_type_p (tree type, tree fn)
8585 tree t;
8587 /* Test if T is either variable (if FN is zero) or an expression containing
8588 a variable in FN. If TYPE isn't gimplified, return true also if
8589 gimplify_one_sizepos would gimplify the expression into a local
8590 variable. */
8591 #define RETURN_TRUE_IF_VAR(T) \
8592 do { tree _t = (T); \
8593 if (_t != NULL_TREE \
8594 && _t != error_mark_node \
8595 && TREE_CODE (_t) != INTEGER_CST \
8596 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8597 && (!fn \
8598 || (!TYPE_SIZES_GIMPLIFIED (type) \
8599 && !is_gimple_sizepos (_t)) \
8600 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8601 return true; } while (0)
8603 if (type == error_mark_node)
8604 return false;
8606 /* If TYPE itself has variable size, it is variably modified. */
8607 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8608 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8610 switch (TREE_CODE (type))
8612 case POINTER_TYPE:
8613 case REFERENCE_TYPE:
8614 case VECTOR_TYPE:
8615 /* Ada can have pointer types referring to themselves indirectly. */
8616 if (TREE_VISITED (type))
8617 return false;
8618 TREE_VISITED (type) = true;
8619 if (variably_modified_type_p (TREE_TYPE (type), fn))
8621 TREE_VISITED (type) = false;
8622 return true;
8624 TREE_VISITED (type) = false;
8625 break;
8627 case FUNCTION_TYPE:
8628 case METHOD_TYPE:
8629 /* If TYPE is a function type, it is variably modified if the
8630 return type is variably modified. */
8631 if (variably_modified_type_p (TREE_TYPE (type), fn))
8632 return true;
8633 break;
8635 case INTEGER_TYPE:
8636 case REAL_TYPE:
8637 case FIXED_POINT_TYPE:
8638 case ENUMERAL_TYPE:
8639 case BOOLEAN_TYPE:
8640 /* Scalar types are variably modified if their end points
8641 aren't constant. */
8642 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8643 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8644 break;
8646 case RECORD_TYPE:
8647 case UNION_TYPE:
8648 case QUAL_UNION_TYPE:
8649 /* We can't see if any of the fields are variably-modified by the
8650 definition we normally use, since that would produce infinite
8651 recursion via pointers. */
8652 /* This is variably modified if some field's type is. */
8653 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8654 if (TREE_CODE (t) == FIELD_DECL)
8656 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8657 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8658 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8660 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8661 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8663 break;
8665 case ARRAY_TYPE:
8666 /* Do not call ourselves to avoid infinite recursion. This is
8667 variably modified if the element type is. */
8668 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8669 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8670 break;
8672 default:
8673 break;
8676 /* The current language may have other cases to check, but in general,
8677 all other types are not variably modified. */
8678 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8680 #undef RETURN_TRUE_IF_VAR
8683 /* Given a DECL or TYPE, return the scope in which it was declared, or
8684 NULL_TREE if there is no containing scope. */
8686 tree
8687 get_containing_scope (const_tree t)
8689 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8692 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8694 const_tree
8695 get_ultimate_context (const_tree decl)
8697 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8699 if (TREE_CODE (decl) == BLOCK)
8700 decl = BLOCK_SUPERCONTEXT (decl);
8701 else
8702 decl = get_containing_scope (decl);
8704 return decl;
8707 /* Return the innermost context enclosing DECL that is
8708 a FUNCTION_DECL, or zero if none. */
8710 tree
8711 decl_function_context (const_tree decl)
8713 tree context;
8715 if (TREE_CODE (decl) == ERROR_MARK)
8716 return 0;
8718 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8719 where we look up the function at runtime. Such functions always take
8720 a first argument of type 'pointer to real context'.
8722 C++ should really be fixed to use DECL_CONTEXT for the real context,
8723 and use something else for the "virtual context". */
8724 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8725 context
8726 = TYPE_MAIN_VARIANT
8727 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8728 else
8729 context = DECL_CONTEXT (decl);
8731 while (context && TREE_CODE (context) != FUNCTION_DECL)
8733 if (TREE_CODE (context) == BLOCK)
8734 context = BLOCK_SUPERCONTEXT (context);
8735 else
8736 context = get_containing_scope (context);
8739 return context;
8742 /* Return the innermost context enclosing DECL that is
8743 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8744 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8746 tree
8747 decl_type_context (const_tree decl)
8749 tree context = DECL_CONTEXT (decl);
8751 while (context)
8752 switch (TREE_CODE (context))
8754 case NAMESPACE_DECL:
8755 case TRANSLATION_UNIT_DECL:
8756 return NULL_TREE;
8758 case RECORD_TYPE:
8759 case UNION_TYPE:
8760 case QUAL_UNION_TYPE:
8761 return context;
8763 case TYPE_DECL:
8764 case FUNCTION_DECL:
8765 context = DECL_CONTEXT (context);
8766 break;
8768 case BLOCK:
8769 context = BLOCK_SUPERCONTEXT (context);
8770 break;
8772 default:
8773 gcc_unreachable ();
8776 return NULL_TREE;
8779 /* CALL is a CALL_EXPR. Return the declaration for the function
8780 called, or NULL_TREE if the called function cannot be
8781 determined. */
8783 tree
8784 get_callee_fndecl (const_tree call)
8786 tree addr;
8788 if (call == error_mark_node)
8789 return error_mark_node;
8791 /* It's invalid to call this function with anything but a
8792 CALL_EXPR. */
8793 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8795 /* The first operand to the CALL is the address of the function
8796 called. */
8797 addr = CALL_EXPR_FN (call);
8799 /* If there is no function, return early. */
8800 if (addr == NULL_TREE)
8801 return NULL_TREE;
8803 STRIP_NOPS (addr);
8805 /* If this is a readonly function pointer, extract its initial value. */
8806 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8807 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8808 && DECL_INITIAL (addr))
8809 addr = DECL_INITIAL (addr);
8811 /* If the address is just `&f' for some function `f', then we know
8812 that `f' is being called. */
8813 if (TREE_CODE (addr) == ADDR_EXPR
8814 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8815 return TREE_OPERAND (addr, 0);
8817 /* We couldn't figure out what was being called. */
8818 return NULL_TREE;
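/* Illustrative sketch only (not part of the original source): for a direct
   call built by build_call_expr, CALL_EXPR_FN is an ADDR_EXPR of the
   FUNCTION_DECL, so the decl is recovered exactly.  */

static void ATTRIBUTE_UNUSED
get_callee_fndecl_example (void)
{
  tree abort_decl = builtin_decl_explicit (BUILT_IN_ABORT);
  if (abort_decl != NULL_TREE)
    {
      tree call = build_call_expr (abort_decl, 0);
      gcc_checking_assert (get_callee_fndecl (call) == abort_decl);
    }
}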
8821 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8822 return the associated function code, otherwise return CFN_LAST. */
8824 combined_fn
8825 get_call_combined_fn (const_tree call)
8827 /* It's invalid to call this function with anything but a CALL_EXPR. */
8828 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8830 if (!CALL_EXPR_FN (call))
8831 return as_combined_fn (CALL_EXPR_IFN (call));
8833 tree fndecl = get_callee_fndecl (call);
8834 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8835 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8837 return CFN_LAST;
8840 #define TREE_MEM_USAGE_SPACES 40
8842 /* Print debugging information about tree nodes generated during the compile,
8843 and any language-specific information. */
8845 void
8846 dump_tree_statistics (void)
8848 if (GATHER_STATISTICS)
8850 int i;
8851 int total_nodes, total_bytes;
8852 fprintf (stderr, "\nKind Nodes Bytes\n");
8853 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8854 total_nodes = total_bytes = 0;
8855 for (i = 0; i < (int) all_kinds; i++)
8857 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8858 tree_node_counts[i], tree_node_sizes[i]);
8859 total_nodes += tree_node_counts[i];
8860 total_bytes += tree_node_sizes[i];
8862 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8863 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8864 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8865 fprintf (stderr, "Code Nodes\n");
8866 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8867 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8868 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
8869 tree_code_counts[i]);
8870 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8871 fprintf (stderr, "\n");
8872 ssanames_print_statistics ();
8873 fprintf (stderr, "\n");
8874 phinodes_print_statistics ();
8875 fprintf (stderr, "\n");
8877 else
8878 fprintf (stderr, "(No per-node statistics)\n");
8880 print_type_hash_statistics ();
8881 print_debug_expr_statistics ();
8882 print_value_expr_statistics ();
8883 lang_hooks.print_statistics ();
8886 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8888 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8890 unsigned
8891 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8893 /* This relies on the raw feedback's top 4 bits being zero. */
8894 #define FEEDBACK(X) ((X) * 0x04c11db7)
8895 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8896 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8897 static const unsigned syndromes[16] =
8899 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8900 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8901 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8902 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8904 #undef FEEDBACK
8905 #undef SYNDROME
8907 value <<= (32 - bytes * 8);
8908 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8910 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8912 chksum = (chksum << 4) ^ feedback;
8915 return chksum;
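/* Purely illustrative (not part of the original source): an equivalent but
   much slower bit-at-a-time formulation of the same CRC-32 (polynomial
   0x04c11db7, MSB first, no reflection, no final xor), useful as a mental
   model for the syndrome table above.  */

static unsigned ATTRIBUTE_UNUSED
crc32_unsigned_n_bitwise (unsigned chksum, unsigned value, unsigned bytes)
{
  value <<= (32 - bytes * 8);
  for (unsigned ix = bytes * 8; ix--; value <<= 1)
    {
      unsigned feedback = ((value ^ chksum) & 0x80000000) ? 0x04c11db7 : 0;
      chksum = (chksum << 1) ^ feedback;
    }
  return chksum;
}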
8918 /* Generate a crc32 of a string. */
8920 unsigned
8921 crc32_string (unsigned chksum, const char *string)
8924 chksum = crc32_byte (chksum, *string);
8925 while (*string++);
8926 return chksum;
8929 /* P is a string that will be used in a symbol. Mask out any characters
8930 that are not valid in that context. */
8932 void
8933 clean_symbol_name (char *p)
8935 for (; *p; p++)
8936 if (! (ISALNUM (*p)
8937 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8938 || *p == '$'
8939 #endif
8940 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8941 || *p == '.'
8942 #endif
8944 *p = '_';
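/* Illustrative sketch only (not part of the original source): punctuation
   that is never valid in a label is rewritten to '_', while '$' and '.'
   survive on targets that allow them.  */

static void ATTRIBUTE_UNUSED
clean_symbol_name_example (void)
{
  char buf[] = "my-file.c";
  clean_symbol_name (buf);
  /* buf is now "my_file.c", or "my_file_c" if NO_DOT_IN_LABEL is defined.  */
}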
8947 /* For anonymous aggregate types, we need some sort of name to
8948 hold on to. In practice, this should not appear, but it should
8949 not be harmful if it does. */
8950 bool
8951 anon_aggrname_p(const_tree id_node)
8953 #ifndef NO_DOT_IN_LABEL
8954 return (IDENTIFIER_POINTER (id_node)[0] == '.'
8955 && IDENTIFIER_POINTER (id_node)[1] == '_');
8956 #else /* NO_DOT_IN_LABEL */
8957 #ifndef NO_DOLLAR_IN_LABEL
8958 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
8959 && IDENTIFIER_POINTER (id_node)[1] == '_');
8960 #else /* NO_DOLLAR_IN_LABEL */
8961 #define ANON_AGGRNAME_PREFIX "__anon_"
8962 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
8963 sizeof (ANON_AGGRNAME_PREFIX) - 1));
8964 #endif /* NO_DOLLAR_IN_LABEL */
8965 #endif /* NO_DOT_IN_LABEL */
8968 /* Return a format for an anonymous aggregate name. */
8969 const char *
8970 anon_aggrname_format()
8972 #ifndef NO_DOT_IN_LABEL
8973 return "._%d";
8974 #else /* NO_DOT_IN_LABEL */
8975 #ifndef NO_DOLLAR_IN_LABEL
8976 return "$_%d";
8977 #else /* NO_DOLLAR_IN_LABEL */
8978 return "__anon_%d";
8979 #endif /* NO_DOLLAR_IN_LABEL */
8980 #endif /* NO_DOT_IN_LABEL */
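/* Illustrative sketch only (not part of the original source): names produced
   with anon_aggrname_format are recognized by anon_aggrname_p in all three
   target configurations above.  */

static void ATTRIBUTE_UNUSED
anon_aggrname_example (void)
{
  char name[32];
  sprintf (name, anon_aggrname_format (), 0);  /* e.g. "._0" */
  gcc_checking_assert (anon_aggrname_p (get_identifier (name)));
}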
8983 /* Generate a name for a special-purpose function.
8984 The generated name may need to be unique across the whole link.
8985 Changes to this function may also require corresponding changes to
8986 xstrdup_mask_random.
8987 TYPE is some string to identify the purpose of this function to the
8988 linker or collect2; it must start with an uppercase letter,
8989 one of:
8990 I - for constructors
8991 D - for destructors
8992 N - for C++ anonymous namespaces
8993 F - for DWARF unwind frame information. */
8995 tree
8996 get_file_function_name (const char *type)
8998 char *buf;
8999 const char *p;
9000 char *q;
9002 /* If we already have a name we know to be unique, just use that. */
9003 if (first_global_object_name)
9004 p = q = ASTRDUP (first_global_object_name);
9005 /* If the target is handling the constructors/destructors, they
9006 will be local to this file and the name is only necessary for
9007 debugging purposes.
9008 We also assign sub_I and sub_D suffixes to constructors called from
9009 the global static constructors. These are always local. */
9010 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9011 || (strncmp (type, "sub_", 4) == 0
9012 && (type[4] == 'I' || type[4] == 'D')))
9014 const char *file = main_input_filename;
9015 if (! file)
9016 file = LOCATION_FILE (input_location);
9017 /* Just use the file's basename, because the full pathname
9018 might be quite long. */
9019 p = q = ASTRDUP (lbasename (file));
9021 else
9023 /* Otherwise, the name must be unique across the entire link.
9024 We don't have anything that we know to be unique to this translation
9025 unit, so use what we do have and throw in some randomness. */
9026 unsigned len;
9027 const char *name = weak_global_object_name;
9028 const char *file = main_input_filename;
9030 if (! name)
9031 name = "";
9032 if (! file)
9033 file = LOCATION_FILE (input_location);
9035 len = strlen (file);
9036 q = (char *) alloca (9 + 19 + len + 1);
9037 memcpy (q, file, len + 1);
9039 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9040 crc32_string (0, name), get_random_seed (false));
9042 p = q;
9045 clean_symbol_name (q);
9046 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9047 + strlen (type));
9049 /* Set up the name of the file-level functions we may need.
9050 Use a global object (which is already required to be unique over
9051 the program) rather than the file name (which imposes extra
9052 constraints). */
9053 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9055 return get_identifier (buf);
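/* Illustrative sketch only (not part of the original source): whatever
   unique string is chosen above, the result always carries the
   "_GLOBAL__<TYPE>_" prefix from FILE_FUNCTION_FORMAT.  */

static void ATTRIBUTE_UNUSED
get_file_function_name_example (void)
{
  tree id = get_file_function_name ("I");
  gcc_checking_assert (strncmp (IDENTIFIER_POINTER (id),
                                "_GLOBAL__I_", strlen ("_GLOBAL__I_")) == 0);
}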
9058 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9060 /* Complain that the tree code of NODE does not match the expected 0
9061 terminated list of trailing codes. The trailing code list can be
9062 empty, for a more vague error message. FILE, LINE, and FUNCTION
9063 are of the caller. */
9065 void
9066 tree_check_failed (const_tree node, const char *file,
9067 int line, const char *function, ...)
9069 va_list args;
9070 const char *buffer;
9071 unsigned length = 0;
9072 enum tree_code code;
9074 va_start (args, function);
9075 while ((code = (enum tree_code) va_arg (args, int)))
9076 length += 4 + strlen (get_tree_code_name (code));
9077 va_end (args);
9078 if (length)
9080 char *tmp;
9081 va_start (args, function);
9082 length += strlen ("expected ");
9083 buffer = tmp = (char *) alloca (length);
9084 length = 0;
9085 while ((code = (enum tree_code) va_arg (args, int)))
9087 const char *prefix = length ? " or " : "expected ";
9089 strcpy (tmp + length, prefix);
9090 length += strlen (prefix);
9091 strcpy (tmp + length, get_tree_code_name (code));
9092 length += strlen (get_tree_code_name (code));
9094 va_end (args);
9096 else
9097 buffer = "unexpected node";
9099 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9100 buffer, get_tree_code_name (TREE_CODE (node)),
9101 function, trim_filename (file), line);
9104 /* Complain that the tree code of NODE does match the expected 0
9105 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9106 the caller. */
9108 void
9109 tree_not_check_failed (const_tree node, const char *file,
9110 int line, const char *function, ...)
9112 va_list args;
9113 char *buffer;
9114 unsigned length = 0;
9115 enum tree_code code;
9117 va_start (args, function);
9118 while ((code = (enum tree_code) va_arg (args, int)))
9119 length += 4 + strlen (get_tree_code_name (code));
9120 va_end (args);
9121 va_start (args, function);
9122 buffer = (char *) alloca (length);
9123 length = 0;
9124 while ((code = (enum tree_code) va_arg (args, int)))
9126 if (length)
9128 strcpy (buffer + length, " or ");
9129 length += 4;
9131 strcpy (buffer + length, get_tree_code_name (code));
9132 length += strlen (get_tree_code_name (code));
9134 va_end (args);
9136 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9137 buffer, get_tree_code_name (TREE_CODE (node)),
9138 function, trim_filename (file), line);
9141 /* Similar to tree_check_failed, except that we check for a class of tree
9142 code, given in CL. */
9144 void
9145 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9146 const char *file, int line, const char *function)
9148 internal_error
9149 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9150 TREE_CODE_CLASS_STRING (cl),
9151 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9152 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9155 /* Similar to tree_check_failed, except that instead of specifying a
9156 dozen codes, use the knowledge that they're all sequential. */
9158 void
9159 tree_range_check_failed (const_tree node, const char *file, int line,
9160 const char *function, enum tree_code c1,
9161 enum tree_code c2)
9163 char *buffer;
9164 unsigned length = 0;
9165 unsigned int c;
9167 for (c = c1; c <= c2; ++c)
9168 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9170 length += strlen ("expected ");
9171 buffer = (char *) alloca (length);
9172 length = 0;
9174 for (c = c1; c <= c2; ++c)
9176 const char *prefix = length ? " or " : "expected ";
9178 strcpy (buffer + length, prefix);
9179 length += strlen (prefix);
9180 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9181 length += strlen (get_tree_code_name ((enum tree_code) c));
9184 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9185 buffer, get_tree_code_name (TREE_CODE (node)),
9186 function, trim_filename (file), line);
9190 /* Similar to tree_check_failed, except that we check that a tree does
9191 not belong to the specified class, given in CL. */
9193 void
9194 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9195 const char *file, int line, const char *function)
9197 internal_error
9198 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9199 TREE_CODE_CLASS_STRING (cl),
9200 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9201 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9205 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9207 void
9208 omp_clause_check_failed (const_tree node, const char *file, int line,
9209 const char *function, enum omp_clause_code code)
9211 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9212 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9213 function, trim_filename (file), line);
9217 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9219 void
9220 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9221 const char *function, enum omp_clause_code c1,
9222 enum omp_clause_code c2)
9224 char *buffer;
9225 unsigned length = 0;
9226 unsigned int c;
9228 for (c = c1; c <= c2; ++c)
9229 length += 4 + strlen (omp_clause_code_name[c]);
9231 length += strlen ("expected ");
9232 buffer = (char *) alloca (length);
9233 length = 0;
9235 for (c = c1; c <= c2; ++c)
9237 const char *prefix = length ? " or " : "expected ";
9239 strcpy (buffer + length, prefix);
9240 length += strlen (prefix);
9241 strcpy (buffer + length, omp_clause_code_name[c]);
9242 length += strlen (omp_clause_code_name[c]);
9245 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9246 buffer, omp_clause_code_name[TREE_CODE (node)],
9247 function, trim_filename (file), line);
9251 #undef DEFTREESTRUCT
9252 #define DEFTREESTRUCT(VAL, NAME) NAME,
9254 static const char *ts_enum_names[] = {
9255 #include "treestruct.def"
9257 #undef DEFTREESTRUCT
9259 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9261 /* Similar to tree_class_check_failed, except that we check for
9262 whether CODE contains the tree structure identified by EN. */
9264 void
9265 tree_contains_struct_check_failed (const_tree node,
9266 const enum tree_node_structure_enum en,
9267 const char *file, int line,
9268 const char *function)
9270 internal_error
9271 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9272 TS_ENUM_NAME (en),
9273 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9277 /* Similar to above, except that the check is for the bounds of a
9278 TREE_INT_CST's (dynamically sized) vector of elements. */
9280 void
9281 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9282 const char *function)
9284 internal_error
9285 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9286 idx + 1, len, function, trim_filename (file), line);
9289 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9290 (dynamically sized) vector. */
9292 void
9293 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9294 const char *function)
9296 internal_error
9297 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9298 idx + 1, len, function, trim_filename (file), line);
9301 /* Similar to above, except that the check is for the bounds of the operand
9302 vector of an expression node EXP. */
9304 void
9305 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9306 int line, const char *function)
9308 enum tree_code code = TREE_CODE (exp);
9309 internal_error
9310 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9311 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9312 function, trim_filename (file), line);
9315 /* Similar to above, except that the check is for the number of
9316 operands of an OMP_CLAUSE node. */
9318 void
9319 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9320 int line, const char *function)
9322 internal_error
9323 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9324 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9325 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9326 trim_filename (file), line);
9328 #endif /* ENABLE_TREE_CHECKING */
9330 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9331 and mapped to the machine mode MODE. Initialize its fields and build
9332 the information necessary for debugging output. */
9334 static tree
9335 make_vector_type (tree innertype, int nunits, machine_mode mode)
9337 tree t;
9338 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9340 t = make_node (VECTOR_TYPE);
9341 TREE_TYPE (t) = mv_innertype;
9342 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9343 SET_TYPE_MODE (t, mode);
9345 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9346 SET_TYPE_STRUCTURAL_EQUALITY (t);
9347 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9348 || mode != VOIDmode)
9349 && !VECTOR_BOOLEAN_TYPE_P (t))
9350 TYPE_CANONICAL (t)
9351 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9353 layout_type (t);
9355 hashval_t hash = type_hash_canon_hash (t);
9356 t = type_hash_canon (hash, t);
9358 /* We have built a main variant, based on the main variant of the
9359 inner type. Use it to build the variant we return. */
9360 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9361 && TREE_TYPE (t) != innertype)
9362 return build_type_attribute_qual_variant (t,
9363 TYPE_ATTRIBUTES (innertype),
9364 TYPE_QUALS (innertype));
9366 return t;
9369 static tree
9370 make_or_reuse_type (unsigned size, int unsignedp)
9372 int i;
9374 if (size == INT_TYPE_SIZE)
9375 return unsignedp ? unsigned_type_node : integer_type_node;
9376 if (size == CHAR_TYPE_SIZE)
9377 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9378 if (size == SHORT_TYPE_SIZE)
9379 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9380 if (size == LONG_TYPE_SIZE)
9381 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9382 if (size == LONG_LONG_TYPE_SIZE)
9383 return (unsignedp ? long_long_unsigned_type_node
9384 : long_long_integer_type_node);
9386 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9387 if (size == int_n_data[i].bitsize
9388 && int_n_enabled_p[i])
9389 return (unsignedp ? int_n_trees[i].unsigned_type
9390 : int_n_trees[i].signed_type);
9392 if (unsignedp)
9393 return make_unsigned_type (size);
9394 else
9395 return make_signed_type (size);
9398 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9400 static tree
9401 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9403 if (satp)
9405 if (size == SHORT_FRACT_TYPE_SIZE)
9406 return unsignedp ? sat_unsigned_short_fract_type_node
9407 : sat_short_fract_type_node;
9408 if (size == FRACT_TYPE_SIZE)
9409 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9410 if (size == LONG_FRACT_TYPE_SIZE)
9411 return unsignedp ? sat_unsigned_long_fract_type_node
9412 : sat_long_fract_type_node;
9413 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9414 return unsignedp ? sat_unsigned_long_long_fract_type_node
9415 : sat_long_long_fract_type_node;
9417 else
9419 if (size == SHORT_FRACT_TYPE_SIZE)
9420 return unsignedp ? unsigned_short_fract_type_node
9421 : short_fract_type_node;
9422 if (size == FRACT_TYPE_SIZE)
9423 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9424 if (size == LONG_FRACT_TYPE_SIZE)
9425 return unsignedp ? unsigned_long_fract_type_node
9426 : long_fract_type_node;
9427 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9428 return unsignedp ? unsigned_long_long_fract_type_node
9429 : long_long_fract_type_node;
9432 return make_fract_type (size, unsignedp, satp);
9435 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9437 static tree
9438 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9440 if (satp)
9442 if (size == SHORT_ACCUM_TYPE_SIZE)
9443 return unsignedp ? sat_unsigned_short_accum_type_node
9444 : sat_short_accum_type_node;
9445 if (size == ACCUM_TYPE_SIZE)
9446 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9447 if (size == LONG_ACCUM_TYPE_SIZE)
9448 return unsignedp ? sat_unsigned_long_accum_type_node
9449 : sat_long_accum_type_node;
9450 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9451 return unsignedp ? sat_unsigned_long_long_accum_type_node
9452 : sat_long_long_accum_type_node;
9454 else
9456 if (size == SHORT_ACCUM_TYPE_SIZE)
9457 return unsignedp ? unsigned_short_accum_type_node
9458 : short_accum_type_node;
9459 if (size == ACCUM_TYPE_SIZE)
9460 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9461 if (size == LONG_ACCUM_TYPE_SIZE)
9462 return unsignedp ? unsigned_long_accum_type_node
9463 : long_accum_type_node;
9464 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9465 return unsignedp ? unsigned_long_long_accum_type_node
9466 : long_long_accum_type_node;
9469 return make_accum_type (size, unsignedp, satp);
9473 /* Create an atomic variant node for TYPE. This routine is called
9474 during initialization of data types to create the 5 basic atomic
9475 types. The generic build_variant_type function requires these to
9476 already be set up in order to function properly, so cannot be
9477 called from there. If ALIGN is non-zero, then ensure alignment is
9478 overridden to this value. */
9480 static tree
9481 build_atomic_base (tree type, unsigned int align)
9483 tree t;
9485 /* Make sure it's not already registered. */
9486 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9487 return t;
9489 t = build_variant_type_copy (type);
9490 set_type_quals (t, TYPE_QUAL_ATOMIC);
9492 if (align)
9493 SET_TYPE_ALIGN (t, align);
9495 return t;
9498 /* Information about the _FloatN and _FloatNx types. This must be in
9499 the same order as the corresponding TI_* enum values. */
9500 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9502 { 16, false },
9503 { 32, false },
9504 { 64, false },
9505 { 128, false },
9506 { 32, true },
9507 { 64, true },
9508 { 128, true },
9512 /* Create nodes for all integer types (and error_mark_node) using the sizes
9513 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9515 void
9516 build_common_tree_nodes (bool signed_char)
9518 int i;
9520 error_mark_node = make_node (ERROR_MARK);
9521 TREE_TYPE (error_mark_node) = error_mark_node;
9523 initialize_sizetypes ();
9525 /* Define both `signed char' and `unsigned char'. */
9526 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9527 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9528 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9529 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9531 /* Define `char', which is like either `signed char' or `unsigned char'
9532 but not the same as either. */
9533 char_type_node
9534 = (signed_char
9535 ? make_signed_type (CHAR_TYPE_SIZE)
9536 : make_unsigned_type (CHAR_TYPE_SIZE));
9537 TYPE_STRING_FLAG (char_type_node) = 1;
9539 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9540 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9541 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9542 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9543 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9544 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9545 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9546 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9548 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9550 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9551 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9552 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9553 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9555 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9556 && int_n_enabled_p[i])
9558 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9559 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9563 /* Define a boolean type. This type only represents boolean values but
9564 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9565 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9566 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9567 TYPE_PRECISION (boolean_type_node) = 1;
9568 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9570 /* Define what type to use for size_t. */
9571 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9572 size_type_node = unsigned_type_node;
9573 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9574 size_type_node = long_unsigned_type_node;
9575 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9576 size_type_node = long_long_unsigned_type_node;
9577 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9578 size_type_node = short_unsigned_type_node;
9579 else
9581 int i;
9583 size_type_node = NULL_TREE;
9584 for (i = 0; i < NUM_INT_N_ENTS; i++)
9585 if (int_n_enabled_p[i])
9587 char name[50];
9588 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9590 if (strcmp (name, SIZE_TYPE) == 0)
9592 size_type_node = int_n_trees[i].unsigned_type;
9595 if (size_type_node == NULL_TREE)
9596 gcc_unreachable ();
9599 /* Define what type to use for ptrdiff_t. */
9600 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9601 ptrdiff_type_node = integer_type_node;
9602 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9603 ptrdiff_type_node = long_integer_type_node;
9604 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9605 ptrdiff_type_node = long_long_integer_type_node;
9606 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9607 ptrdiff_type_node = short_integer_type_node;
9608 else
9610 ptrdiff_type_node = NULL_TREE;
9611 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9612 if (int_n_enabled_p[i])
9614 char name[50];
9615 sprintf (name, "__int%d", int_n_data[i].bitsize);
9616 if (strcmp (name, PTRDIFF_TYPE) == 0)
9617 ptrdiff_type_node = int_n_trees[i].signed_type;
9619 if (ptrdiff_type_node == NULL_TREE)
9620 gcc_unreachable ();
9623 /* Fill in the rest of the sized types. Reuse existing type nodes
9624 when possible. */
9625 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9626 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9627 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9628 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9629 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9631 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9632 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9633 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9634 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9635 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9637 /* Don't call build_qualified_type for atomics. That routine does
9638 special processing for atomics, and until they are initialized
9639 it's better not to make that call.
9641 Check to see if there is a target override for atomic types. */
9643 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9644 targetm.atomic_align_for_mode (QImode));
9645 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9646 targetm.atomic_align_for_mode (HImode));
9647 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9648 targetm.atomic_align_for_mode (SImode));
9649 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9650 targetm.atomic_align_for_mode (DImode));
9651 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9652 targetm.atomic_align_for_mode (TImode));
9654 access_public_node = get_identifier ("public");
9655 access_protected_node = get_identifier ("protected");
9656 access_private_node = get_identifier ("private");
9658 /* Define these next since types below may use them. */
9659 integer_zero_node = build_int_cst (integer_type_node, 0);
9660 integer_one_node = build_int_cst (integer_type_node, 1);
9661 integer_three_node = build_int_cst (integer_type_node, 3);
9662 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9664 size_zero_node = size_int (0);
9665 size_one_node = size_int (1);
9666 bitsize_zero_node = bitsize_int (0);
9667 bitsize_one_node = bitsize_int (1);
9668 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9670 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9671 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9673 void_type_node = make_node (VOID_TYPE);
9674 layout_type (void_type_node);
9676 pointer_bounds_type_node = targetm.chkp_bound_type ();
9678 /* We are not going to have real types in C with less than byte alignment,
9679 so we might as well not have any types that claim to have it. */
9680 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9681 TYPE_USER_ALIGN (void_type_node) = 0;
9683 void_node = make_node (VOID_CST);
9684 TREE_TYPE (void_node) = void_type_node;
9686 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9687 layout_type (TREE_TYPE (null_pointer_node));
9689 ptr_type_node = build_pointer_type (void_type_node);
9690 const_ptr_type_node
9691 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9692 for (unsigned i = 0;
9693 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
9694 ++i)
9695 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9697 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9699 float_type_node = make_node (REAL_TYPE);
9700 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9701 layout_type (float_type_node);
9703 double_type_node = make_node (REAL_TYPE);
9704 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9705 layout_type (double_type_node);
9707 long_double_type_node = make_node (REAL_TYPE);
9708 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9709 layout_type (long_double_type_node);
9711 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9713 int n = floatn_nx_types[i].n;
9714 bool extended = floatn_nx_types[i].extended;
9715 scalar_float_mode mode;
9716 if (!targetm.floatn_mode (n, extended).exists (&mode))
9717 continue;
9718 int precision = GET_MODE_PRECISION (mode);
9719 /* Work around the rs6000 KFmode having precision 113 not
9720 128. */
9721 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9722 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9723 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9724 if (!extended)
9725 gcc_assert (min_precision == n);
9726 if (precision < min_precision)
9727 precision = min_precision;
9728 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9729 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9730 layout_type (FLOATN_NX_TYPE_NODE (i));
9731 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9734 float_ptr_type_node = build_pointer_type (float_type_node);
9735 double_ptr_type_node = build_pointer_type (double_type_node);
9736 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9737 integer_ptr_type_node = build_pointer_type (integer_type_node);
9739 /* Fixed size integer types. */
9740 uint16_type_node = make_or_reuse_type (16, 1);
9741 uint32_type_node = make_or_reuse_type (32, 1);
9742 uint64_type_node = make_or_reuse_type (64, 1);
9744 /* Decimal float types. */
9745 dfloat32_type_node = make_node (REAL_TYPE);
9746 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9747 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9748 layout_type (dfloat32_type_node);
9749 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9751 dfloat64_type_node = make_node (REAL_TYPE);
9752 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9753 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9754 layout_type (dfloat64_type_node);
9755 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9757 dfloat128_type_node = make_node (REAL_TYPE);
9758 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9759 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9760 layout_type (dfloat128_type_node);
9761 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9763 complex_integer_type_node = build_complex_type (integer_type_node, true);
9764 complex_float_type_node = build_complex_type (float_type_node, true);
9765 complex_double_type_node = build_complex_type (double_type_node, true);
9766 complex_long_double_type_node = build_complex_type (long_double_type_node,
9767 true);
9769 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9771 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9772 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9773 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9776 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9777 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9778 sat_ ## KIND ## _type_node = \
9779 make_sat_signed_ ## KIND ## _type (SIZE); \
9780 sat_unsigned_ ## KIND ## _type_node = \
9781 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9782 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9783 unsigned_ ## KIND ## _type_node = \
9784 make_unsigned_ ## KIND ## _type (SIZE);
9786 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9787 sat_ ## WIDTH ## KIND ## _type_node = \
9788 make_sat_signed_ ## KIND ## _type (SIZE); \
9789 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9790 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9791 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9792 unsigned_ ## WIDTH ## KIND ## _type_node = \
9793 make_unsigned_ ## KIND ## _type (SIZE);
9795 /* Make fixed-point type nodes based on four different widths. */
9796 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9797 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9798 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9799 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9800 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9802 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9803 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9804 NAME ## _type_node = \
9805 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9806 u ## NAME ## _type_node = \
9807 make_or_reuse_unsigned_ ## KIND ## _type \
9808 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9809 sat_ ## NAME ## _type_node = \
9810 make_or_reuse_sat_signed_ ## KIND ## _type \
9811 (GET_MODE_BITSIZE (MODE ## mode)); \
9812 sat_u ## NAME ## _type_node = \
9813 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9814 (GET_MODE_BITSIZE (U ## MODE ## mode));
9816 /* Fixed-point type and mode nodes. */
9817 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9818 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9819 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9820 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9821 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9822 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9823 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9824 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9825 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9826 MAKE_FIXED_MODE_NODE (accum, da, DA)
9827 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9830 tree t = targetm.build_builtin_va_list ();
9832 /* Many back-ends define record types without setting TYPE_NAME.
9833 If we copied the record type here, we'd keep the original
9834 record type without a name. This breaks name mangling. So,
9835 don't copy record types and let c_common_nodes_and_builtins()
9836 declare the type to be __builtin_va_list. */
9837 if (TREE_CODE (t) != RECORD_TYPE)
9838 t = build_variant_type_copy (t);
9840 va_list_type_node = t;
9844 /* Modify DECL for given flags.
9845 TM_PURE attribute is set only on types, so the function will modify
9846 DECL's type when ECF_TM_PURE is used. */
9848 void
9849 set_call_expr_flags (tree decl, int flags)
9851 if (flags & ECF_NOTHROW)
9852 TREE_NOTHROW (decl) = 1;
9853 if (flags & ECF_CONST)
9854 TREE_READONLY (decl) = 1;
9855 if (flags & ECF_PURE)
9856 DECL_PURE_P (decl) = 1;
9857 if (flags & ECF_LOOPING_CONST_OR_PURE)
9858 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9859 if (flags & ECF_NOVOPS)
9860 DECL_IS_NOVOPS (decl) = 1;
9861 if (flags & ECF_NORETURN)
9862 TREE_THIS_VOLATILE (decl) = 1;
9863 if (flags & ECF_MALLOC)
9864 DECL_IS_MALLOC (decl) = 1;
9865 if (flags & ECF_RETURNS_TWICE)
9866 DECL_IS_RETURNS_TWICE (decl) = 1;
9867 if (flags & ECF_LEAF)
9868 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9869 NULL, DECL_ATTRIBUTES (decl));
9870 if (flags & ECF_COLD)
9871 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9872 NULL, DECL_ATTRIBUTES (decl));
9873 if (flags & ECF_RET1)
9874 DECL_ATTRIBUTES (decl)
9875 = tree_cons (get_identifier ("fn spec"),
9876 build_tree_list (NULL_TREE, build_string (1, "1")),
9877 DECL_ATTRIBUTES (decl));
9878 if ((flags & ECF_TM_PURE) && flag_tm)
9879 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9880 /* Looping const or pure is implied by noreturn.
9881 There is currently no way to declare looping const or looping pure alone. */
9882 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9883 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
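/* For illustration: assuming FNDECL is some FUNCTION_DECL built elsewhere,
   a call such as

     set_call_expr_flags (fndecl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);

   sets TREE_READONLY and TREE_NOTHROW on FNDECL and chains a "leaf"
   attribute onto DECL_ATTRIBUTES (fndecl), exactly as the mapping above
   describes.  */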
9887 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9889 static void
9890 local_define_builtin (const char *name, tree type, enum built_in_function code,
9891 const char *library_name, int ecf_flags)
9893 tree decl;
9895 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9896 library_name, NULL_TREE);
9897 set_call_expr_flags (decl, ecf_flags);
9899 set_builtin_decl (code, decl, true);
9902 /* Call this function after instantiating all builtins that the language
9903 front end cares about. This will build the rest of the builtins
9904 and internal functions that are relied upon by the tree optimizers and
9905 the middle-end. */
9907 void
9908 build_common_builtin_nodes (void)
9910 tree tmp, ftype;
9911 int ecf_flags;
9913 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9914 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9916 ftype = build_function_type (void_type_node, void_list_node);
9917 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9918 local_define_builtin ("__builtin_unreachable", ftype,
9919 BUILT_IN_UNREACHABLE,
9920 "__builtin_unreachable",
9921 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9922 | ECF_CONST | ECF_COLD);
9923 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9924 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9925 "abort",
9926 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9929 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9930 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9932 ftype = build_function_type_list (ptr_type_node,
9933 ptr_type_node, const_ptr_type_node,
9934 size_type_node, NULL_TREE);
9936 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9937 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9938 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
9939 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9940 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9941 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
9944 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9946 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9947 const_ptr_type_node, size_type_node,
9948 NULL_TREE);
9949 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9950 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9953 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9955 ftype = build_function_type_list (ptr_type_node,
9956 ptr_type_node, integer_type_node,
9957 size_type_node, NULL_TREE);
9958 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9959 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
9962 /* If we're checking the stack, `alloca' can throw. */
9963 const int alloca_flags
9964 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9966 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9968 ftype = build_function_type_list (ptr_type_node,
9969 size_type_node, NULL_TREE);
9970 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9971 "alloca", alloca_flags);
9974 ftype = build_function_type_list (ptr_type_node, size_type_node,
9975 size_type_node, NULL_TREE);
9976 local_define_builtin ("__builtin_alloca_with_align", ftype,
9977 BUILT_IN_ALLOCA_WITH_ALIGN,
9978 "__builtin_alloca_with_align",
9979 alloca_flags);
9981 ftype = build_function_type_list (ptr_type_node, size_type_node,
9982 size_type_node, size_type_node, NULL_TREE);
9983 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9984 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9985 "__builtin_alloca_with_align_and_max",
9986 alloca_flags);
9988 ftype = build_function_type_list (void_type_node,
9989 ptr_type_node, ptr_type_node,
9990 ptr_type_node, NULL_TREE);
9991 local_define_builtin ("__builtin_init_trampoline", ftype,
9992 BUILT_IN_INIT_TRAMPOLINE,
9993 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9994 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9995 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9996 "__builtin_init_heap_trampoline",
9997 ECF_NOTHROW | ECF_LEAF);
9998 local_define_builtin ("__builtin_init_descriptor", ftype,
9999 BUILT_IN_INIT_DESCRIPTOR,
10000 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
10002 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10003 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10004 BUILT_IN_ADJUST_TRAMPOLINE,
10005 "__builtin_adjust_trampoline",
10006 ECF_CONST | ECF_NOTHROW);
10007 local_define_builtin ("__builtin_adjust_descriptor", ftype,
10008 BUILT_IN_ADJUST_DESCRIPTOR,
10009 "__builtin_adjust_descriptor",
10010 ECF_CONST | ECF_NOTHROW);
10012 ftype = build_function_type_list (void_type_node,
10013 ptr_type_node, ptr_type_node, NULL_TREE);
10014 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10015 BUILT_IN_NONLOCAL_GOTO,
10016 "__builtin_nonlocal_goto",
10017 ECF_NORETURN | ECF_NOTHROW);
10019 ftype = build_function_type_list (void_type_node,
10020 ptr_type_node, ptr_type_node, NULL_TREE);
10021 local_define_builtin ("__builtin_setjmp_setup", ftype,
10022 BUILT_IN_SETJMP_SETUP,
10023 "__builtin_setjmp_setup", ECF_NOTHROW);
10025 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10026 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10027 BUILT_IN_SETJMP_RECEIVER,
10028 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10030 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10031 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10032 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10034 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10035 local_define_builtin ("__builtin_stack_restore", ftype,
10036 BUILT_IN_STACK_RESTORE,
10037 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10039 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10040 const_ptr_type_node, size_type_node,
10041 NULL_TREE);
10042 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10043 "__builtin_memcmp_eq",
10044 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10046 /* If there's a possibility that we might use the ARM EABI, build the
10047 alternate __cxa_end_cleanup node used to resume from C++. */
10048 if (targetm.arm_eabi_unwinder)
10050 ftype = build_function_type_list (void_type_node, NULL_TREE);
10051 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10052 BUILT_IN_CXA_END_CLEANUP,
10053 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10056 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10057 local_define_builtin ("__builtin_unwind_resume", ftype,
10058 BUILT_IN_UNWIND_RESUME,
10059 ((targetm_common.except_unwind_info (&global_options)
10060 == UI_SJLJ)
10061 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10062 ECF_NORETURN);
10064 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10066 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10067 NULL_TREE);
10068 local_define_builtin ("__builtin_return_address", ftype,
10069 BUILT_IN_RETURN_ADDRESS,
10070 "__builtin_return_address",
10071 ECF_NOTHROW);
10074 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10075 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10077 ftype = build_function_type_list (void_type_node, ptr_type_node,
10078 ptr_type_node, NULL_TREE);
10079 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10080 local_define_builtin ("__cyg_profile_func_enter", ftype,
10081 BUILT_IN_PROFILE_FUNC_ENTER,
10082 "__cyg_profile_func_enter", 0);
10083 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10084 local_define_builtin ("__cyg_profile_func_exit", ftype,
10085 BUILT_IN_PROFILE_FUNC_EXIT,
10086 "__cyg_profile_func_exit", 0);
10089 /* The exception object and filter values from the runtime. The argument
10090 must be zero before exception lowering, i.e. from the front end. After
10091 exception lowering, it will be the region number for the exception
10092 landing pad. These functions are PURE instead of CONST to prevent
10093 them from being hoisted past the exception edge that will initialize
10094 its value in the landing pad. */
10095 ftype = build_function_type_list (ptr_type_node,
10096 integer_type_node, NULL_TREE);
10097 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10098 /* Only use TM_PURE if we have TM language support. */
10099 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10100 ecf_flags |= ECF_TM_PURE;
10101 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10102 "__builtin_eh_pointer", ecf_flags);
10104 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10105 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10106 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10107 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10109 ftype = build_function_type_list (void_type_node,
10110 integer_type_node, integer_type_node,
10111 NULL_TREE);
10112 local_define_builtin ("__builtin_eh_copy_values", ftype,
10113 BUILT_IN_EH_COPY_VALUES,
10114 "__builtin_eh_copy_values", ECF_NOTHROW);
10116 /* Complex multiplication and division. These are handled as builtins
10117 rather than optabs because emit_library_call_value doesn't support
10118 complex. Further, we can do slightly better with folding these
10119 beasties if the real and imaginary parts of the arguments are separate. */
10121 int mode;
10123 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10125 char mode_name_buf[4], *q;
10126 const char *p;
10127 enum built_in_function mcode, dcode;
10128 tree type, inner_type;
10129 const char *prefix = "__";
10131 if (targetm.libfunc_gnu_prefix)
10132 prefix = "__gnu_";
10134 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10135 if (type == NULL)
10136 continue;
10137 inner_type = TREE_TYPE (type);
10139 ftype = build_function_type_list (type, inner_type, inner_type,
10140 inner_type, inner_type, NULL_TREE);
10142 mcode = ((enum built_in_function)
10143 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10144 dcode = ((enum built_in_function)
10145 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10147 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10148 *q = TOLOWER (*p);
10149 *q = '\0';
10151 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10152 NULL);
10153 local_define_builtin (built_in_names[mcode], ftype, mcode,
10154 built_in_names[mcode],
10155 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10157 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10158 NULL);
10159 local_define_builtin (built_in_names[dcode], ftype, dcode,
10160 built_in_names[dcode],
10161 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10165 init_internal_fns ();
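/* For illustration: with the default "__" prefix, the loop above registers
   library names such as "__mulsc3"/"__divsc3" for SCmode and
   "__muldc3"/"__divdc3" for DCmode, each taking the real and imaginary
   parts of both operands as four scalar arguments, e.g.

     _Complex float __mulsc3 (float a, float b, float c, float d);

   (prototype shown for libgcc's implementation; the exact set of functions
   depends on which complex float modes the target supports).  */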
10168 /* HACK. GROSS. This is absolutely disgusting. I wish there were a
10169 better way.
10171 If we requested a pointer to a vector, build up the pointers that
10172 we stripped off while looking for the inner type. Similarly for
10173 return values from functions.
10175 The argument TYPE is the top of the chain, and BOTTOM is the
10176 new type which we will point to. */
10178 tree
10179 reconstruct_complex_type (tree type, tree bottom)
10181 tree inner, outer;
10183 if (TREE_CODE (type) == POINTER_TYPE)
10185 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10186 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10187 TYPE_REF_CAN_ALIAS_ALL (type));
10189 else if (TREE_CODE (type) == REFERENCE_TYPE)
10191 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10192 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10193 TYPE_REF_CAN_ALIAS_ALL (type));
10195 else if (TREE_CODE (type) == ARRAY_TYPE)
10197 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10198 outer = build_array_type (inner, TYPE_DOMAIN (type));
10200 else if (TREE_CODE (type) == FUNCTION_TYPE)
10202 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10203 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10205 else if (TREE_CODE (type) == METHOD_TYPE)
10207 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10208 /* The build_method_type_directly() routine prepends 'this' to the argument
10209 list, so we must compensate by stripping it off again here. */
10210 outer
10211 = build_method_type_directly
10212 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10213 inner,
10214 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10216 else if (TREE_CODE (type) == OFFSET_TYPE)
10218 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10219 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10221 else
10222 return bottom;
10224 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10225 TYPE_QUALS (type));
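/* For illustration, a sketch using only helpers from this file: given a
   pointer-to-float TYPE and a vector BOTTOM,

     tree vec4f = build_vector_type (float_type_node, 4);
     tree ptr   = build_pointer_type (float_type_node);
     tree res   = reconstruct_complex_type (ptr, vec4f);

   RES is then a pointer to the four-element float vector, rebuilt with the
   original pointer's mode, can-alias-all flag, qualifiers and attributes.  */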
10228 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10229 the inner type. */
10230 tree
10231 build_vector_type_for_mode (tree innertype, machine_mode mode)
10233 int nunits;
10234 unsigned int bitsize;
10236 switch (GET_MODE_CLASS (mode))
10238 case MODE_VECTOR_INT:
10239 case MODE_VECTOR_FLOAT:
10240 case MODE_VECTOR_FRACT:
10241 case MODE_VECTOR_UFRACT:
10242 case MODE_VECTOR_ACCUM:
10243 case MODE_VECTOR_UACCUM:
10244 nunits = GET_MODE_NUNITS (mode);
10245 break;
10247 case MODE_INT:
10248 /* Check that there are no leftover bits. */
10249 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10250 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10251 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10252 break;
10254 default:
10255 gcc_unreachable ();
10258 return make_vector_type (innertype, nunits, mode);
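/* For illustration: the MODE_INT case above is what allows an integer mode
   to stand in for a vector, e.g.

     tree v8qi = build_vector_type_for_mode (intQI_type_node, DImode);

   checks that the 64-bit mode is an exact multiple of the 8-bit element
   size and yields an 8-unit vector; for genuine vector modes the unit
   count is simply GET_MODE_NUNITS.  */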
10261 /* Similarly, but takes the inner type and number of units, which must be
10262 a power of two. */
10264 tree
10265 build_vector_type (tree innertype, int nunits)
10267 return make_vector_type (innertype, nunits, VOIDmode);
10270 /* Build truth vector with specified length and number of units. */
10272 tree
10273 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10275 machine_mode mask_mode
10276 = targetm.vectorize.get_mask_mode (nunits, vector_size).else_blk ();
10278 unsigned HOST_WIDE_INT vsize;
10279 if (mask_mode == BLKmode)
10280 vsize = vector_size * BITS_PER_UNIT;
10281 else
10282 vsize = GET_MODE_BITSIZE (mask_mode);
10284 unsigned HOST_WIDE_INT esize = vsize / nunits;
10285 gcc_assert (esize * nunits == vsize);
10287 tree bool_type = build_nonstandard_boolean_type (esize);
10289 return make_vector_type (bool_type, nunits, mask_mode);
10292 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10294 tree
10295 build_same_sized_truth_vector_type (tree vectype)
10297 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10298 return vectype;
10300 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10302 if (!size)
10303 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10305 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10308 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10310 tree
10311 build_opaque_vector_type (tree innertype, int nunits)
10313 tree t = make_vector_type (innertype, nunits, VOIDmode);
10314 tree cand;
10315 /* We always build the non-opaque variant before the opaque one,
10316 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10317 cand = TYPE_NEXT_VARIANT (t);
10318 if (cand
10319 && TYPE_VECTOR_OPAQUE (cand)
10320 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10321 return cand;
10322 /* Otherwise build a variant type and make sure to queue it after
10323 the non-opaque type. */
10324 cand = build_distinct_type_copy (t);
10325 TYPE_VECTOR_OPAQUE (cand) = true;
10326 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10327 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10328 TYPE_NEXT_VARIANT (t) = cand;
10329 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10330 return cand;
10333 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10335 wide_int
10336 vector_cst_int_elt (const_tree t, unsigned int i)
10338 /* First handle elements that are directly encoded. */
10339 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10340 if (i < encoded_nelts)
10341 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, i));
10343 /* Identify the pattern that contains element I and work out the index of
10344 the last encoded element for that pattern. */
10345 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10346 unsigned int pattern = i % npatterns;
10347 unsigned int count = i / npatterns;
10348 unsigned int final_i = encoded_nelts - npatterns + pattern;
10350 /* If there are no steps, the final encoded value is the right one. */
10351 if (!VECTOR_CST_STEPPED_P (t))
10352 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10354 /* Otherwise work out the value from the last two encoded elements. */
10355 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10356 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10357 wide_int diff = wi::to_wide (v2) - wi::to_wide (v1);
10358 return wi::to_wide (v2) + (count - 2) * diff;
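/* A worked example of the stepped decoding above.  For the two-pattern
   series { 1, 11, 2, 12, 3, 13, ... } the encoded elements are
   { 1, 11, 2, 12, 3, 13 } (npatterns == 2, three elements per pattern,
   VECTOR_CST_STEPPED_P set).  Requesting element i == 7 gives
   pattern == 1, count == 3 and final_i == 5, so

     v1 = encoded[3] = 12, v2 = encoded[5] = 13, diff = 1
     result = v2 + (count - 2) * diff = 14

   which is indeed the value at index 7 of the full series.  */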
10361 /* Return the value of element I of VECTOR_CST T. */
10363 tree
10364 vector_cst_elt (const_tree t, unsigned int i)
10366 /* First handle elements that are directly encoded. */
10367 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10368 if (i < encoded_nelts)
10369 return VECTOR_CST_ENCODED_ELT (t, i);
10371 /* If there are no steps, the final encoded value is the right one. */
10372 if (!VECTOR_CST_STEPPED_P (t))
10374 /* Identify the pattern that contains element I and work out the index of
10375 the last encoded element for that pattern. */
10376 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10377 unsigned int pattern = i % npatterns;
10378 unsigned int final_i = encoded_nelts - npatterns + pattern;
10379 return VECTOR_CST_ENCODED_ELT (t, final_i);
10382 /* Otherwise work out the value from the last two encoded elements. */
10383 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10384 vector_cst_int_elt (t, i));
10387 /* Given an initializer INIT, return TRUE if INIT is zero or some
10388 aggregate of zeros. Otherwise return FALSE. */
10389 bool
10390 initializer_zerop (const_tree init)
10392 tree elt;
10394 STRIP_NOPS (init);
10396 switch (TREE_CODE (init))
10398 case INTEGER_CST:
10399 return integer_zerop (init);
10401 case REAL_CST:
10402 /* ??? Note that this is not correct for C4X float formats. There,
10403 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10404 negative exponent. */
10405 return real_zerop (init)
10406 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10408 case FIXED_CST:
10409 return fixed_zerop (init);
10411 case COMPLEX_CST:
10412 return integer_zerop (init)
10413 || (real_zerop (init)
10414 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10415 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10417 case VECTOR_CST:
10418 return (VECTOR_CST_NPATTERNS (init) == 1
10419 && VECTOR_CST_DUPLICATE_P (init)
10420 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)));
10422 case CONSTRUCTOR:
10424 unsigned HOST_WIDE_INT idx;
10426 if (TREE_CLOBBER_P (init))
10427 return false;
10428 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10429 if (!initializer_zerop (elt))
10430 return false;
10431 return true;
10434 case STRING_CST:
10436 int i;
10438 /* We need to loop through all elements to handle cases like
10439 "\0" and "\0foobar". */
10440 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10441 if (TREE_STRING_POINTER (init)[i] != '\0')
10442 return false;
10444 return true;
10447 default:
10448 return false;
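/* A few illustrative cases for the predicate above (the exact trees depend
   on the front end):

     initializer_zerop (build_int_cst (integer_type_node, 0))    => true
     initializer_zerop (build_real (double_type_node, dconstm0)) => false
       (negative zero is explicitly rejected for REAL_CSTs)

   and a VECTOR_CST counts as zero only when it is a single-pattern
   duplicate whose encoded element is itself zero.  */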
10452 /* Check whether vector VEC consists entirely of equal elements and
10453 whether the number of elements corresponds to the type of VEC.
10454 The function returns the first element of the vector
10455 or NULL_TREE if the vector is not uniform. */
10456 tree
10457 uniform_vector_p (const_tree vec)
10459 tree first, t;
10460 unsigned i;
10462 if (vec == NULL_TREE)
10463 return NULL_TREE;
10465 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10467 if (TREE_CODE (vec) == VECTOR_CST)
10469 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10470 return VECTOR_CST_ENCODED_ELT (vec, 0);
10471 return NULL_TREE;
10474 else if (TREE_CODE (vec) == CONSTRUCTOR)
10476 first = error_mark_node;
10478 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10480 if (i == 0)
10482 first = t;
10483 continue;
10485 if (!operand_equal_p (first, t, 0))
10486 return NULL_TREE;
10488 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10489 return NULL_TREE;
10491 return first;
10494 return NULL_TREE;
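/* For illustration: for a VECTOR_CST built as a duplicate, e.g. the
   constant { 7, 7, 7, 7 }, the encoding is a single duplicated pattern, so

     tree elt = uniform_vector_p (vec);

   returns the INTEGER_CST 7.  For a CONSTRUCTOR the elements are instead
   compared with operand_equal_p, and every element of the vector type must
   be explicitly present for the vector to count as uniform.  */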
10497 /* Build an empty statement at location LOC. */
10499 tree
10500 build_empty_stmt (location_t loc)
10502 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10503 SET_EXPR_LOCATION (t, loc);
10504 return t;
10508 /* Build an OpenMP clause with code CODE. LOC is the location of the
10509 clause. */
10511 tree
10512 build_omp_clause (location_t loc, enum omp_clause_code code)
10514 tree t;
10515 int size, length;
10517 length = omp_clause_num_ops[code];
10518 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10520 record_node_allocation_statistics (OMP_CLAUSE, size);
10522 t = (tree) ggc_internal_alloc (size);
10523 memset (t, 0, size);
10524 TREE_SET_CODE (t, OMP_CLAUSE);
10525 OMP_CLAUSE_SET_CODE (t, code);
10526 OMP_CLAUSE_LOCATION (t) = loc;
10528 return t;
10531 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10532 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10533 Except for the CODE and operand count field, other storage for the
10534 object is initialized to zeros. */
10536 tree
10537 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10539 tree t;
10540 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10542 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10543 gcc_assert (len >= 1);
10545 record_node_allocation_statistics (code, length);
10547 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10549 TREE_SET_CODE (t, code);
10551 /* Can't use TREE_OPERAND to store the length because if checking is
10552 enabled, it will try to check the length before we store it. :-P */
10553 t->exp.operands[0] = build_int_cst (sizetype, len);
10555 return t;
10558 /* Helper function for build_call_* functions; build a CALL_EXPR with
10559 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10560 the argument slots. */
10562 static tree
10563 build_call_1 (tree return_type, tree fn, int nargs)
10565 tree t;
10567 t = build_vl_exp (CALL_EXPR, nargs + 3);
10568 TREE_TYPE (t) = return_type;
10569 CALL_EXPR_FN (t) = fn;
10570 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10572 return t;
10575 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10576 FN and a null static chain slot. NARGS is the number of call arguments
10577 which are specified as "..." arguments. */
10579 tree
10580 build_call_nary (tree return_type, tree fn, int nargs, ...)
10582 tree ret;
10583 va_list args;
10584 va_start (args, nargs);
10585 ret = build_call_valist (return_type, fn, nargs, args);
10586 va_end (args);
10587 return ret;
10590 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10591 FN and a null static chain slot. NARGS is the number of call arguments
10592 which are specified as a va_list ARGS. */
10594 tree
10595 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10597 tree t;
10598 int i;
10600 t = build_call_1 (return_type, fn, nargs);
10601 for (i = 0; i < nargs; i++)
10602 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10603 process_call_operands (t);
10604 return t;
10607 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10608 FN and a null static chain slot. NARGS is the number of call arguments
10609 which are specified as a tree array ARGS. */
10611 tree
10612 build_call_array_loc (location_t loc, tree return_type, tree fn,
10613 int nargs, const tree *args)
10615 tree t;
10616 int i;
10618 t = build_call_1 (return_type, fn, nargs);
10619 for (i = 0; i < nargs; i++)
10620 CALL_EXPR_ARG (t, i) = args[i];
10621 process_call_operands (t);
10622 SET_EXPR_LOCATION (t, loc);
10623 return t;
10626 /* Like build_call_array, but takes a vec. */
10628 tree
10629 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10631 tree ret, t;
10632 unsigned int ix;
10634 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10635 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10636 CALL_EXPR_ARG (ret, ix) = t;
10637 process_call_operands (ret);
10638 return ret;
10641 /* Conveniently construct a function call expression. FNDECL names the
10642 function to be called and N arguments are passed in the array
10643 ARGARRAY. */
10645 tree
10646 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10648 tree fntype = TREE_TYPE (fndecl);
10649 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10651 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10654 /* Conveniently construct a function call expression. FNDECL names the
10655 function to be called and the arguments are passed in the vector
10656 VEC. */
10658 tree
10659 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10661 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10662 vec_safe_address (vec));
10666 /* Conveniently construct a function call expression. FNDECL names the
10667 function to be called, N is the number of arguments, and the "..."
10668 parameters are the argument expressions. */
10670 tree
10671 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10673 va_list ap;
10674 tree *argarray = XALLOCAVEC (tree, n);
10675 int i;
10677 va_start (ap, n);
10678 for (i = 0; i < n; i++)
10679 argarray[i] = va_arg (ap, tree);
10680 va_end (ap);
10681 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10684 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10685 varargs macros aren't supported by all bootstrap compilers. */
10687 tree
10688 build_call_expr (tree fndecl, int n, ...)
10690 va_list ap;
10691 tree *argarray = XALLOCAVEC (tree, n);
10692 int i;
10694 va_start (ap, n);
10695 for (i = 0; i < n; i++)
10696 argarray[i] = va_arg (ap, tree);
10697 va_end (ap);
10698 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
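/* A usage sketch for the build_call_* family above, assuming FNDECL is the
   decl of a function taking two size_t-like arguments and LOC is some
   location_t:

     tree call = build_call_expr_loc (loc, fndecl, 2,
                                      size_int (16), size_int (32));

   builds (and folds) a CALL_EXPR with a null static chain; the variadic,
   va_list, array and vec variants above differ only in how the argument
   list is supplied.  */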
10701 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10702 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10703 It will get gimplified later into an ordinary internal function. */
10705 tree
10706 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10707 tree type, int n, const tree *args)
10709 tree t = build_call_1 (type, NULL_TREE, n);
10710 for (int i = 0; i < n; ++i)
10711 CALL_EXPR_ARG (t, i) = args[i];
10712 SET_EXPR_LOCATION (t, loc);
10713 CALL_EXPR_IFN (t) = ifn;
10714 return t;
10717 /* Build internal call expression. This is just like CALL_EXPR, except
10718 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10719 internal function. */
10721 tree
10722 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10723 tree type, int n, ...)
10725 va_list ap;
10726 tree *argarray = XALLOCAVEC (tree, n);
10727 int i;
10729 va_start (ap, n);
10730 for (i = 0; i < n; i++)
10731 argarray[i] = va_arg (ap, tree);
10732 va_end (ap);
10733 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10736 /* Return a function call to FN, if the target is guaranteed to support it,
10737 or null otherwise.
10739 N is the number of arguments, passed in the "...", and TYPE is the
10740 type of the return value. */
10742 tree
10743 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10744 int n, ...)
10746 va_list ap;
10747 tree *argarray = XALLOCAVEC (tree, n);
10748 int i;
10750 va_start (ap, n);
10751 for (i = 0; i < n; i++)
10752 argarray[i] = va_arg (ap, tree);
10753 va_end (ap);
10754 if (internal_fn_p (fn))
10756 internal_fn ifn = as_internal_fn (fn);
10757 if (direct_internal_fn_p (ifn))
10759 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10760 if (!direct_internal_fn_supported_p (ifn, types,
10761 OPTIMIZE_FOR_BOTH))
10762 return NULL_TREE;
10764 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10766 else
10768 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10769 if (!fndecl)
10770 return NULL_TREE;
10771 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10775 /* Return a function call to the appropriate builtin alloca variant.
10777 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10778 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10779 bound for SIZE in case it is not a fixed value. */
10781 tree
10782 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10784 if (max_size >= 0)
10786 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10787 return
10788 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10790 else if (align > 0)
10792 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10793 return build_call_expr (t, 2, size, size_int (align));
10795 else
10797 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10798 return build_call_expr (t, 1, size);
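/* For illustration: the selection logic above means that

     build_alloca_call_expr (size, 0, -1)

   degenerates to a plain __builtin_alloca (size) call, while a nonzero
   ALIGN or a non-negative MAX_SIZE selects the _with_align or
   _with_align_and_max variants registered in build_common_builtin_nodes.  */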
10802 /* Create a new constant string literal and return a char* pointer to it.
10803 The STRING_CST value is the LEN characters at STR. */
10804 tree
10805 build_string_literal (int len, const char *str)
10807 tree t, elem, index, type;
10809 t = build_string (len, str);
10810 elem = build_type_variant (char_type_node, 1, 0);
10811 index = build_index_type (size_int (len - 1));
10812 type = build_array_type (elem, index);
10813 TREE_TYPE (t) = type;
10814 TREE_CONSTANT (t) = 1;
10815 TREE_READONLY (t) = 1;
10816 TREE_STATIC (t) = 1;
10818 type = build_pointer_type (elem);
10819 t = build1 (ADDR_EXPR, type,
10820 build4 (ARRAY_REF, elem,
10821 t, integer_zero_node, NULL_TREE, NULL_TREE));
10822 return t;
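/* A short usage sketch of build_string_literal above:

     tree fmt = build_string_literal (strlen ("%d\n") + 1, "%d\n");

   yields an ADDR_EXPR pointing at element 0 of a constant, read-only,
   static array of four chars.  Note that LEN counts the terminating NUL,
   so callers normally pass strlen (...) + 1.  */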
10827 /* Return true if T (assumed to be a DECL) must be assigned a memory
10828 location. */
10830 bool
10831 needs_to_live_in_memory (const_tree t)
10833 return (TREE_ADDRESSABLE (t)
10834 || is_global_var (t)
10835 || (TREE_CODE (t) == RESULT_DECL
10836 && !DECL_BY_REFERENCE (t)
10837 && aggregate_value_p (t, current_function_decl)));
10840 /* Return value of a constant X and sign-extend it. */
10842 HOST_WIDE_INT
10843 int_cst_value (const_tree x)
10845 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10846 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10848 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10849 gcc_assert (cst_and_fits_in_hwi (x));
10851 if (bits < HOST_BITS_PER_WIDE_INT)
10853 bool negative = ((val >> (bits - 1)) & 1) != 0;
10854 if (negative)
10855 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
10856 else
10857 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
10860 return val;
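/* A worked example of the sign extension above: for a constant of an 8-bit
   type whose low bits are 0xff, BITS is 8 and the bit just below the
   precision is set, so VAL is extended with ones and the function returns
   -1; for low bits 0x7f it returns 127.  The extension is performed at
   TYPE_PRECISION regardless of TYPE_UNSIGNED.  */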
10863 /* If TYPE is an integral or pointer type, return an integer type with
10864 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10865 if TYPE is already an integer type of signedness UNSIGNEDP. */
10867 tree
10868 signed_or_unsigned_type_for (int unsignedp, tree type)
10870 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10871 return type;
10873 if (TREE_CODE (type) == VECTOR_TYPE)
10875 tree inner = TREE_TYPE (type);
10876 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10877 if (!inner2)
10878 return NULL_TREE;
10879 if (inner == inner2)
10880 return type;
10881 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10884 if (!INTEGRAL_TYPE_P (type)
10885 && !POINTER_TYPE_P (type)
10886 && TREE_CODE (type) != OFFSET_TYPE)
10887 return NULL_TREE;
10889 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10892 /* If TYPE is an integral or pointer type, return an integer type with
10893 the same precision which is unsigned, or itself if TYPE is already an
10894 unsigned integer type. */
10896 tree
10897 unsigned_type_for (tree type)
10899 return signed_or_unsigned_type_for (1, type);
10902 /* If TYPE is an integral or pointer type, return an integer type with
10903 the same precision which is signed, or itself if TYPE is already a
10904 signed integer type. */
10906 tree
10907 signed_type_for (tree type)
10909 return signed_or_unsigned_type_for (0, type);
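/* Illustrative uses of the helpers above:

     unsigned_type_for (integer_type_node)  => an unsigned type of int's
                                               precision
     signed_type_for (size_type_node)       => a signed type of size_t's
                                               precision
     unsigned_type_for (ptr_type_node)      => an unsigned integer type of
                                               pointer precision

   Vector types are handled element-wise, and NULL_TREE is returned for
   types that are neither integral, pointer, offset nor vectors thereof.  */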
10912 /* If TYPE is a vector type, return a signed integer vector type with the
10913 same width and number of subparts. Otherwise return boolean_type_node. */
10915 tree
10916 truth_type_for (tree type)
10918 if (TREE_CODE (type) == VECTOR_TYPE)
10920 if (VECTOR_BOOLEAN_TYPE_P (type))
10921 return type;
10922 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
10923 GET_MODE_SIZE (TYPE_MODE (type)));
10925 else
10926 return boolean_type_node;
10929 /* Returns the largest value obtainable by casting something in INNER type to
10930 OUTER type. */
10932 tree
10933 upper_bound_in_type (tree outer, tree inner)
10935 unsigned int det = 0;
10936 unsigned oprec = TYPE_PRECISION (outer);
10937 unsigned iprec = TYPE_PRECISION (inner);
10938 unsigned prec;
10940 /* Compute a unique number for every combination. */
10941 det |= (oprec > iprec) ? 4 : 0;
10942 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10943 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10945 /* Determine the exponent to use. */
10946 switch (det)
10948 case 0:
10949 case 1:
10950 /* oprec <= iprec, outer: signed, inner: don't care. */
10951 prec = oprec - 1;
10952 break;
10953 case 2:
10954 case 3:
10955 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10956 prec = oprec;
10957 break;
10958 case 4:
10959 /* oprec > iprec, outer: signed, inner: signed. */
10960 prec = iprec - 1;
10961 break;
10962 case 5:
10963 /* oprec > iprec, outer: signed, inner: unsigned. */
10964 prec = iprec;
10965 break;
10966 case 6:
10967 /* oprec > iprec, outer: unsigned, inner: signed. */
10968 prec = oprec;
10969 break;
10970 case 7:
10971 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10972 prec = iprec;
10973 break;
10974 default:
10975 gcc_unreachable ();
10978 return wide_int_to_tree (outer,
10979 wi::mask (prec, false, TYPE_PRECISION (outer)));
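/* Two concrete cases of the case analysis above, assuming 8-bit chars and
   32-bit ints:

     upper_bound_in_type (integer_type_node, unsigned_char_type_node)
       => 255   (oprec > iprec, outer signed, inner unsigned: prec = iprec)
     upper_bound_in_type (integer_type_node, signed_char_type_node)
       => 127   (oprec > iprec, both signed: prec = iprec - 1)

   i.e. the result is the all-ones mask of PREC bits expressed in OUTER.  */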
10982 /* Returns the smallest value obtainable by casting something in INNER type to
10983 OUTER type. */
10985 tree
10986 lower_bound_in_type (tree outer, tree inner)
10988 unsigned oprec = TYPE_PRECISION (outer);
10989 unsigned iprec = TYPE_PRECISION (inner);
10991 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10992 and obtain 0. */
10993 if (TYPE_UNSIGNED (outer)
10994 /* If we are widening something of an unsigned type, OUTER type
10995 contains all values of INNER type. In particular, both INNER
10996 and OUTER types have zero in common. */
10997 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10998 return build_int_cst (outer, 0);
10999 else
11001 /* If we are widening a signed type to another signed type, we
11002 want to obtain -2^^(iprec-1). If we are keeping the
11003 precision or narrowing to a signed type, we want to obtain
11004 -2^(oprec-1). */
11005 unsigned prec = oprec > iprec ? iprec : oprec;
11006 return wide_int_to_tree (outer,
11007 wi::mask (prec - 1, true,
11008 TYPE_PRECISION (outer)));
11012 /* Return nonzero if two operands that are suitable for PHI nodes are
11013 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11014 SSA_NAME or invariant. Note that this is strictly an optimization.
11015 That is, callers of this function can directly call operand_equal_p
11016 and get the same result, only slower. */
11018 int
11019 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11021 if (arg0 == arg1)
11022 return 1;
11023 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11024 return 0;
11025 return operand_equal_p (arg0, arg1, 0);
11030 /* Returns the number of zeros at the end of the binary representation of X. */
11030 tree
11031 num_ending_zeros (const_tree x)
11033 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11037 #define WALK_SUBTREE(NODE) \
11038 do \
11040 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11041 if (result) \
11042 return result; \
11044 while (0)
11046 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11047 to be walked whenever a type is seen in the tree. The rest of the
11048 operands and the return value are as for walk_tree. */
11050 static tree
11051 walk_type_fields (tree type, walk_tree_fn func, void *data,
11052 hash_set<tree> *pset, walk_tree_lh lh)
11054 tree result = NULL_TREE;
11056 switch (TREE_CODE (type))
11058 case POINTER_TYPE:
11059 case REFERENCE_TYPE:
11060 case VECTOR_TYPE:
11061 /* We have to worry about mutually recursive pointers. These can't
11062 be written in C. They can in Ada. It's pathological, but
11063 there's an ACATS test (c38102a) that checks it. Deal with this
11064 by checking if we're pointing to another pointer, that one
11065 points to another pointer, that one does too, and we have no htab.
11066 If so, get a hash table. We check three levels deep to avoid
11067 the cost of the hash table if we don't need one. */
11068 if (POINTER_TYPE_P (TREE_TYPE (type))
11069 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11070 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11071 && !pset)
11073 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11074 func, data);
11075 if (result)
11076 return result;
11078 break;
11081 /* fall through */
11083 case COMPLEX_TYPE:
11084 WALK_SUBTREE (TREE_TYPE (type));
11085 break;
11087 case METHOD_TYPE:
11088 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11090 /* Fall through. */
11092 case FUNCTION_TYPE:
11093 WALK_SUBTREE (TREE_TYPE (type));
11095 tree arg;
11097 /* We never want to walk into default arguments. */
11098 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11099 WALK_SUBTREE (TREE_VALUE (arg));
11101 break;
11103 case ARRAY_TYPE:
11104 /* Don't follow this node's type if it is a pointer, for fear that
11105 we'll have infinite recursion. If we have a PSET, then we
11106 need not fear. */
11107 if (pset
11108 || (!POINTER_TYPE_P (TREE_TYPE (type))
11109 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11110 WALK_SUBTREE (TREE_TYPE (type));
11111 WALK_SUBTREE (TYPE_DOMAIN (type));
11112 break;
11114 case OFFSET_TYPE:
11115 WALK_SUBTREE (TREE_TYPE (type));
11116 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11117 break;
11119 default:
11120 break;
11123 return NULL_TREE;
11126 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11127 called with the DATA and the address of each sub-tree. If FUNC returns a
11128 non-NULL value, the traversal is stopped, and the value returned by FUNC
11129 is returned. If PSET is non-NULL it is used to record the nodes visited,
11130 and to avoid visiting a node more than once. */
11132 tree
11133 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11134 hash_set<tree> *pset, walk_tree_lh lh)
11136 enum tree_code code;
11137 int walk_subtrees;
11138 tree result;
11140 #define WALK_SUBTREE_TAIL(NODE) \
11141 do \
11143 tp = & (NODE); \
11144 goto tail_recurse; \
11146 while (0)
11148 tail_recurse:
11149 /* Skip empty subtrees. */
11150 if (!*tp)
11151 return NULL_TREE;
11153 /* Don't walk the same tree twice, if the user has requested
11154 that we avoid doing so. */
11155 if (pset && pset->add (*tp))
11156 return NULL_TREE;
11158 /* Call the function. */
11159 walk_subtrees = 1;
11160 result = (*func) (tp, &walk_subtrees, data);
11162 /* If we found something, return it. */
11163 if (result)
11164 return result;
11166 code = TREE_CODE (*tp);
11168 /* Even if we didn't, FUNC may have decided that there was nothing
11169 interesting below this point in the tree. */
11170 if (!walk_subtrees)
11172 /* But we still need to check our siblings. */
11173 if (code == TREE_LIST)
11174 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11175 else if (code == OMP_CLAUSE)
11176 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11177 else
11178 return NULL_TREE;
11181 if (lh)
11183 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11184 if (result || !walk_subtrees)
11185 return result;
11188 switch (code)
11190 case ERROR_MARK:
11191 case IDENTIFIER_NODE:
11192 case INTEGER_CST:
11193 case REAL_CST:
11194 case FIXED_CST:
11195 case VECTOR_CST:
11196 case STRING_CST:
11197 case BLOCK:
11198 case PLACEHOLDER_EXPR:
11199 case SSA_NAME:
11200 case FIELD_DECL:
11201 case RESULT_DECL:
11202 /* None of these have subtrees other than those already walked
11203 above. */
11204 break;
11206 case TREE_LIST:
11207 WALK_SUBTREE (TREE_VALUE (*tp));
11208 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11209 break;
11211 case TREE_VEC:
11213 int len = TREE_VEC_LENGTH (*tp);
11215 if (len == 0)
11216 break;
11218 /* Walk all elements but the first. */
11219 while (--len)
11220 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11222 /* Now walk the first one as a tail call. */
11223 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11226 case COMPLEX_CST:
11227 WALK_SUBTREE (TREE_REALPART (*tp));
11228 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11230 case CONSTRUCTOR:
11232 unsigned HOST_WIDE_INT idx;
11233 constructor_elt *ce;
11235 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11236 idx++)
11237 WALK_SUBTREE (ce->value);
11239 break;
11241 case SAVE_EXPR:
11242 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11244 case BIND_EXPR:
11246 tree decl;
11247 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11249 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11250 into declarations that are just mentioned, rather than
11251 declared; they don't really belong to this part of the tree.
11252 And, we can see cycles: the initializer for a declaration
11253 can refer to the declaration itself. */
11254 WALK_SUBTREE (DECL_INITIAL (decl));
11255 WALK_SUBTREE (DECL_SIZE (decl));
11256 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11258 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11261 case STATEMENT_LIST:
11263 tree_stmt_iterator i;
11264 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11265 WALK_SUBTREE (*tsi_stmt_ptr (i));
11267 break;
11269 case OMP_CLAUSE:
11270 switch (OMP_CLAUSE_CODE (*tp))
11272 case OMP_CLAUSE_GANG:
11273 case OMP_CLAUSE__GRIDDIM_:
11274 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11275 /* FALLTHRU */
11277 case OMP_CLAUSE_ASYNC:
11278 case OMP_CLAUSE_WAIT:
11279 case OMP_CLAUSE_WORKER:
11280 case OMP_CLAUSE_VECTOR:
11281 case OMP_CLAUSE_NUM_GANGS:
11282 case OMP_CLAUSE_NUM_WORKERS:
11283 case OMP_CLAUSE_VECTOR_LENGTH:
11284 case OMP_CLAUSE_PRIVATE:
11285 case OMP_CLAUSE_SHARED:
11286 case OMP_CLAUSE_FIRSTPRIVATE:
11287 case OMP_CLAUSE_COPYIN:
11288 case OMP_CLAUSE_COPYPRIVATE:
11289 case OMP_CLAUSE_FINAL:
11290 case OMP_CLAUSE_IF:
11291 case OMP_CLAUSE_NUM_THREADS:
11292 case OMP_CLAUSE_SCHEDULE:
11293 case OMP_CLAUSE_UNIFORM:
11294 case OMP_CLAUSE_DEPEND:
11295 case OMP_CLAUSE_NUM_TEAMS:
11296 case OMP_CLAUSE_THREAD_LIMIT:
11297 case OMP_CLAUSE_DEVICE:
11298 case OMP_CLAUSE_DIST_SCHEDULE:
11299 case OMP_CLAUSE_SAFELEN:
11300 case OMP_CLAUSE_SIMDLEN:
11301 case OMP_CLAUSE_ORDERED:
11302 case OMP_CLAUSE_PRIORITY:
11303 case OMP_CLAUSE_GRAINSIZE:
11304 case OMP_CLAUSE_NUM_TASKS:
11305 case OMP_CLAUSE_HINT:
11306 case OMP_CLAUSE_TO_DECLARE:
11307 case OMP_CLAUSE_LINK:
11308 case OMP_CLAUSE_USE_DEVICE_PTR:
11309 case OMP_CLAUSE_IS_DEVICE_PTR:
11310 case OMP_CLAUSE__LOOPTEMP_:
11311 case OMP_CLAUSE__SIMDUID_:
11312 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11313 /* FALLTHRU */
11315 case OMP_CLAUSE_INDEPENDENT:
11316 case OMP_CLAUSE_NOWAIT:
11317 case OMP_CLAUSE_DEFAULT:
11318 case OMP_CLAUSE_UNTIED:
11319 case OMP_CLAUSE_MERGEABLE:
11320 case OMP_CLAUSE_PROC_BIND:
11321 case OMP_CLAUSE_INBRANCH:
11322 case OMP_CLAUSE_NOTINBRANCH:
11323 case OMP_CLAUSE_FOR:
11324 case OMP_CLAUSE_PARALLEL:
11325 case OMP_CLAUSE_SECTIONS:
11326 case OMP_CLAUSE_TASKGROUP:
11327 case OMP_CLAUSE_NOGROUP:
11328 case OMP_CLAUSE_THREADS:
11329 case OMP_CLAUSE_SIMD:
11330 case OMP_CLAUSE_DEFAULTMAP:
11331 case OMP_CLAUSE_AUTO:
11332 case OMP_CLAUSE_SEQ:
11333 case OMP_CLAUSE_TILE:
11334 case OMP_CLAUSE__SIMT_:
11335 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11337 case OMP_CLAUSE_LASTPRIVATE:
11338 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11339 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11340 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11342 case OMP_CLAUSE_COLLAPSE:
11344 int i;
11345 for (i = 0; i < 3; i++)
11346 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11347 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11350 case OMP_CLAUSE_LINEAR:
11351 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11352 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11353 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11354 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11356 case OMP_CLAUSE_ALIGNED:
11357 case OMP_CLAUSE_FROM:
11358 case OMP_CLAUSE_TO:
11359 case OMP_CLAUSE_MAP:
11360 case OMP_CLAUSE__CACHE_:
11361 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11362 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11363 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11365 case OMP_CLAUSE_REDUCTION:
11367 int i;
11368 for (i = 0; i < 5; i++)
11369 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11370 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11373 default:
11374 gcc_unreachable ();
11376 break;
11378 case TARGET_EXPR:
11380 int i, len;
11382 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11383 But, we only want to walk them once. */
11384 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11385 for (i = 0; i < len; ++i)
11386 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11387 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11390 case DECL_EXPR:
11391 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11392 defining. We only want to walk into these fields of a type in this
11393 case and not in the general case of a mere reference to the type.
11395 The criterion is as follows: if the field can be an expression, it
11396 must be walked only here. This should be in keeping with the fields
11397 that are directly gimplified in gimplify_type_sizes in order for the
11398 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11399 variable-sized types.
11401 Note that DECLs get walked as part of processing the BIND_EXPR. */
11402 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11404 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11405 if (TREE_CODE (*type_p) == ERROR_MARK)
11406 return NULL_TREE;
11408 /* Call the function for the type. See if it returns anything or
11409 doesn't want us to continue. If we are to continue, walk both
11410 the normal fields and those for the declaration case. */
11411 result = (*func) (type_p, &walk_subtrees, data);
11412 if (result || !walk_subtrees)
11413 return result;
11415 /* But do not walk a pointed-to type since it may itself need to
11416 be walked in the declaration case if it isn't anonymous. */
11417 if (!POINTER_TYPE_P (*type_p))
11419 result = walk_type_fields (*type_p, func, data, pset, lh);
11420 if (result)
11421 return result;
11424 /* If this is a record type, also walk the fields. */
11425 if (RECORD_OR_UNION_TYPE_P (*type_p))
11427 tree field;
11429 for (field = TYPE_FIELDS (*type_p); field;
11430 field = DECL_CHAIN (field))
11432 /* We'd like to look at the type of the field, but we can
11433 easily get infinite recursion. So assume it's pointed
11434 to elsewhere in the tree. Also, ignore things that
11435 aren't fields. */
11436 if (TREE_CODE (field) != FIELD_DECL)
11437 continue;
11439 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11440 WALK_SUBTREE (DECL_SIZE (field));
11441 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11442 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11443 WALK_SUBTREE (DECL_QUALIFIER (field));
11447 /* Same for scalar types. */
11448 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11449 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11450 || TREE_CODE (*type_p) == INTEGER_TYPE
11451 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11452 || TREE_CODE (*type_p) == REAL_TYPE)
11454 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11455 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11458 WALK_SUBTREE (TYPE_SIZE (*type_p));
11459 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11461 /* FALLTHRU */
11463 default:
11464 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11466 int i, len;
11468 /* Walk over all the sub-trees of this operand. */
11469 len = TREE_OPERAND_LENGTH (*tp);
11471 /* Go through the subtrees. We need to do this in forward order so
11472 that the scope of a FOR_EXPR is handled properly. */
11473 if (len)
11475 for (i = 0; i < len - 1; ++i)
11476 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11477 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11480 /* If this is a type, walk the needed fields in the type. */
11481 else if (TYPE_P (*tp))
11482 return walk_type_fields (*tp, func, data, pset, lh);
11483 break;
11486 /* We didn't find what we were looking for. */
11487 return NULL_TREE;
11489 #undef WALK_SUBTREE_TAIL
11491 #undef WALK_SUBTREE
11493 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11495 tree
11496 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11497 walk_tree_lh lh)
11499 tree result;
11501 hash_set<tree> pset;
11502 result = walk_tree_1 (tp, func, data, &pset, lh);
11503 return result;
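/* A minimal sketch of a walk_tree callback, following the walk_tree_fn
   contract (return nonzero to stop the walk, clear *WALK_SUBTREES to skip
   children); find_label_r is a hypothetical helper, not part of this file:

     static tree
     find_label_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
     {
       if (TREE_CODE (*tp) == LABEL_DECL)
         return *tp;
       if (TYPE_P (*tp))
         *walk_subtrees = 0;
       return NULL_TREE;
     }

     tree label = walk_tree_without_duplicates (&body, find_label_r, NULL);  */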
11507 tree
11508 tree_block (tree t)
11510 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11512 if (IS_EXPR_CODE_CLASS (c))
11513 return LOCATION_BLOCK (t->exp.locus);
11514 gcc_unreachable ();
11515 return NULL;
11518 void
11519 tree_set_block (tree t, tree b)
11521 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11523 if (IS_EXPR_CODE_CLASS (c))
11525 t->exp.locus = set_block (t->exp.locus, b);
11527 else
11528 gcc_unreachable ();
11531 /* Create a nameless artificial label and put it in the current
11532 function context. The label has a location of LOC. Returns the
11533 newly created label. */
11535 tree
11536 create_artificial_label (location_t loc)
11538 tree lab = build_decl (loc,
11539 LABEL_DECL, NULL_TREE, void_type_node);
11541 DECL_ARTIFICIAL (lab) = 1;
11542 DECL_IGNORED_P (lab) = 1;
11543 DECL_CONTEXT (lab) = current_function_decl;
11544 return lab;
11547 /* Given a tree, try to return a useful variable name that we can use
11548 to prefix a temporary that is being assigned the value of the tree.
11549 I.E. given <temp> = &A, return A. */
11551 const char *
11552 get_name (tree t)
11554 tree stripped_decl;
11556 stripped_decl = t;
11557 STRIP_NOPS (stripped_decl);
11558 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11559 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11560 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11562 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11563 if (!name)
11564 return NULL;
11565 return IDENTIFIER_POINTER (name);
11567 else
11569 switch (TREE_CODE (stripped_decl))
11571 case ADDR_EXPR:
11572 return get_name (TREE_OPERAND (stripped_decl, 0));
11573 default:
11574 return NULL;
11581 /* Return true if FNTYPE has a variable argument list. */
11581 bool
11582 stdarg_p (const_tree fntype)
11584 function_args_iterator args_iter;
11585 tree n = NULL_TREE, t;
11587 if (!fntype)
11588 return false;
11590 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11592 n = t;
11595 return n != NULL_TREE && n != void_type_node;
11600 /* Return true if FNTYPE has a prototype. */
11600 bool
11601 prototype_p (const_tree fntype)
11603 tree t;
11605 gcc_assert (fntype != NULL_TREE);
11607 t = TYPE_ARG_TYPES (fntype);
11608 return (t != NULL_TREE);
11611 /* If BLOCK is inlined from an __attribute__((__artificial__))
11612 routine, return a pointer to the location from which it was
11613 called. */
11614 location_t *
11615 block_nonartificial_location (tree block)
11617 location_t *ret = NULL;
11619 while (block && TREE_CODE (block) == BLOCK
11620 && BLOCK_ABSTRACT_ORIGIN (block))
11622 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11624 while (TREE_CODE (ao) == BLOCK
11625 && BLOCK_ABSTRACT_ORIGIN (ao)
11626 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11627 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11629 if (TREE_CODE (ao) == FUNCTION_DECL)
11631 /* If AO is an artificial inline, point RET to the
11632 call site locus at which it has been inlined and continue
11633 the loop, in case AO's caller is also an artificial
11634 inline. */
11635 if (DECL_DECLARED_INLINE_P (ao)
11636 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11637 ret = &BLOCK_SOURCE_LOCATION (block);
11638 else
11639 break;
11641 else if (TREE_CODE (ao) != BLOCK)
11642 break;
11644 block = BLOCK_SUPERCONTEXT (block);
11646 return ret;
11650 /* If EXP is inlined from an __attribute__((__artificial__))
11651 function, return the location of the original call expression. */
11653 location_t
11654 tree_nonartificial_location (tree exp)
11656 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11658 if (loc)
11659 return *loc;
11660 else
11661 return EXPR_LOCATION (exp);
11665 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11666 nodes. */
11670 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11670 hashval_t
11671 cl_option_hasher::hash (tree x)
11673 const_tree const t = x;
11674 const char *p;
11675 size_t i;
11676 size_t len = 0;
11677 hashval_t hash = 0;
11679 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11681 p = (const char *)TREE_OPTIMIZATION (t);
11682 len = sizeof (struct cl_optimization);
11685 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11686 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11688 else
11689 gcc_unreachable ();
11691 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11692 something else. */
11693 for (i = 0; i < len; i++)
11694 if (p[i])
11695 hash = (hash << 4) ^ ((i << 2) | p[i]);
11697 return hash;
11700 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11701 TARGET_OPTION tree node) is the same as that given by *Y, which is a
11702 node of the same kind. */
11704 bool
11705 cl_option_hasher::equal (tree x, tree y)
11707 const_tree const xt = x;
11708 const_tree const yt = y;
11709 const char *xp;
11710 const char *yp;
11711 size_t len;
11713 if (TREE_CODE (xt) != TREE_CODE (yt))
11714 return 0;
11716 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11718 xp = (const char *)TREE_OPTIMIZATION (xt);
11719 yp = (const char *)TREE_OPTIMIZATION (yt);
11720 len = sizeof (struct cl_optimization);
11723 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11725 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11726 TREE_TARGET_OPTION (yt));
11729 else
11730 gcc_unreachable ();
11732 return (memcmp (xp, yp, len) == 0);
11735 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11737 tree
11738 build_optimization_node (struct gcc_options *opts)
11740 tree t;
11742 /* Use the cache of optimization nodes. */
11744 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11745 opts);
11747 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11748 t = *slot;
11749 if (!t)
11751 /* Insert this one into the hash table. */
11752 t = cl_optimization_node;
11753 *slot = t;
11755 /* Make a new node for next time round. */
11756 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11759 return t;
11762 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11764 tree
11765 build_target_option_node (struct gcc_options *opts)
11767 tree t;
11769 /* Use the cache of optimization nodes. */
11771 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11772 opts);
11774 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11775 t = *slot;
11776 if (!t)
11778 /* Insert this one into the hash table. */
11779 t = cl_target_option_node;
11780 *slot = t;
11782 /* Make a new node for next time round. */
11783 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11786 return t;
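/* Companion sketch to the one after build_optimization_node, again with a
   hypothetical FNDECL: the target flavour of the options is attached via
   DECL_FUNCTION_SPECIFIC_TARGET, e.g. when processing attribute target.  */

static void
example_attach_target_options (tree fndecl)
{
  DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
    = build_target_option_node (&global_options);
}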
11789 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11790 so that they aren't saved during PCH writing. */
11792 void
11793 prepare_target_option_nodes_for_pch (void)
11795 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11796 for (; iter != cl_option_hash_table->end (); ++iter)
11797 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11798 TREE_TARGET_GLOBALS (*iter) = NULL;
11801 /* Determine the "ultimate origin" of a block. The block may be an inlined
11802 instance of an inlined instance of a block which is local to an inline
11803 function, so we have to trace all of the way back through the origin chain
11804 to find out what sort of node actually served as the original seed for the
11805 given block. */
11807 tree
11808 block_ultimate_origin (const_tree block)
11810 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11812 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11813 we're trying to output the abstract instance of this function. */
11814 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11815 return NULL_TREE;
11817 if (immediate_origin == NULL_TREE)
11818 return NULL_TREE;
11819 else
11821 tree ret_val;
11822 tree lookahead = immediate_origin;
11826 ret_val = lookahead;
11827 lookahead = (TREE_CODE (ret_val) == BLOCK
11828 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11830 while (lookahead != NULL && lookahead != ret_val);
11832 /* The block's abstract origin chain may not be the *ultimate* origin of
11833 the block. It could lead to a DECL that has an abstract origin set.
11834 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11835 will give us if it has one). Note that DECL's abstract origins are
11836 supposed to be the most distant ancestor (or so decl_ultimate_origin
11837 claims), so we don't need to loop following the DECL origins. */
11838 if (DECL_P (ret_val))
11839 return DECL_ORIGIN (ret_val);
11841 return ret_val;
11845 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11846 no instruction. */
11848 bool
11849 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11851 /* Do not strip casts into or out of differing address spaces. */
11852 if (POINTER_TYPE_P (outer_type)
11853 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11855 if (!POINTER_TYPE_P (inner_type)
11856 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11857 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11858 return false;
11860 else if (POINTER_TYPE_P (inner_type)
11861 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11863 /* We already know that outer_type is not a pointer with
11864 a non-generic address space. */
11865 return false;
11868 /* Use precision rather than machine mode when we can, which gives
11869 the correct answer even for submode (bit-field) types. */
11870 if ((INTEGRAL_TYPE_P (outer_type)
11871 || POINTER_TYPE_P (outer_type)
11872 || TREE_CODE (outer_type) == OFFSET_TYPE)
11873 && (INTEGRAL_TYPE_P (inner_type)
11874 || POINTER_TYPE_P (inner_type)
11875 || TREE_CODE (inner_type) == OFFSET_TYPE))
11876 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11878 /* Otherwise fall back on comparing machine modes (e.g. for
11879 aggregate types, floats). */
11880 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11883 /* Return true iff conversion in EXP generates no instruction. Mark
11884 it inline so that we fully inline into the stripping functions even
11885 though we have two uses of this function. */
11887 static inline bool
11888 tree_nop_conversion (const_tree exp)
11890 tree outer_type, inner_type;
11892 if (!CONVERT_EXPR_P (exp)
11893 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11894 return false;
11895 if (TREE_OPERAND (exp, 0) == error_mark_node)
11896 return false;
11898 outer_type = TREE_TYPE (exp);
11899 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11901 if (!inner_type)
11902 return false;
11904 return tree_nop_conversion_p (outer_type, inner_type);
11907 /* Return true iff conversion in EXP generates no instruction. Don't
11908 consider conversions changing the signedness. */
11910 static bool
11911 tree_sign_nop_conversion (const_tree exp)
11913 tree outer_type, inner_type;
11915 if (!tree_nop_conversion (exp))
11916 return false;
11918 outer_type = TREE_TYPE (exp);
11919 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11921 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11922 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11925 /* Strip conversions from EXP according to tree_nop_conversion and
11926 return the resulting expression. */
11928 tree
11929 tree_strip_nop_conversions (tree exp)
11931 while (tree_nop_conversion (exp))
11932 exp = TREE_OPERAND (exp, 0);
11933 return exp;
11936 /* Strip conversions from EXP according to tree_sign_nop_conversion
11937 and return the resulting expression. */
11939 tree
11940 tree_strip_sign_nop_conversions (tree exp)
11942 while (tree_sign_nop_conversion (exp))
11943 exp = TREE_OPERAND (exp, 0);
11944 return exp;
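/* Sketch of a common use of the stripping helpers above (A and B are
   arbitrary expression trees; the helper name is made up): compare two
   expressions while ignoring conversions that generate no code, so that
   e.g. (int) x and x compare equal when the cast is a no-op.  */

static bool
example_equal_ignoring_nop_casts_p (tree a, tree b)
{
  return operand_equal_p (tree_strip_nop_conversions (a),
			  tree_strip_nop_conversions (b), 0);
}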
11947 /* Avoid any floating point extensions from EXP. */
11948 tree
11949 strip_float_extensions (tree exp)
11951 tree sub, expt, subt;
11953 /* For a floating point constant, look up the narrowest type that can hold
11954 it properly and handle it like (type)(narrowest_type)constant.
11955 This way we can optimize for instance a=a*2.0 where "a" is float
11956 but 2.0 is a double constant. */
11957 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11959 REAL_VALUE_TYPE orig;
11960 tree type = NULL;
11962 orig = TREE_REAL_CST (exp);
11963 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11964 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11965 type = float_type_node;
11966 else if (TYPE_PRECISION (TREE_TYPE (exp))
11967 > TYPE_PRECISION (double_type_node)
11968 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11969 type = double_type_node;
11970 if (type)
11971 return build_real_truncate (type, orig);
11974 if (!CONVERT_EXPR_P (exp))
11975 return exp;
11977 sub = TREE_OPERAND (exp, 0);
11978 subt = TREE_TYPE (sub);
11979 expt = TREE_TYPE (exp);
11981 if (!FLOAT_TYPE_P (subt))
11982 return exp;
11984 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11985 return exp;
11987 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11988 return exp;
11990 return strip_float_extensions (sub);
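/* Hedged sketch of the a = a * 2.0 scenario from the comment above (VALUE
   is assumed to be a REAL_CST built elsewhere): if the constant strips
   down to a type no wider than float, the multiplication can be carried
   out entirely in float.  */

static bool
example_constant_fits_in_float_p (tree value)
{
  tree stripped = strip_float_extensions (value);
  return (TYPE_PRECISION (TREE_TYPE (stripped))
	  <= TYPE_PRECISION (float_type_node));
}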
11993 /* Strip out all handled components that produce invariant
11994 offsets. */
11996 const_tree
11997 strip_invariant_refs (const_tree op)
11999 while (handled_component_p (op))
12001 switch (TREE_CODE (op))
12003 case ARRAY_REF:
12004 case ARRAY_RANGE_REF:
12005 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12006 || TREE_OPERAND (op, 2) != NULL_TREE
12007 || TREE_OPERAND (op, 3) != NULL_TREE)
12008 return NULL;
12009 break;
12011 case COMPONENT_REF:
12012 if (TREE_OPERAND (op, 2) != NULL_TREE)
12013 return NULL;
12014 break;
12016 default:;
12018 op = TREE_OPERAND (op, 0);
12021 return op;
12024 static GTY(()) tree gcc_eh_personality_decl;
12026 /* Return the GCC personality function decl. */
12028 tree
12029 lhd_gcc_personality (void)
12031 if (!gcc_eh_personality_decl)
12032 gcc_eh_personality_decl = build_personality_function ("gcc");
12033 return gcc_eh_personality_decl;
12036 /* TARGET is the call target of a GIMPLE call statement
12037 (obtained by gimple_call_fn). Return true if it is
12038 an OBJ_TYPE_REF representing a virtual call of a C++ method.
12039 (As opposed to an OBJ_TYPE_REF representing ObjC calls
12040 through a cast, where the middle-end devirtualization machinery
12041 can't apply.) */
12043 bool
12044 virtual_method_call_p (const_tree target)
12046 if (TREE_CODE (target) != OBJ_TYPE_REF)
12047 return false;
12048 tree t = TREE_TYPE (target);
12049 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12050 t = TREE_TYPE (t);
12051 if (TREE_CODE (t) == FUNCTION_TYPE)
12052 return false;
12053 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12054 /* If we do not have a BINFO associated, it means that the type was built
12055 without devirtualization enabled. Do not consider this a virtual
12056 call. */
12057 if (!TYPE_BINFO (obj_type_ref_class (target)))
12058 return false;
12059 return true;
12062 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12064 tree
12065 obj_type_ref_class (const_tree ref)
12067 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12068 ref = TREE_TYPE (ref);
12069 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12070 ref = TREE_TYPE (ref);
12071 /* We look for the type THIS points to. ObjC also builds
12072 OBJ_TYPE_REF for non-method calls; their first parameter
12073 ID, however, also corresponds to the class type. */
12074 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12075 || TREE_CODE (ref) == FUNCTION_TYPE);
12076 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12077 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12078 return TREE_TYPE (ref);
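/* Small sketch combining the two helpers above (STMT is assumed to be a
   GIMPLE call statement; the wrapper itself is not a real API): return
   the class an OBJ_TYPE_REF call target refers to when the call is a
   devirtualizable C++ virtual call, and NULL_TREE otherwise.  */

static tree
example_virtual_call_class (gimple *stmt)
{
  tree fn = gimple_call_fn (stmt);
  if (fn && virtual_method_call_p (fn))
    return obj_type_ref_class (fn);
  return NULL_TREE;
}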
12081 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12083 static tree
12084 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12086 unsigned int i;
12087 tree base_binfo, b;
12089 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12090 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12091 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12092 return base_binfo;
12093 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12094 return b;
12095 return NULL;
12098 /* Try to find a base info of BINFO that would have its field decl at offset
12099 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12100 found, return it, otherwise return NULL_TREE. */
12102 tree
12103 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12105 tree type = BINFO_TYPE (binfo);
12107 while (true)
12109 HOST_WIDE_INT pos, size;
12110 tree fld;
12111 int i;
12113 if (types_same_for_odr (type, expected_type))
12114 return binfo;
12115 if (offset < 0)
12116 return NULL_TREE;
12118 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12120 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12121 continue;
12123 pos = int_bit_position (fld);
12124 size = tree_to_uhwi (DECL_SIZE (fld));
12125 if (pos <= offset && (pos + size) > offset)
12126 break;
12128 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12129 return NULL_TREE;
12131 /* Offset 0 indicates the primary base, whose vtable contents are
12132 represented in the binfo for the derived class. */
12133 else if (offset != 0)
12135 tree found_binfo = NULL, base_binfo;
12136 /* Offsets in BINFO are in bytes relative to the whole structure
12137 while POS is in bits relative to the containing field. */
12138 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12139 / BITS_PER_UNIT);
12141 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12142 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12143 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12145 found_binfo = base_binfo;
12146 break;
12148 if (found_binfo)
12149 binfo = found_binfo;
12150 else
12151 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12152 binfo_offset);
12155 type = TREE_TYPE (fld);
12156 offset -= pos;
12160 /* Returns true if X is a typedef decl. */
12162 bool
12163 is_typedef_decl (const_tree x)
12165 return (x && TREE_CODE (x) == TYPE_DECL
12166 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12169 /* Returns true iff TYPE is a type variant created for a typedef. */
12171 bool
12172 typedef_variant_p (const_tree type)
12174 return is_typedef_decl (TYPE_NAME (type));
12177 /* Warn about a use of an identifier which was marked deprecated. */
12178 void
12179 warn_deprecated_use (tree node, tree attr)
12181 const char *msg;
12183 if (node == 0 || !warn_deprecated_decl)
12184 return;
12186 if (!attr)
12188 if (DECL_P (node))
12189 attr = DECL_ATTRIBUTES (node);
12190 else if (TYPE_P (node))
12192 tree decl = TYPE_STUB_DECL (node);
12193 if (decl)
12194 attr = lookup_attribute ("deprecated",
12195 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12199 if (attr)
12200 attr = lookup_attribute ("deprecated", attr);
12202 if (attr)
12203 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12204 else
12205 msg = NULL;
12207 bool w;
12208 if (DECL_P (node))
12210 if (msg)
12211 w = warning (OPT_Wdeprecated_declarations,
12212 "%qD is deprecated: %s", node, msg);
12213 else
12214 w = warning (OPT_Wdeprecated_declarations,
12215 "%qD is deprecated", node);
12216 if (w)
12217 inform (DECL_SOURCE_LOCATION (node), "declared here");
12219 else if (TYPE_P (node))
12221 tree what = NULL_TREE;
12222 tree decl = TYPE_STUB_DECL (node);
12224 if (TYPE_NAME (node))
12226 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12227 what = TYPE_NAME (node);
12228 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12229 && DECL_NAME (TYPE_NAME (node)))
12230 what = DECL_NAME (TYPE_NAME (node));
12233 if (decl)
12235 if (what)
12237 if (msg)
12238 w = warning (OPT_Wdeprecated_declarations,
12239 "%qE is deprecated: %s", what, msg);
12240 else
12241 w = warning (OPT_Wdeprecated_declarations,
12242 "%qE is deprecated", what);
12244 else
12246 if (msg)
12247 w = warning (OPT_Wdeprecated_declarations,
12248 "type is deprecated: %s", msg);
12249 else
12250 w = warning (OPT_Wdeprecated_declarations,
12251 "type is deprecated");
12253 if (w)
12254 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12256 else
12258 if (what)
12260 if (msg)
12261 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12262 what, msg);
12263 else
12264 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12266 else
12268 if (msg)
12269 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12270 msg);
12271 else
12272 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12278 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12279 somewhere in it. */
12281 bool
12282 contains_bitfld_component_ref_p (const_tree ref)
12284 while (handled_component_p (ref))
12286 if (TREE_CODE (ref) == COMPONENT_REF
12287 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12288 return true;
12289 ref = TREE_OPERAND (ref, 0);
12292 return false;
12295 /* Try to determine whether a TRY_CATCH expression can fall through.
12296 This is a subroutine of block_may_fallthru. */
12298 static bool
12299 try_catch_may_fallthru (const_tree stmt)
12301 tree_stmt_iterator i;
12303 /* If the TRY block can fall through, the whole TRY_CATCH can
12304 fall through. */
12305 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12306 return true;
12308 i = tsi_start (TREE_OPERAND (stmt, 1));
12309 switch (TREE_CODE (tsi_stmt (i)))
12311 case CATCH_EXPR:
12312 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12313 catch expression and a body. The whole TRY_CATCH may fall
12314 through iff any of the catch bodies falls through. */
12315 for (; !tsi_end_p (i); tsi_next (&i))
12317 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12318 return true;
12320 return false;
12322 case EH_FILTER_EXPR:
12323 /* The exception filter expression only matters if there is an
12324 exception. If the exception does not match EH_FILTER_TYPES,
12325 we will execute EH_FILTER_FAILURE, and we will fall through
12326 if that falls through. If the exception does match
12327 EH_FILTER_TYPES, the stack unwinder will continue up the
12328 stack, so we will not fall through. We don't know whether we
12329 will throw an exception which matches EH_FILTER_TYPES or not,
12330 so we just ignore EH_FILTER_TYPES and assume that we might
12331 throw an exception which doesn't match. */
12332 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12334 default:
12335 /* This case represents statements to be executed when an
12336 exception occurs. Those statements are implicitly followed
12337 by a RESX statement to resume execution after the exception.
12338 So in this case the TRY_CATCH never falls through. */
12339 return false;
12343 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12344 need not be 100% accurate; simply be conservative and return true if we
12345 don't know. This is used only to avoid stupidly generating extra code.
12346 If we're wrong, we'll just delete the extra code later. */
12348 bool
12349 block_may_fallthru (const_tree block)
12351 /* This CONST_CAST is okay because expr_last returns its argument
12352 unmodified and we assign it to a const_tree. */
12353 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12355 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12357 case GOTO_EXPR:
12358 case RETURN_EXPR:
12359 /* Easy cases. If the last statement of the block implies
12360 control transfer, then we can't fall through. */
12361 return false;
12363 case SWITCH_EXPR:
12364 /* If there is a default: label or case labels cover all possible
12365 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12366 to some case label in all cases and all we care is whether the
12367 SWITCH_BODY falls through. */
12368 if (SWITCH_ALL_CASES_P (stmt))
12369 return block_may_fallthru (SWITCH_BODY (stmt));
12370 return true;
12372 case COND_EXPR:
12373 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12374 return true;
12375 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12377 case BIND_EXPR:
12378 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12380 case TRY_CATCH_EXPR:
12381 return try_catch_may_fallthru (stmt);
12383 case TRY_FINALLY_EXPR:
12384 /* The finally clause is always executed after the try clause,
12385 so if it does not fall through, then the try-finally will not
12386 fall through. Otherwise, if the try clause does not fall
12387 through, then when the finally clause falls through it will
12388 resume execution wherever the try clause was going. So the
12389 whole try-finally will only fall through if both the try
12390 clause and the finally clause fall through. */
12391 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12392 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12394 case MODIFY_EXPR:
12395 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12396 stmt = TREE_OPERAND (stmt, 1);
12397 else
12398 return true;
12399 /* FALLTHRU */
12401 case CALL_EXPR:
12402 /* Functions that do not return do not fall through. */
12403 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12405 case CLEANUP_POINT_EXPR:
12406 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12408 case TARGET_EXPR:
12409 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12411 case ERROR_MARK:
12412 return true;
12414 default:
12415 return lang_hooks.block_may_fallthru (stmt);
12419 /* True if we are using EH to handle cleanups. */
12420 static bool using_eh_for_cleanups_flag = false;
12422 /* This routine is called from front ends to indicate that EH should be used for
12423 cleanups. */
12424 void
12425 using_eh_for_cleanups (void)
12427 using_eh_for_cleanups_flag = true;
12430 /* Query whether EH is used for cleanups. */
12431 bool
12432 using_eh_for_cleanups_p (void)
12434 return using_eh_for_cleanups_flag;
12437 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12438 const char *
12439 get_tree_code_name (enum tree_code code)
12441 const char *invalid = "<invalid tree code>";
12443 if (code >= MAX_TREE_CODES)
12444 return invalid;
12446 return tree_code_name[code];
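/* Sketch of typical debugging use (the helper name is invented): print
   the kind of tree node T without risking an out-of-bounds access on a
   corrupted code, since get_tree_code_name range-checks its argument.  */

static void
example_print_code_name (const_tree t)
{
  fprintf (stderr, "tree code: %s\n", get_tree_code_name (TREE_CODE (t)));
}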
12449 /* Drops the TREE_OVERFLOW flag from T. */
12451 tree
12452 drop_tree_overflow (tree t)
12454 gcc_checking_assert (TREE_OVERFLOW (t));
12456 /* For tree codes with a sharing machinery re-build the result. */
12457 if (TREE_CODE (t) == INTEGER_CST)
12458 return wide_int_to_tree (TREE_TYPE (t), wi::to_wide (t));
12460 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12461 and canonicalize the result. */
12462 if (TREE_CODE (t) == VECTOR_CST)
12464 tree_vector_builder builder;
12465 builder.new_unary_operation (TREE_TYPE (t), t, true);
12466 unsigned int count = builder.encoded_nelts ();
12467 for (unsigned int i = 0; i < count; ++i)
12469 tree elt = VECTOR_CST_ELT (t, i);
12470 if (TREE_OVERFLOW (elt))
12471 elt = drop_tree_overflow (elt);
12472 builder.quick_push (elt);
12474 return builder.build ();
12477 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12478 and drop the flag. */
12479 t = copy_node (t);
12480 TREE_OVERFLOW (t) = 0;
12482 /* For constants that contain nested constants, drop the flag
12483 from those as well. */
12484 if (TREE_CODE (t) == COMPLEX_CST)
12486 if (TREE_OVERFLOW (TREE_REALPART (t)))
12487 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12488 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12489 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12492 return t;
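/* A small sketch of the usual calling convention for drop_tree_overflow
   (CST is assumed to be a constant produced by some folding step):
   TREE_OVERFLOW is only meaningful on constants, so callers test it
   before asking for a clean copy.  */

static tree
example_constant_without_overflow (tree cst)
{
  if (CONSTANT_CLASS_P (cst) && TREE_OVERFLOW (cst))
    cst = drop_tree_overflow (cst);
  return cst;
}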
12495 /* Given a memory reference expression T, return its base address.
12496 The base address of a memory reference expression is the main
12497 object being referenced. For instance, the base address for
12498 'array[i].fld[j]' is 'array'. You can think of this as stripping
12499 away the offset part from a memory address.
12501 This function calls handled_component_p to strip away all the inner
12502 parts of the memory reference until it reaches the base object. */
12504 tree
12505 get_base_address (tree t)
12507 while (handled_component_p (t))
12508 t = TREE_OPERAND (t, 0);
12510 if ((TREE_CODE (t) == MEM_REF
12511 || TREE_CODE (t) == TARGET_MEM_REF)
12512 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12513 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12515 /* ??? Either the alias oracle or all callers need to properly deal
12516 with WITH_SIZE_EXPRs before we can look through those. */
12517 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12518 return NULL_TREE;
12520 return t;
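/* Minimal sketch of the pattern described above (REF1 and REF2 are
   assumed to be reference trees such as ARRAY_REFs or COMPONENT_REFs):
   two references can only overlap if they share the same base object.
   Real alias queries go through the alias oracle; this is only an
   illustration of get_base_address.  */

static bool
example_same_base_object_p (tree ref1, tree ref2)
{
  tree base1 = get_base_address (ref1);
  tree base2 = get_base_address (ref2);
  return base1 && base2 && operand_equal_p (base1, base2, 0);
}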
12523 /* Return a tree of sizetype representing the size, in bytes, of the element
12524 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12526 tree
12527 array_ref_element_size (tree exp)
12529 tree aligned_size = TREE_OPERAND (exp, 3);
12530 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12531 location_t loc = EXPR_LOCATION (exp);
12533 /* If a size was specified in the ARRAY_REF, it's the size measured
12534 in alignment units of the element type. So multiply by that value. */
12535 if (aligned_size)
12537 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12538 sizetype from another type of the same width and signedness. */
12539 if (TREE_TYPE (aligned_size) != sizetype)
12540 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12541 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12542 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12545 /* Otherwise, take the size from that of the element type. Substitute
12546 any PLACEHOLDER_EXPR that we have. */
12547 else
12548 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12551 /* Return a tree representing the lower bound of the array mentioned in
12552 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12554 tree
12555 array_ref_low_bound (tree exp)
12557 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12559 /* If a lower bound is specified in EXP, use it. */
12560 if (TREE_OPERAND (exp, 2))
12561 return TREE_OPERAND (exp, 2);
12563 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12564 substituting for a PLACEHOLDER_EXPR as needed. */
12565 if (domain_type && TYPE_MIN_VALUE (domain_type))
12566 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12568 /* Otherwise, return a zero of the appropriate type. */
12569 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
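/* Sketch (EXP is assumed to be an ARRAY_REF): the index stored in the
   reference is relative to the array's lower bound, so a zero-based
   index is obtained by subtracting array_ref_low_bound, converted to
   the index type first.  */

static tree
example_zero_based_index (tree exp)
{
  tree index = TREE_OPERAND (exp, 1);
  tree low = fold_convert (TREE_TYPE (index), array_ref_low_bound (exp));
  return fold_build2 (MINUS_EXPR, TREE_TYPE (index), index, low);
}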
12572 /* Return a tree representing the upper bound of the array mentioned in
12573 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12575 tree
12576 array_ref_up_bound (tree exp)
12578 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12580 /* If there is a domain type and it has an upper bound, use it, substituting
12581 for a PLACEHOLDER_EXPR as needed. */
12582 if (domain_type && TYPE_MAX_VALUE (domain_type))
12583 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12585 /* Otherwise fail. */
12586 return NULL_TREE;
12589 /* Returns true if REF is an array reference or a component reference
12590 to an array at the end of a structure.
12591 If this is the case, the array may be allocated larger
12592 than its upper bound implies. */
12594 bool
12595 array_at_struct_end_p (tree ref)
12597 tree atype;
12599 if (TREE_CODE (ref) == ARRAY_REF
12600 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12602 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12603 ref = TREE_OPERAND (ref, 0);
12605 else if (TREE_CODE (ref) == COMPONENT_REF
12606 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12607 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12608 else
12609 return false;
12611 if (TREE_CODE (ref) == STRING_CST)
12612 return false;
12614 while (handled_component_p (ref))
12616 /* If the reference chain contains a component reference to a
12617 non-union type and another field follows, the reference
12618 is not at the end of a structure. */
12619 if (TREE_CODE (ref) == COMPONENT_REF)
12621 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12623 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12624 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12625 nextf = DECL_CHAIN (nextf);
12626 if (nextf)
12627 return false;
12630 /* If we have a multi-dimensional array we do not consider
12631 a non-innermost dimension as a flexible array if the whole
12632 multi-dimensional array is at struct end.
12633 The same applies to an array of aggregates with a trailing
12634 array member. */
12635 else if (TREE_CODE (ref) == ARRAY_REF)
12636 return false;
12637 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12639 /* If we view an underlying object as something else, then what we
12640 have gathered up to now is what we have to rely on. */
12641 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12642 break;
12643 else
12644 gcc_unreachable ();
12646 ref = TREE_OPERAND (ref, 0);
12649 /* The array is now at struct end. Treat flexible arrays as
12650 always subject to extension, even into just padding constrained by
12651 an underlying decl. */
12652 if (! TYPE_SIZE (atype))
12653 return true;
12655 tree size = NULL;
12657 if (TREE_CODE (ref) == MEM_REF
12658 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
12660 size = TYPE_SIZE (TREE_TYPE (ref));
12661 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
12664 /* If the reference is based on a declared entity, the size of the array
12665 is constrained by its given domain. (Do not trust commons; PR 69368.) */
12666 if (DECL_P (ref)
12667 /* Be sure the size of the MEM_REF target matches. For example:
12669 char buf[10];
12670 struct foo *str = (struct foo *)&buf;
12672 str->trailing_array[2] = 1;
12674 is valid because BUF allocates enough space. */
12676 && (!size || (DECL_SIZE (ref) != NULL
12677 && operand_equal_p (DECL_SIZE (ref), size, 0)))
12678 && !(flag_unconstrained_commons
12679 && VAR_P (ref) && DECL_COMMON (ref)))
12680 return false;
12682 return true;
12685 /* Return a tree representing the offset, in bytes, of the field referenced
12686 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12688 tree
12689 component_ref_field_offset (tree exp)
12691 tree aligned_offset = TREE_OPERAND (exp, 2);
12692 tree field = TREE_OPERAND (exp, 1);
12693 location_t loc = EXPR_LOCATION (exp);
12695 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12696 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12697 value. */
12698 if (aligned_offset)
12700 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12701 sizetype from another type of the same width and signedness. */
12702 if (TREE_TYPE (aligned_offset) != sizetype)
12703 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12704 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12705 size_int (DECL_OFFSET_ALIGN (field)
12706 / BITS_PER_UNIT));
12709 /* Otherwise, take the offset from that of the field. Substitute
12710 any PLACEHOLDER_EXPR that we have. */
12711 else
12712 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12715 /* Return the machine mode of T. For vectors, returns the mode of the
12716 inner type. The main use case is to feed the result to HONOR_NANS,
12717 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12719 machine_mode
12720 element_mode (const_tree t)
12722 if (!TYPE_P (t))
12723 t = TREE_TYPE (t);
12724 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12725 t = TREE_TYPE (t);
12726 return TYPE_MODE (t);
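/* Sketch of the HONOR_NANS use case mentioned above (EXPR may be a
   scalar, complex or vector expression; the wrapper name is invented):
   asking about the element mode avoids handing a potentially BLKmode
   vector mode to HONOR_NANS.  */

static bool
example_expr_honors_nans_p (const_tree expr)
{
  return HONOR_NANS (element_mode (expr));
}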
12729 /* Vector types need to re-check the target flags each time we report
12730 the machine mode. We need to do this because attribute target can
12731 change the result of vector_mode_supported_p and have_regs_of_mode
12732 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12733 change on a per-function basis. */
12734 /* ??? Possibly a better solution is to run through all the types
12735 referenced by a function and re-compute the TYPE_MODE once, rather
12736 than make the TYPE_MODE macro call a function. */
12738 machine_mode
12739 vector_type_mode (const_tree t)
12741 machine_mode mode;
12743 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
12745 mode = t->type_common.mode;
12746 if (VECTOR_MODE_P (mode)
12747 && (!targetm.vector_mode_supported_p (mode)
12748 || !have_regs_of_mode[mode]))
12750 scalar_int_mode innermode;
12752 /* For integers, try mapping it to a same-sized scalar mode. */
12753 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
12755 unsigned int size = (TYPE_VECTOR_SUBPARTS (t)
12756 * GET_MODE_BITSIZE (innermode));
12757 scalar_int_mode mode;
12758 if (int_mode_for_size (size, 0).exists (&mode)
12759 && have_regs_of_mode[mode])
12760 return mode;
12763 return BLKmode;
12766 return mode;
12769 /* Verify that basic properties of T match TV and thus T can be a variant of
12770 TV. TV should be the more specified variant (i.e. the main variant). */
12772 static bool
12773 verify_type_variant (const_tree t, tree tv)
12775 /* Type variants can differ by:
12777 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12778 ENCODE_QUAL_ADDR_SPACE.
12779 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
12780 in this case some values may not be set in the variant types
12781 (see TYPE_COMPLETE_P checks).
12782 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
12783 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
12784 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12785 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12786 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12787 this is necessary to make it possible to merge types from different TUs
12788 - arrays, pointers and references may have TREE_TYPE that is a variant
12789 of TREE_TYPE of their main variants.
12790 - aggregates may have new TYPE_FIELDS list that list variants of
12791 the main variant TYPE_FIELDS.
12792 - vector types may differ by TYPE_VECTOR_OPAQUE
12795 /* Convenience macro for matching individual fields. */
12796 #define verify_variant_match(flag) \
12797 do { \
12798 if (flag (tv) != flag (t)) \
12800 error ("type variant differs by " #flag "."); \
12801 debug_tree (tv); \
12802 return false; \
12804 } while (false)
12806 /* tree_base checks. */
12808 verify_variant_match (TREE_CODE);
12809 /* FIXME: Ada builds non-artificial variants of artificial types. */
12810 if (TYPE_ARTIFICIAL (tv) && 0)
12811 verify_variant_match (TYPE_ARTIFICIAL);
12812 if (POINTER_TYPE_P (tv))
12813 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12814 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
12815 verify_variant_match (TYPE_UNSIGNED);
12816 verify_variant_match (TYPE_PACKED);
12817 if (TREE_CODE (t) == REFERENCE_TYPE)
12818 verify_variant_match (TYPE_REF_IS_RVALUE);
12819 if (AGGREGATE_TYPE_P (t))
12820 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
12821 else
12822 verify_variant_match (TYPE_SATURATING);
12823 /* FIXME: This check triggers during the libstdc++ build. */
12824 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12825 verify_variant_match (TYPE_FINAL_P);
12827 /* tree_type_common checks. */
12829 if (COMPLETE_TYPE_P (t))
12831 verify_variant_match (TYPE_MODE);
12832 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
12833 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
12834 verify_variant_match (TYPE_SIZE);
12835 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
12836 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
12837 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
12839 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
12840 TYPE_SIZE_UNIT (tv), 0));
12841 error ("type variant has different TYPE_SIZE_UNIT");
12842 debug_tree (tv);
12843 error ("type variant's TYPE_SIZE_UNIT");
12844 debug_tree (TYPE_SIZE_UNIT (tv));
12845 error ("type's TYPE_SIZE_UNIT");
12846 debug_tree (TYPE_SIZE_UNIT (t));
12847 return false;
12850 verify_variant_match (TYPE_PRECISION);
12851 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12852 if (RECORD_OR_UNION_TYPE_P (t))
12853 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12854 else if (TREE_CODE (t) == ARRAY_TYPE)
12855 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12856 /* During LTO we merge variant lists from different translation units
12857 that may differ by TYPE_CONTEXT, which in turn may point
12858 to TRANSLATION_UNIT_DECL.
12859 Ada also builds variants of types with different TYPE_CONTEXT. */
12860 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12861 verify_variant_match (TYPE_CONTEXT);
12862 verify_variant_match (TYPE_STRING_FLAG);
12863 if (TYPE_ALIAS_SET_KNOWN_P (t))
12865 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
12866 debug_tree (tv);
12867 return false;
12870 /* tree_type_non_common checks. */
12872 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12873 and dangles the pointer from time to time. */
12874 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12875 && (in_lto_p || !TYPE_VFIELD (tv)
12876 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12878 error ("type variant has different TYPE_VFIELD");
12879 debug_tree (tv);
12880 return false;
12882 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
12883 || TREE_CODE (t) == INTEGER_TYPE
12884 || TREE_CODE (t) == BOOLEAN_TYPE
12885 || TREE_CODE (t) == REAL_TYPE
12886 || TREE_CODE (t) == FIXED_POINT_TYPE)
12888 verify_variant_match (TYPE_MAX_VALUE);
12889 verify_variant_match (TYPE_MIN_VALUE);
12891 if (TREE_CODE (t) == METHOD_TYPE)
12892 verify_variant_match (TYPE_METHOD_BASETYPE);
12893 if (TREE_CODE (t) == OFFSET_TYPE)
12894 verify_variant_match (TYPE_OFFSET_BASETYPE);
12895 if (TREE_CODE (t) == ARRAY_TYPE)
12896 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
12897 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
12898 or even in the type's main variant. This is needed to make bootstrap pass
12899 and the bug seems new in GCC 5.
12900 The C++ FE should be updated to make this consistent and we should check
12901 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
12902 is a match with the main variant.
12904 Also disable the check for Java for now because of a parser hack that
12905 first builds a dummy BINFO and then sometimes replaces it with the real
12906 BINFO in some of the copies. */
12907 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
12908 && TYPE_BINFO (t) != TYPE_BINFO (tv)
12909 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
12910 Since there is no cheap way to tell a C++ type from a Java type without
12911 LTO, do the checking at LTO time only. */
12912 && (in_lto_p && odr_type_p (t)))
12914 error ("type variant has different TYPE_BINFO");
12915 debug_tree (tv);
12916 error ("type variant's TYPE_BINFO");
12917 debug_tree (TYPE_BINFO (tv));
12918 error ("type's TYPE_BINFO");
12919 debug_tree (TYPE_BINFO (t));
12920 return false;
12923 /* Check various uses of TYPE_VALUES_RAW. */
12924 if (TREE_CODE (t) == ENUMERAL_TYPE)
12925 verify_variant_match (TYPE_VALUES);
12926 else if (TREE_CODE (t) == ARRAY_TYPE)
12927 verify_variant_match (TYPE_DOMAIN);
12928 /* Permit incomplete variants of complete type. While FEs may complete
12929 all variants, this does not happen for C++ templates in all cases. */
12930 else if (RECORD_OR_UNION_TYPE_P (t)
12931 && COMPLETE_TYPE_P (t)
12932 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
12934 tree f1, f2;
12936 /* Fortran builds qualified variants as new records with items of
12937 qualified type. Verify that they look the same. */
12938 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
12939 f1 && f2;
12940 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
12941 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
12942 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
12943 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
12944 /* FIXME: gfc_nonrestricted_type builds all types as variants
12945 with the exception of pointer types. It deeply copies the type,
12946 which means that we may end up with a variant type
12947 referring to a non-variant pointer. We may change it to
12948 produce types as variants, too, like
12949 objc_get_protocol_qualified_type does. */
12950 && !POINTER_TYPE_P (TREE_TYPE (f1)))
12951 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
12952 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
12953 break;
12954 if (f1 || f2)
12956 error ("type variant has different TYPE_FIELDS");
12957 debug_tree (tv);
12958 error ("first mismatch is field");
12959 debug_tree (f1);
12960 error ("and field");
12961 debug_tree (f2);
12962 return false;
12965 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
12966 verify_variant_match (TYPE_ARG_TYPES);
12967 /* For C++ the qualified variant of an array type is really an array type
12968 of the qualified TREE_TYPE.
12969 ObjC builds variants of pointer types where the pointed-to type is a
12970 variant, too, in objc_get_protocol_qualified_type. */
12971 if (TREE_TYPE (t) != TREE_TYPE (tv)
12972 && ((TREE_CODE (t) != ARRAY_TYPE
12973 && !POINTER_TYPE_P (t))
12974 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
12975 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
12977 error ("type variant has different TREE_TYPE");
12978 debug_tree (tv);
12979 error ("type variant's TREE_TYPE");
12980 debug_tree (TREE_TYPE (tv));
12981 error ("type's TREE_TYPE");
12982 debug_tree (TREE_TYPE (t));
12983 return false;
12985 if (type_with_alias_set_p (t)
12986 && !gimple_canonical_types_compatible_p (t, tv, false))
12988 error ("type is not compatible with its variant");
12989 debug_tree (tv);
12990 error ("type variant's TREE_TYPE");
12991 debug_tree (TREE_TYPE (tv));
12992 error ("type's TREE_TYPE");
12993 debug_tree (TREE_TYPE (t));
12994 return false;
12996 return true;
12997 #undef verify_variant_match
13001 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13002 the middle-end types_compatible_p function. It needs to avoid
13003 claiming types are different for types that should be treated
13004 the same with respect to TBAA. Canonical types are also used
13005 for IL consistency checks via the useless_type_conversion_p
13006 predicate which does not handle all type kinds itself but falls
13007 back to pointer-comparison of TYPE_CANONICAL for aggregates
13008 for example. */
13010 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13011 type calculation because we need to allow inter-operability between signed
13012 and unsigned variants. */
13014 bool
13015 type_with_interoperable_signedness (const_tree type)
13017 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13018 signed char and unsigned char. Similarly the Fortran FE builds
13019 C_SIZE_T as a signed type, while C defines it as unsigned. */
13021 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13022 == INTEGER_TYPE
13023 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13024 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13027 /* Return true iff T1 and T2 are structurally identical as far as TBAA
13028 is concerned.
13029 This function is used both by lto.c canonical type merging and by the
13030 verifier. If TRUST_TYPE_CANONICAL we do not look into the structure of
13031 types that have TYPE_CANONICAL defined and assume them equivalent. This
13032 is useful only for LTO because only in these cases does TYPE_CANONICAL
13033 equivalence correspond to the one defined by gimple_canonical_types_compatible_p. */
13035 bool
13036 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13037 bool trust_type_canonical)
13039 /* Type variants should be the same as the main variant. When not doing sanity
13040 checking to verify this fact, go to main variants and save some work. */
13041 if (trust_type_canonical)
13043 t1 = TYPE_MAIN_VARIANT (t1);
13044 t2 = TYPE_MAIN_VARIANT (t2);
13047 /* Check first for the obvious case of pointer identity. */
13048 if (t1 == t2)
13049 return true;
13051 /* Check that we have two types to compare. */
13052 if (t1 == NULL_TREE || t2 == NULL_TREE)
13053 return false;
13055 /* We consider complete types always compatible with incomplete types.
13056 This does not make sense for canonical type calculation and thus we
13057 need to ensure that we are never called on it.
13059 FIXME: For more correctness the function probably should have three modes
13060 1) a mode assuming that the types are complete and matching their structure
13061 2) a mode allowing incomplete types but producing equivalence classes
13062 and thus ignoring all info from complete types
13063 3) a mode allowing incomplete types to match complete ones but checking
13064 compatibility between complete types.
13066 1 and 2 can be used for canonical type calculation. 3 is the real
13067 definition of type compatibility that can be used e.g. for warnings during
13068 declaration merging. */
13070 gcc_assert (!trust_type_canonical
13071 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13072 /* If the types have been previously registered and found equal
13073 they still are. */
13075 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13076 && trust_type_canonical)
13078 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13079 it is always NULL, but it is set to non-NULL for types
13080 constructed by build_pointer_type and variants. In this case the
13081 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
13082 all pointers are considered equal). Be sure not to return false
13083 negatives. */
13084 gcc_checking_assert (canonical_type_used_p (t1)
13085 && canonical_type_used_p (t2));
13086 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13089 /* Can't be the same type if the types don't have the same code. */
13090 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13091 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13092 return false;
13094 /* Qualifiers do not matter for canonical type comparison purposes. */
13096 /* Void types and nullptr types are always the same. */
13097 if (TREE_CODE (t1) == VOID_TYPE
13098 || TREE_CODE (t1) == NULLPTR_TYPE)
13099 return true;
13101 /* Can't be the same type if they have different mode. */
13102 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13103 return false;
13105 /* Non-aggregate types can be handled cheaply. */
13106 if (INTEGRAL_TYPE_P (t1)
13107 || SCALAR_FLOAT_TYPE_P (t1)
13108 || FIXED_POINT_TYPE_P (t1)
13109 || TREE_CODE (t1) == VECTOR_TYPE
13110 || TREE_CODE (t1) == COMPLEX_TYPE
13111 || TREE_CODE (t1) == OFFSET_TYPE
13112 || POINTER_TYPE_P (t1))
13114 /* Can't be the same type if they have different precision. */
13115 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13116 return false;
13118 /* In some cases the signed and unsigned types are required to be
13119 inter-operable. */
13120 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13121 && !type_with_interoperable_signedness (t1))
13122 return false;
13124 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13125 interoperable with "signed char". Unless all frontends are revisited
13126 to agree on these types, we must ignore the flag completely. */
13128 /* The Fortran standard defines the C_PTR type, which is compatible with
13129 every C pointer. For this reason we need to glob all pointers into one.
13130 Still, pointers in different address spaces are not compatible. */
13131 if (POINTER_TYPE_P (t1))
13133 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13134 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13135 return false;
13138 /* Tail-recurse to components. */
13139 if (TREE_CODE (t1) == VECTOR_TYPE
13140 || TREE_CODE (t1) == COMPLEX_TYPE)
13141 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13142 TREE_TYPE (t2),
13143 trust_type_canonical);
13145 return true;
13148 /* Do type-specific comparisons. */
13149 switch (TREE_CODE (t1))
13151 case ARRAY_TYPE:
13152 /* Array types are the same if the element types are the same and
13153 the number of elements is the same. */
13154 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13155 trust_type_canonical)
13156 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13157 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13158 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13159 return false;
13160 else
13162 tree i1 = TYPE_DOMAIN (t1);
13163 tree i2 = TYPE_DOMAIN (t2);
13165 /* For an incomplete external array, the type domain can be
13166 NULL_TREE. Check this condition also. */
13167 if (i1 == NULL_TREE && i2 == NULL_TREE)
13168 return true;
13169 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13170 return false;
13171 else
13173 tree min1 = TYPE_MIN_VALUE (i1);
13174 tree min2 = TYPE_MIN_VALUE (i2);
13175 tree max1 = TYPE_MAX_VALUE (i1);
13176 tree max2 = TYPE_MAX_VALUE (i2);
13178 /* The minimum/maximum values have to be the same. */
13179 if ((min1 == min2
13180 || (min1 && min2
13181 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13182 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13183 || operand_equal_p (min1, min2, 0))))
13184 && (max1 == max2
13185 || (max1 && max2
13186 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13187 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13188 || operand_equal_p (max1, max2, 0)))))
13189 return true;
13190 else
13191 return false;
13195 case METHOD_TYPE:
13196 case FUNCTION_TYPE:
13197 /* Function types are the same if the return type and argument types
13198 are the same. */
13199 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13200 trust_type_canonical))
13201 return false;
13203 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13204 return true;
13205 else
13207 tree parms1, parms2;
13209 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13210 parms1 && parms2;
13211 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13213 if (!gimple_canonical_types_compatible_p
13214 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13215 trust_type_canonical))
13216 return false;
13219 if (parms1 || parms2)
13220 return false;
13222 return true;
13225 case RECORD_TYPE:
13226 case UNION_TYPE:
13227 case QUAL_UNION_TYPE:
13229 tree f1, f2;
13231 /* Don't try to compare variants of an incomplete type, before
13232 TYPE_FIELDS has been copied around. */
13233 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13234 return true;
13237 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13238 return false;
13240 /* For aggregate types, all the fields must be the same. */
13241 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13242 f1 || f2;
13243 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13245 /* Skip non-fields and zero-sized fields. */
13246 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13247 || (DECL_SIZE (f1)
13248 && integer_zerop (DECL_SIZE (f1)))))
13249 f1 = TREE_CHAIN (f1);
13250 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13251 || (DECL_SIZE (f2)
13252 && integer_zerop (DECL_SIZE (f2)))))
13253 f2 = TREE_CHAIN (f2);
13254 if (!f1 || !f2)
13255 break;
13256 /* The fields must have the same name, offset and type. */
13257 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13258 || !gimple_compare_field_offset (f1, f2)
13259 || !gimple_canonical_types_compatible_p
13260 (TREE_TYPE (f1), TREE_TYPE (f2),
13261 trust_type_canonical))
13262 return false;
13265 /* If one aggregate has more fields than the other, they
13266 are not the same. */
13267 if (f1 || f2)
13268 return false;
13270 return true;
13273 default:
13274 /* Consider all types with language specific trees in them mutually
13275 compatible. This is executed only from verify_type and false
13276 positives can be tolerated. */
13277 gcc_assert (!in_lto_p);
13278 return true;
13282 /* Verify type T. */
13284 void
13285 verify_type (const_tree t)
13287 bool error_found = false;
13288 tree mv = TYPE_MAIN_VARIANT (t);
13289 if (!mv)
13291 error ("Main variant is not defined");
13292 error_found = true;
13294 else if (mv != TYPE_MAIN_VARIANT (mv))
13296 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13297 debug_tree (mv);
13298 error_found = true;
13300 else if (t != mv && !verify_type_variant (t, mv))
13301 error_found = true;
13303 tree ct = TYPE_CANONICAL (t);
13304 if (!ct)
13306 else if (TYPE_CANONICAL (t) != ct)
13308 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13309 debug_tree (ct);
13310 error_found = true;
13312 /* Method and function types cannot be used to address memory and thus
13313 TYPE_CANONICAL really matters only for determining useless conversions.
13315 FIXME: The C++ FE produces declarations of builtin functions that are not
13316 compatible with main variants. */
13317 else if (TREE_CODE (t) == FUNCTION_TYPE)
13319 else if (t != ct
13320 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13321 with variably sized arrays because their sizes are possibly
13322 gimplified to different variables. */
13323 && !variably_modified_type_p (ct, NULL)
13324 && !gimple_canonical_types_compatible_p (t, ct, false))
13326 error ("TYPE_CANONICAL is not compatible");
13327 debug_tree (ct);
13328 error_found = true;
13331 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13332 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13334 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13335 debug_tree (ct);
13336 error_found = true;
13338 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13340 error ("TYPE_CANONICAL of main variant is not main variant");
13341 debug_tree (ct);
13342 debug_tree (TYPE_MAIN_VARIANT (ct));
13343 error_found = true;
13347 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13348 if (RECORD_OR_UNION_TYPE_P (t))
13350 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13351 and dangles the pointer from time to time. */
13352 if (TYPE_VFIELD (t)
13353 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13354 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13356 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13357 debug_tree (TYPE_VFIELD (t));
13358 error_found = true;
13361 else if (TREE_CODE (t) == POINTER_TYPE)
13363 if (TYPE_NEXT_PTR_TO (t)
13364 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13366 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13367 debug_tree (TYPE_NEXT_PTR_TO (t));
13368 error_found = true;
13371 else if (TREE_CODE (t) == REFERENCE_TYPE)
13373 if (TYPE_NEXT_REF_TO (t)
13374 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13376 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13377 debug_tree (TYPE_NEXT_REF_TO (t));
13378 error_found = true;
13381 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13382 || TREE_CODE (t) == FIXED_POINT_TYPE)
13384 /* FIXME: The following check should pass:
13385 useless_type_conversion_p (const_cast <tree> (t),
13386 TREE_TYPE (TYPE_MIN_VALUE (t)))
13387 but does not for C sizetypes in LTO. */
13390 /* Check various uses of TYPE_MAX_VALUE_RAW. */
13391 if (RECORD_OR_UNION_TYPE_P (t))
13393 if (!TYPE_BINFO (t))
13395 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13397 error ("TYPE_BINFO is not TREE_BINFO");
13398 debug_tree (TYPE_BINFO (t));
13399 error_found = true;
13401 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
13403 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13404 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13405 error_found = true;
13408 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13410 if (TYPE_METHOD_BASETYPE (t)
13411 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13412 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13414 error ("TYPE_METHOD_BASETYPE is not record nor union");
13415 debug_tree (TYPE_METHOD_BASETYPE (t));
13416 error_found = true;
13419 else if (TREE_CODE (t) == OFFSET_TYPE)
13421 if (TYPE_OFFSET_BASETYPE (t)
13422 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13423 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13425 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13426 debug_tree (TYPE_OFFSET_BASETYPE (t));
13427 error_found = true;
13430 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13431 || TREE_CODE (t) == FIXED_POINT_TYPE)
13433 /* FIXME: The following check should pass:
13434 useless_type_conversion_p (const_cast <tree> (t),
13435 TREE_TYPE (TYPE_MAX_VALUE (t)))
13436 but does not for C sizetypes in LTO. */
13438 else if (TREE_CODE (t) == ARRAY_TYPE)
13440 if (TYPE_ARRAY_MAX_SIZE (t)
13441 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13443 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13444 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13445 error_found = true;
13448 else if (TYPE_MAX_VALUE_RAW (t))
13450 error ("TYPE_MAX_VALUE_RAW non-NULL");
13451 debug_tree (TYPE_MAX_VALUE_RAW (t));
13452 error_found = true;
13455 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13457 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13458 debug_tree (TYPE_LANG_SLOT_1 (t));
13459 error_found = true;
13462 /* Check various uses of TYPE_VALUES_RAW. */
13463 if (TREE_CODE (t) == ENUMERAL_TYPE)
13464 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13466 tree value = TREE_VALUE (l);
13467 tree name = TREE_PURPOSE (l);
13469 /* The C FE produces INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
13470 CONST_DECL of ENUMERAL_TYPE. */
13471 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13473 error ("Enum value is not CONST_DECL or INTEGER_CST");
13474 debug_tree (value);
13475 debug_tree (name);
13476 error_found = true;
13478 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13479 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13481 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13482 debug_tree (value);
13483 debug_tree (name);
13484 error_found = true;
13486 if (TREE_CODE (name) != IDENTIFIER_NODE)
13488 error ("Enum value name is not IDENTIFIER_NODE");
13489 debug_tree (value);
13490 debug_tree (name);
13491 error_found = true;
13494 else if (TREE_CODE (t) == ARRAY_TYPE)
13496 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13498 error ("Array TYPE_DOMAIN is not integer type");
13499 debug_tree (TYPE_DOMAIN (t));
13500 error_found = true;
13503 else if (RECORD_OR_UNION_TYPE_P (t))
13505 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13507 error ("TYPE_FIELDS defined in incomplete type");
13508 error_found = true;
13510 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13512 /* TODO: verify properties of decls. */
13513 if (TREE_CODE (fld) == FIELD_DECL)
13515 else if (TREE_CODE (fld) == TYPE_DECL)
13517 else if (TREE_CODE (fld) == CONST_DECL)
13519 else if (VAR_P (fld))
13521 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13523 else if (TREE_CODE (fld) == USING_DECL)
13525 else if (TREE_CODE (fld) == FUNCTION_DECL)
13527 else
13529 error ("Wrong tree in TYPE_FIELDS list");
13530 debug_tree (fld);
13531 error_found = true;
13535 else if (TREE_CODE (t) == INTEGER_TYPE
13536 || TREE_CODE (t) == BOOLEAN_TYPE
13537 || TREE_CODE (t) == OFFSET_TYPE
13538 || TREE_CODE (t) == REFERENCE_TYPE
13539 || TREE_CODE (t) == NULLPTR_TYPE
13540 || TREE_CODE (t) == POINTER_TYPE)
13542 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13544 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13545 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13546 error_found = true;
13548 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13550 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13551 debug_tree (TYPE_CACHED_VALUES (t));
13552 error_found = true;
13554 /* Verify just enough of the cache to ensure that no one copied it to a new
13555 type. All copying should go through copy_node, which should clear it. */
13556 else if (TYPE_CACHED_VALUES_P (t))
13558 int i;
13559 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13560 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13561 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13563 error ("wrong TYPE_CACHED_VALUES entry");
13564 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13565 error_found = true;
13566 break;
13570 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13571 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13573 /* The C++ FE uses TREE_PURPOSE to store default arguments. */
13574 if (TREE_PURPOSE (l) && in_lto_p)
13576 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13577 debug_tree (l);
13578 error_found = true;
13580 if (!TYPE_P (TREE_VALUE (l)))
13582 error ("Wrong entry in TYPE_ARG_TYPES list");
13583 debug_tree (l);
13584 error_found = true;
13587 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13589 error ("TYPE_VALUES_RAW field is non-NULL");
13590 debug_tree (TYPE_VALUES_RAW (t));
13591 error_found = true;
13593 if (TREE_CODE (t) != INTEGER_TYPE
13594 && TREE_CODE (t) != BOOLEAN_TYPE
13595 && TREE_CODE (t) != OFFSET_TYPE
13596 && TREE_CODE (t) != REFERENCE_TYPE
13597 && TREE_CODE (t) != NULLPTR_TYPE
13598 && TREE_CODE (t) != POINTER_TYPE
13599 && TYPE_CACHED_VALUES_P (t))
13601 error ("TYPE_CACHED_VALUES_P is set while it should not");
13602 error_found = true;
13604 if (TYPE_STRING_FLAG (t)
13605 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13607 error ("TYPE_STRING_FLAG is set on wrong type code");
13608 error_found = true;
13611 /* ipa-devirt assumes that TYPE_METHOD_BASETYPE is always the
13612 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
13613 of a type. */
13614 if (TREE_CODE (t) == METHOD_TYPE
13615 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13617 error ("TYPE_METHOD_BASETYPE is not main variant");
13618 error_found = true;
13621 if (error_found)
13623 debug_tree (const_cast <tree> (t));
13624 internal_error ("verify_type failed");
13629 /* Return 1 if ARG, interpreted as signed in its precision, is known to be
13630 always non-negative, 2 if ARG is known to be always negative, or 3 if
13631 ARG may be positive or negative. */
13634 get_range_pos_neg (tree arg)
13636 if (arg == error_mark_node)
13637 return 3;
13639 int prec = TYPE_PRECISION (TREE_TYPE (arg));
13640 int cnt = 0;
13641 if (TREE_CODE (arg) == INTEGER_CST)
13643 wide_int w = wi::sext (wi::to_wide (arg), prec);
13644 if (wi::neg_p (w))
13645 return 2;
13646 else
13647 return 1;
13649 while (CONVERT_EXPR_P (arg)
13650 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
13651 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
13653 arg = TREE_OPERAND (arg, 0);
13654 /* A narrower value zero-extended into a wider type always
13655 results in a non-negative value. */
13656 if (TYPE_UNSIGNED (TREE_TYPE (arg))
13657 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
13658 return 1;
13659 prec = TYPE_PRECISION (TREE_TYPE (arg));
13660 if (++cnt > 30)
13661 return 3;
13664 if (TREE_CODE (arg) != SSA_NAME)
13665 return 3;
13666 wide_int arg_min, arg_max;
13667 while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
13669 gimple *g = SSA_NAME_DEF_STMT (arg);
13670 if (is_gimple_assign (g)
13671 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
13673 tree t = gimple_assign_rhs1 (g);
13674 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
13675 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
13677 if (TYPE_UNSIGNED (TREE_TYPE (t))
13678 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
13679 return 1;
13680 prec = TYPE_PRECISION (TREE_TYPE (t));
13681 arg = t;
13682 if (++cnt > 30)
13683 return 3;
13684 continue;
13687 return 3;
13689 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
13691 /* For unsigned values, the "positive" range comes
13692 below the "negative" range. */
13693 if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
13694 return 1;
13695 if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
13696 return 2;
13698 else
13700 if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
13701 return 1;
13702 if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
13703 return 2;
13705 return 3;
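/* Illustrative sketch (an editor's addition, not part of the original file):
   how a hypothetical caller might consume the result of get_range_pos_neg
   above.  A return of 1 means the value is known to be non-negative when
   interpreted as signed in its precision, 2 means known negative, and 3
   means nothing is known.  */
static inline bool
example_known_nonnegative_p (tree arg)
{
  return get_range_pos_neg (arg) == 1;
}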
13711 /* Return true if ARG is marked with the nonnull attribute in the
13712 current function signature. */
13714 bool
13715 nonnull_arg_p (const_tree arg)
13717 tree t, attrs, fntype;
13718 unsigned HOST_WIDE_INT arg_num;
13720 gcc_assert (TREE_CODE (arg) == PARM_DECL
13721 && (POINTER_TYPE_P (TREE_TYPE (arg))
13722 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
13724 /* The static chain decl is always non-NULL. */
13725 if (arg == cfun->static_chain_decl)
13726 return true;
13728 /* The THIS argument of a method is always non-NULL. */
13729 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13730 && arg == DECL_ARGUMENTS (cfun->decl)
13731 && flag_delete_null_pointer_checks)
13732 return true;
13734 /* Values passed by reference are always non-NULL. */
13735 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13736 && flag_delete_null_pointer_checks)
13737 return true;
13739 fntype = TREE_TYPE (cfun->decl);
13740 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13742 attrs = lookup_attribute ("nonnull", attrs);
13744 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13745 if (attrs == NULL_TREE)
13746 return false;
13748 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13749 if (TREE_VALUE (attrs) == NULL_TREE)
13750 return true;
13752 /* Get the position number for ARG in the function signature. */
13753 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13755 t = DECL_CHAIN (t), arg_num++)
13757 if (t == arg)
13758 break;
13761 gcc_assert (t == arg);
13763 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13764 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13766 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13767 return true;
13771 return false;
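/* Illustrative example (hypothetical, not part of the original file): the
   kind of declaration whose "nonnull" attribute nonnull_arg_p above reads.
   Inside the body of example_f, nonnull_arg_p would return true for the
   PARM_DECLs of A and C (argument positions 1 and 3) and false for D,
   which is not listed in the attribute.  */
extern void example_f (void *a, int b, void *c, void *d)
  __attribute__ ((nonnull (1, 3)));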
13774 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
13775 information. */
13777 location_t
13778 set_block (location_t loc, tree block)
13780 location_t pure_loc = get_pure_location (loc);
13781 source_range src_range = get_range_from_loc (line_table, loc);
13782 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
13785 location_t
13786 set_source_range (tree expr, location_t start, location_t finish)
13788 source_range src_range;
13789 src_range.m_start = start;
13790 src_range.m_finish = finish;
13791 return set_source_range (expr, src_range);
13794 location_t
13795 set_source_range (tree expr, source_range src_range)
13797 if (!EXPR_P (expr))
13798 return UNKNOWN_LOCATION;
13800 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
13801 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
13802 pure_loc,
13803 src_range,
13804 NULL);
13805 SET_EXPR_LOCATION (expr, adhoc);
13806 return adhoc;
13809 /* Return the name of combined function FN, for debugging purposes. */
13811 const char *
13812 combined_fn_name (combined_fn fn)
13814 if (builtin_fn_p (fn))
13816 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
13817 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
13819 else
13820 return internal_fn_name (as_internal_fn (fn));
13823 /* Return a bitmap with a bit set corresponding to each argument in
13824 a function call type FNTYPE declared with attribute nonnull,
13825 or null if none of the function's arguments are nonnull. The caller
13826 must free the bitmap. */
13828 bitmap
13829 get_nonnull_args (const_tree fntype)
13831 if (fntype == NULL_TREE)
13832 return NULL;
13834 tree attrs = TYPE_ATTRIBUTES (fntype);
13835 if (!attrs)
13836 return NULL;
13838 bitmap argmap = NULL;
13840 /* A function declaration can specify multiple attribute nonnull,
13841 each with zero or more arguments. The loop below creates a bitmap
13842 representing a union of all the arguments. An empty (but non-null)
13843 bitmap means that all arguments have been declared nonnull. */
13844 for ( ; attrs; attrs = TREE_CHAIN (attrs))
13846 attrs = lookup_attribute ("nonnull", attrs);
13847 if (!attrs)
13848 break;
13850 if (!argmap)
13851 argmap = BITMAP_ALLOC (NULL);
13853 if (!TREE_VALUE (attrs))
13855 /* Clear the bitmap in case a previous attribute nonnull
13856 set it and this one overrides it for all arguments. */
13857 bitmap_clear (argmap);
13858 return argmap;
13861 /* Iterate over the indices of the arguments declared nonnull
13862 and set a bit for each. */
13863 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
13865 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
13866 bitmap_set_bit (argmap, val);
13870 return argmap;
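/* Illustrative sketch (hypothetical, not part of the original file): how a
   caller might query the bitmap returned by get_nonnull_args above.  An
   empty but non-null bitmap means every argument was declared nonnull;
   otherwise the set bits are zero-based argument positions.  The caller
   owns the bitmap and must free it.  */
static inline bool
example_arg_declared_nonnull_p (const_tree fntype, unsigned int argno)
{
  bitmap nonnull_args = get_nonnull_args (fntype);
  if (!nonnull_args)
    return false;
  bool result = (bitmap_empty_p (nonnull_args)
		 || bitmap_bit_p (nonnull_args, argno));
  BITMAP_FREE (nonnull_args);
  return result;
}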
13873 /* Return true if TYPE is an empty type: a structure, union, or array whose
13874 subobjects (recursively) are themselves all of empty type. */
13876 static bool
13877 default_is_empty_type (tree type)
13879 if (RECORD_OR_UNION_TYPE_P (type))
13881 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
13882 if (TREE_CODE (field) == FIELD_DECL
13883 && !DECL_PADDING_P (field)
13884 && !default_is_empty_type (TREE_TYPE (field)))
13885 return false;
13886 return true;
13888 else if (TREE_CODE (type) == ARRAY_TYPE)
13889 return (integer_minus_onep (array_type_nelts (type))
13890 || TYPE_DOMAIN (type) == NULL_TREE
13891 || default_is_empty_type (TREE_TYPE (type)));
13892 return false;
13895 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
13896 that should not be passed on the stack. */
13898 bool
13899 default_is_empty_record (const_tree type)
13901 if (!abi_version_at_least (12))
13902 return false;
13904 if (type == error_mark_node)
13905 return false;
13907 if (TREE_ADDRESSABLE (type))
13908 return false;
13910 return default_is_empty_type (TYPE_MAIN_VARIANT (type));
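/* Illustrative example (hypothetical, not part of the original file): C++
   types that default_is_empty_type above would classify.  A struct with no
   data members, a struct whose only members are themselves empty, and
   arrays of such types are empty; any type with a real data member is not.
   Per the comment above, with ABI version 12 or later such empty records
   are not passed on the stack.  */
struct example_empty {};
struct example_also_empty { struct example_empty e; };	/* still empty */
struct example_not_empty { int i; };			/* not empty */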
13913 /* Like int_size_in_bytes, but handle empty records specially. */
13915 HOST_WIDE_INT
13916 arg_int_size_in_bytes (const_tree type)
13918 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
13921 /* Like size_in_bytes, but handle empty records specially. */
13923 tree
13924 arg_size_in_bytes (const_tree type)
13926 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
13929 /* Return true if an expression with CODE has to have the same result type as
13930 its first operand. */
13932 bool
13933 expr_type_first_operand_type_p (tree_code code)
13935 switch (code)
13937 case NEGATE_EXPR:
13938 case ABS_EXPR:
13939 case BIT_NOT_EXPR:
13940 case PAREN_EXPR:
13941 case CONJ_EXPR:
13943 case PLUS_EXPR:
13944 case MINUS_EXPR:
13945 case MULT_EXPR:
13946 case TRUNC_DIV_EXPR:
13947 case CEIL_DIV_EXPR:
13948 case FLOOR_DIV_EXPR:
13949 case ROUND_DIV_EXPR:
13950 case TRUNC_MOD_EXPR:
13951 case CEIL_MOD_EXPR:
13952 case FLOOR_MOD_EXPR:
13953 case ROUND_MOD_EXPR:
13954 case RDIV_EXPR:
13955 case EXACT_DIV_EXPR:
13956 case MIN_EXPR:
13957 case MAX_EXPR:
13958 case BIT_IOR_EXPR:
13959 case BIT_XOR_EXPR:
13960 case BIT_AND_EXPR:
13962 case LSHIFT_EXPR:
13963 case RSHIFT_EXPR:
13964 case LROTATE_EXPR:
13965 case RROTATE_EXPR:
13966 return true;
13968 default:
13969 return false;
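/* Illustrative sketch (hypothetical, not part of the original file): how a
   caller might rely on expr_type_first_operand_type_p above when rebuilding
   a binary expression, since for these codes the result type must match the
   type of the first operand (for shifts, for example, the second operand
   may have a different type).  */
static inline tree
example_rebuild_binary_expr (tree_code code, tree op0, tree op1)
{
  gcc_checking_assert (expr_type_first_operand_type_p (code));
  return build2 (code, TREE_TYPE (op0), op0, op1);
}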
13973 /* List of pointer types used to declare builtins before we have seen their
13974 real declaration.
13976 Keep the size up to date in tree.h ! */
13977 const builtin_structptr_type builtin_structptr_types[6] =
13979 { fileptr_type_node, ptr_type_node, "FILE" },
13980 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
13981 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
13982 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
13983 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
13984 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
13987 #if CHECKING_P
13989 namespace selftest {
13991 /* Selftests for tree. */
13993 /* Verify that integer constants are sane. */
13995 static void
13996 test_integer_constants ()
13998 ASSERT_TRUE (integer_type_node != NULL);
13999 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14001 tree type = integer_type_node;
14003 tree zero = build_zero_cst (type);
14004 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14005 ASSERT_EQ (type, TREE_TYPE (zero));
14007 tree one = build_int_cst (type, 1);
14008 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14009 ASSERT_EQ (type, TREE_TYPE (one));
14012 /* Verify identifiers. */
14014 static void
14015 test_identifiers ()
14017 tree identifier = get_identifier ("foo");
14018 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14019 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14022 /* Verify LABEL_DECL. */
14024 static void
14025 test_labels ()
14027 tree identifier = get_identifier ("err");
14028 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14029 identifier, void_type_node);
14030 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14031 ASSERT_FALSE (FORCED_LABEL (label_decl));
14034 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14035 are given by VALS. */
14037 static tree
14038 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
14040 gcc_assert (vals.length () == TYPE_VECTOR_SUBPARTS (type));
14041 tree_vector_builder builder (type, vals.length (), 1);
14042 builder.splice (vals);
14043 return builder.build ();
14046 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14048 static void
14049 check_vector_cst (vec<tree> expected, tree actual)
14051 ASSERT_EQ (expected.length (), TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
14052 for (unsigned int i = 0; i < expected.length (); ++i)
14053 ASSERT_EQ (wi::to_wide (expected[i]),
14054 wi::to_wide (vector_cst_elt (actual, i)));
14057 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14058 and that its elements match EXPECTED. */
14060 static void
14061 check_vector_cst_duplicate (vec<tree> expected, tree actual,
14062 unsigned int npatterns)
14064 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14065 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
14066 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
14067 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
14068 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14069 check_vector_cst (expected, actual);
14072 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14073 and NPATTERNS background elements, and that its elements match
14074 EXPECTED. */
14076 static void
14077 check_vector_cst_fill (vec<tree> expected, tree actual,
14078 unsigned int npatterns)
14080 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14081 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
14082 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
14083 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14084 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14085 check_vector_cst (expected, actual);
14088 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14089 and that its elements match EXPECTED. */
14091 static void
14092 check_vector_cst_stepped (vec<tree> expected, tree actual,
14093 unsigned int npatterns)
14095 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14096 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
14097 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
14098 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14099 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
14100 check_vector_cst (expected, actual);
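/* Illustrative sketch (hypothetical, not part of the original selftests and
   not wired into tree_c_tests below): how the encoded form of a stepped
   VECTOR_CST can be inspected directly with VECTOR_CST_ENCODED_ELT.  A
   linear series such as { 0, 1, 2, ... } is encoded as a single pattern of
   three elements; the remaining elements are implicit.  */
static void ATTRIBUTE_UNUSED
example_vector_cst_encoding ()
{
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);
  /* Build { 0, 1, 2, 3, 4, 5, 6, 7 } from one three-element pattern.  */
  tree_vector_builder builder (vector_type, 1, 3);
  for (unsigned int i = 0; i < 3; ++i)
    builder.quick_push (build_int_cst (element_type, i));
  tree v = builder.build ();
  ASSERT_EQ (1, VECTOR_CST_NPATTERNS (v));
  ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (v));
  ASSERT_TRUE (VECTOR_CST_STEPPED_P (v));
  /* Only the first three elements are stored explicitly; they can be read
     back with VECTOR_CST_ENCODED_ELT.  */
  for (unsigned int i = 0; i < vector_cst_encoded_nelts (v); ++i)
    ASSERT_EQ (wi::to_wide (vector_cst_elt (v, i)),
	       wi::to_wide (VECTOR_CST_ENCODED_ELT (v, i)));
}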
14103 /* Test the creation of VECTOR_CSTs. */
14105 static void
14106 test_vector_cst_patterns ()
14108 auto_vec<tree, 8> elements (8);
14109 elements.quick_grow (8);
14110 tree element_type = build_nonstandard_integer_type (16, true);
14111 tree vector_type = build_vector_type (element_type, 8);
14113 /* Test a simple linear series with a base of 0 and a step of 1:
14114 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
14115 for (unsigned int i = 0; i < 8; ++i)
14116 elements[i] = build_int_cst (element_type, i);
14117 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 1);
14119 /* Try the same with the first element replaced by 100:
14120 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
14121 elements[0] = build_int_cst (element_type, 100);
14122 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 1);
14124 /* Try a series that wraps around.
14125 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
14126 for (unsigned int i = 1; i < 8; ++i)
14127 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
14128 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 1);
14130 /* Try a downward series:
14131 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
14132 for (unsigned int i = 1; i < 8; ++i)
14133 elements[i] = build_int_cst (element_type, 80 - i);
14134 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 1);
14136 /* Try two interleaved series with different bases and steps:
14137 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
14138 elements[1] = build_int_cst (element_type, 53);
14139 for (unsigned int i = 2; i < 8; i += 2)
14141 elements[i] = build_int_cst (element_type, 70 - i * 2);
14142 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
14144 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 2);
14146 /* Try a duplicated value:
14147 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
14148 for (unsigned int i = 1; i < 8; ++i)
14149 elements[i] = elements[0];
14150 check_vector_cst_duplicate (elements,
14151 build_vector (vector_type, elements), 1);
14153 /* Try an interleaved duplicated value:
14154 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
14155 elements[1] = build_int_cst (element_type, 55);
14156 for (unsigned int i = 2; i < 8; ++i)
14157 elements[i] = elements[i - 2];
14158 check_vector_cst_duplicate (elements,
14159 build_vector (vector_type, elements), 2);
14161 /* Try a duplicated value with 2 exceptions
14162 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
14163 elements[0] = build_int_cst (element_type, 41);
14164 elements[1] = build_int_cst (element_type, 97);
14165 check_vector_cst_fill (elements, build_vector (vector_type, elements), 2);
14167 /* Try with and without a step
14168 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
14169 for (unsigned int i = 3; i < 8; i += 2)
14170 elements[i] = build_int_cst (element_type, i * 7);
14171 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 2);
14173 /* Try a fully-general constant:
14174 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
14175 elements[5] = build_int_cst (element_type, 9990);
14176 check_vector_cst_fill (elements, build_vector (vector_type, elements), 4);
14179 /* Run all of the selftests within this file. */
14181 void
14182 tree_c_tests ()
14184 test_integer_constants ();
14185 test_identifiers ();
14186 test_labels ();
14187 test_vector_cst_patterns ();
14190 } // namespace selftest
14192 #endif /* CHECKING_P */
14194 #include "gt-tree.h"