1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "params.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
60 #include "except.h"
61 #include "builtins.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
64 #include "selftest.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "rtl.h"
68 #include "regs.h"
69 #include "tree-vector-builder.h"
71 /* Tree code classes. */
73 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
74 #define END_OF_BASE_TREE_CODES tcc_exceptional,
76 const enum tree_code_class tree_code_type[] = {
77 #include "all-tree.def"
80 #undef DEFTREECODE
81 #undef END_OF_BASE_TREE_CODES
83 /* Table indexed by tree code giving number of expression
84 operands beyond the fixed part of the node structure.
85 Not used for types or decls. */
87 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
88 #define END_OF_BASE_TREE_CODES 0,
90 const unsigned char tree_code_length[] = {
91 #include "all-tree.def"
94 #undef DEFTREECODE
95 #undef END_OF_BASE_TREE_CODES
97 /* Names of tree components.
98 Used for printing out the tree and error messages. */
99 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
100 #define END_OF_BASE_TREE_CODES "@dummy",
102 static const char *const tree_code_name[] = {
103 #include "all-tree.def"
106 #undef DEFTREECODE
107 #undef END_OF_BASE_TREE_CODES
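/* Illustrative sketch of how the three tables above line up: a tree.def
   entry such as

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   contributes "tcc_binary," to tree_code_type, "2," to tree_code_length
   and "plus_expr" to tree_code_name, so the arrays stay parallel by
   construction.  */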
109 /* Each tree code class has an associated string representation.
110 These must correspond to the tree_code_class entries. */
112 const char *const tree_code_class_strings[] =
114 "exceptional",
115 "constant",
116 "type",
117 "declaration",
118 "reference",
119 "comparison",
120 "unary",
121 "binary",
122 "statement",
123 "vl_exp",
124 "expression"
127 /* obstack.[ch] explicitly declined to prototype this. */
128 extern int _obstack_allocated_p (struct obstack *h, void *obj);
130 /* Statistics-gathering stuff. */
132 static int tree_code_counts[MAX_TREE_CODES];
133 int tree_node_counts[(int) all_kinds];
134 int tree_node_sizes[(int) all_kinds];
136 /* Keep in sync with tree.h:enum tree_node_kind. */
137 static const char * const tree_node_kind_names[] = {
138 "decls",
139 "types",
140 "blocks",
141 "stmts",
142 "refs",
143 "exprs",
144 "constants",
145 "identifiers",
146 "vecs",
147 "binfos",
148 "ssa names",
149 "constructors",
150 "random kinds",
151 "lang_decl kinds",
152 "lang_type kinds",
153 "omp clauses",
156 /* Unique id for next decl created. */
157 static GTY(()) int next_decl_uid;
158 /* Unique id for next type created. */
159 static GTY(()) unsigned next_type_uid = 1;
160 /* Unique id for next debug decl created. Use negative numbers,
161 to catch erroneous uses. */
162 static GTY(()) int next_debug_decl_uid;
164 /* Since we cannot rehash a type after it is in the table, we have to
165 keep the hash code. */
167 struct GTY((for_user)) type_hash {
168 unsigned long hash;
169 tree type;
172 /* Initial size of the hash table (rounded to next prime). */
173 #define TYPE_HASH_INITIAL_SIZE 1000
175 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
177 static hashval_t hash (type_hash *t) { return t->hash; }
178 static bool equal (type_hash *a, type_hash *b);
180 static int
181 keep_cache_entry (type_hash *&t)
183 return ggc_marked_p (t->type);
187 /* Now here is the hash table. When recording a type, it is added to
188 the slot whose index is the hash code. Note that the hash table is
189 used for several kinds of types (function types, array types and
190 array index range types, for now). While all these live in the
191 same table, they are completely independent, and the hash code is
192 computed differently for each of these. */
194 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
196 /* Hash table and temporary node for larger integer const values. */
197 static GTY (()) tree int_cst_node;
199 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
201 static hashval_t hash (tree t);
202 static bool equal (tree x, tree y);
205 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
207 /* Hash table for optimization flags and target option flags. Use the same
208 hash table for both sets of options. Nodes for building the current
209 optimization and target option nodes. The assumption is that most of the time
210 the options created will already be in the hash table, so we avoid
211 allocating and freeing a node repeatedly. */
212 static GTY (()) tree cl_optimization_node;
213 static GTY (()) tree cl_target_option_node;
215 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
217 static hashval_t hash (tree t);
218 static bool equal (tree x, tree y);
221 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
223 /* General tree->tree mapping structure for use in hash tables. */
226 static GTY ((cache))
227 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
229 static GTY ((cache))
230 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
232 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
234 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
236 static bool
237 equal (tree_vec_map *a, tree_vec_map *b)
239 return a->base.from == b->base.from;
242 static int
243 keep_cache_entry (tree_vec_map *&m)
245 return ggc_marked_p (m->base.from);
249 static GTY ((cache))
250 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
252 static void set_type_quals (tree, int);
253 static void print_type_hash_statistics (void);
254 static void print_debug_expr_statistics (void);
255 static void print_value_expr_statistics (void);
257 tree global_trees[TI_MAX];
258 tree integer_types[itk_none];
260 bool int_n_enabled_p[NUM_INT_N_ENTS];
261 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
263 bool tree_contains_struct[MAX_TREE_CODES][64];
265 /* Number of operands for each OpenMP clause. */
266 unsigned const char omp_clause_num_ops[] =
268 0, /* OMP_CLAUSE_ERROR */
269 1, /* OMP_CLAUSE_PRIVATE */
270 1, /* OMP_CLAUSE_SHARED */
271 1, /* OMP_CLAUSE_FIRSTPRIVATE */
272 2, /* OMP_CLAUSE_LASTPRIVATE */
273 5, /* OMP_CLAUSE_REDUCTION */
274 1, /* OMP_CLAUSE_COPYIN */
275 1, /* OMP_CLAUSE_COPYPRIVATE */
276 3, /* OMP_CLAUSE_LINEAR */
277 2, /* OMP_CLAUSE_ALIGNED */
278 1, /* OMP_CLAUSE_DEPEND */
279 1, /* OMP_CLAUSE_UNIFORM */
280 1, /* OMP_CLAUSE_TO_DECLARE */
281 1, /* OMP_CLAUSE_LINK */
282 2, /* OMP_CLAUSE_FROM */
283 2, /* OMP_CLAUSE_TO */
284 2, /* OMP_CLAUSE_MAP */
285 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
286 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
287 2, /* OMP_CLAUSE__CACHE_ */
288 2, /* OMP_CLAUSE_GANG */
289 1, /* OMP_CLAUSE_ASYNC */
290 1, /* OMP_CLAUSE_WAIT */
291 0, /* OMP_CLAUSE_AUTO */
292 0, /* OMP_CLAUSE_SEQ */
293 1, /* OMP_CLAUSE__LOOPTEMP_ */
294 1, /* OMP_CLAUSE_IF */
295 1, /* OMP_CLAUSE_NUM_THREADS */
296 1, /* OMP_CLAUSE_SCHEDULE */
297 0, /* OMP_CLAUSE_NOWAIT */
298 1, /* OMP_CLAUSE_ORDERED */
299 0, /* OMP_CLAUSE_DEFAULT */
300 3, /* OMP_CLAUSE_COLLAPSE */
301 0, /* OMP_CLAUSE_UNTIED */
302 1, /* OMP_CLAUSE_FINAL */
303 0, /* OMP_CLAUSE_MERGEABLE */
304 1, /* OMP_CLAUSE_DEVICE */
305 1, /* OMP_CLAUSE_DIST_SCHEDULE */
306 0, /* OMP_CLAUSE_INBRANCH */
307 0, /* OMP_CLAUSE_NOTINBRANCH */
308 1, /* OMP_CLAUSE_NUM_TEAMS */
309 1, /* OMP_CLAUSE_THREAD_LIMIT */
310 0, /* OMP_CLAUSE_PROC_BIND */
311 1, /* OMP_CLAUSE_SAFELEN */
312 1, /* OMP_CLAUSE_SIMDLEN */
313 0, /* OMP_CLAUSE_FOR */
314 0, /* OMP_CLAUSE_PARALLEL */
315 0, /* OMP_CLAUSE_SECTIONS */
316 0, /* OMP_CLAUSE_TASKGROUP */
317 1, /* OMP_CLAUSE_PRIORITY */
318 1, /* OMP_CLAUSE_GRAINSIZE */
319 1, /* OMP_CLAUSE_NUM_TASKS */
320 0, /* OMP_CLAUSE_NOGROUP */
321 0, /* OMP_CLAUSE_THREADS */
322 0, /* OMP_CLAUSE_SIMD */
323 1, /* OMP_CLAUSE_HINT */
324 0, /* OMP_CLAUSE_DEFAULTMAP */
325 1, /* OMP_CLAUSE__SIMDUID_ */
326 0, /* OMP_CLAUSE__SIMT_ */
327 0, /* OMP_CLAUSE_INDEPENDENT */
328 1, /* OMP_CLAUSE_WORKER */
329 1, /* OMP_CLAUSE_VECTOR */
330 1, /* OMP_CLAUSE_NUM_GANGS */
331 1, /* OMP_CLAUSE_NUM_WORKERS */
332 1, /* OMP_CLAUSE_VECTOR_LENGTH */
333 3, /* OMP_CLAUSE_TILE */
334 2, /* OMP_CLAUSE__GRIDDIM_ */
337 const char * const omp_clause_code_name[] =
339 "error_clause",
340 "private",
341 "shared",
342 "firstprivate",
343 "lastprivate",
344 "reduction",
345 "copyin",
346 "copyprivate",
347 "linear",
348 "aligned",
349 "depend",
350 "uniform",
351 "to",
352 "link",
353 "from",
354 "to",
355 "map",
356 "use_device_ptr",
357 "is_device_ptr",
358 "_cache_",
359 "gang",
360 "async",
361 "wait",
362 "auto",
363 "seq",
364 "_looptemp_",
365 "if",
366 "num_threads",
367 "schedule",
368 "nowait",
369 "ordered",
370 "default",
371 "collapse",
372 "untied",
373 "final",
374 "mergeable",
375 "device",
376 "dist_schedule",
377 "inbranch",
378 "notinbranch",
379 "num_teams",
380 "thread_limit",
381 "proc_bind",
382 "safelen",
383 "simdlen",
384 "for",
385 "parallel",
386 "sections",
387 "taskgroup",
388 "priority",
389 "grainsize",
390 "num_tasks",
391 "nogroup",
392 "threads",
393 "simd",
394 "hint",
395 "defaultmap",
396 "_simduid_",
397 "_simt_",
398 "independent",
399 "worker",
400 "vector",
401 "num_gangs",
402 "num_workers",
403 "vector_length",
404 "tile",
405 "_griddim_"
409 /* Return the tree node structure used by tree code CODE. */
411 static inline enum tree_node_structure_enum
412 tree_node_structure_for_code (enum tree_code code)
414 switch (TREE_CODE_CLASS (code))
416 case tcc_declaration:
418 switch (code)
420 case FIELD_DECL:
421 return TS_FIELD_DECL;
422 case PARM_DECL:
423 return TS_PARM_DECL;
424 case VAR_DECL:
425 return TS_VAR_DECL;
426 case LABEL_DECL:
427 return TS_LABEL_DECL;
428 case RESULT_DECL:
429 return TS_RESULT_DECL;
430 case DEBUG_EXPR_DECL:
431 return TS_DECL_WRTL;
432 case CONST_DECL:
433 return TS_CONST_DECL;
434 case TYPE_DECL:
435 return TS_TYPE_DECL;
436 case FUNCTION_DECL:
437 return TS_FUNCTION_DECL;
438 case TRANSLATION_UNIT_DECL:
439 return TS_TRANSLATION_UNIT_DECL;
440 default:
441 return TS_DECL_NON_COMMON;
444 case tcc_type:
445 return TS_TYPE_NON_COMMON;
446 case tcc_reference:
447 case tcc_comparison:
448 case tcc_unary:
449 case tcc_binary:
450 case tcc_expression:
451 case tcc_statement:
452 case tcc_vl_exp:
453 return TS_EXP;
454 default: /* tcc_constant and tcc_exceptional */
455 break;
457 switch (code)
459 /* tcc_constant cases. */
460 case VOID_CST: return TS_TYPED;
461 case INTEGER_CST: return TS_INT_CST;
462 case REAL_CST: return TS_REAL_CST;
463 case FIXED_CST: return TS_FIXED_CST;
464 case COMPLEX_CST: return TS_COMPLEX;
465 case VECTOR_CST: return TS_VECTOR;
466 case STRING_CST: return TS_STRING;
467 /* tcc_exceptional cases. */
468 case ERROR_MARK: return TS_COMMON;
469 case IDENTIFIER_NODE: return TS_IDENTIFIER;
470 case TREE_LIST: return TS_LIST;
471 case TREE_VEC: return TS_VEC;
472 case SSA_NAME: return TS_SSA_NAME;
473 case PLACEHOLDER_EXPR: return TS_COMMON;
474 case STATEMENT_LIST: return TS_STATEMENT_LIST;
475 case BLOCK: return TS_BLOCK;
476 case CONSTRUCTOR: return TS_CONSTRUCTOR;
477 case TREE_BINFO: return TS_BINFO;
478 case OMP_CLAUSE: return TS_OMP_CLAUSE;
479 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
480 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
482 default:
483 gcc_unreachable ();
488 /* Initialize tree_contains_struct to describe the hierarchy of tree
489 nodes. */
491 static void
492 initialize_tree_contains_struct (void)
494 unsigned i;
496 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
498 enum tree_code code;
499 enum tree_node_structure_enum ts_code;
501 code = (enum tree_code) i;
502 ts_code = tree_node_structure_for_code (code);
504 /* Mark the TS structure itself. */
505 tree_contains_struct[code][ts_code] = 1;
507 /* Mark all the structures that TS is derived from. */
508 switch (ts_code)
510 case TS_TYPED:
511 case TS_BLOCK:
512 case TS_OPTIMIZATION:
513 case TS_TARGET_OPTION:
514 MARK_TS_BASE (code);
515 break;
517 case TS_COMMON:
518 case TS_INT_CST:
519 case TS_REAL_CST:
520 case TS_FIXED_CST:
521 case TS_VECTOR:
522 case TS_STRING:
523 case TS_COMPLEX:
524 case TS_SSA_NAME:
525 case TS_CONSTRUCTOR:
526 case TS_EXP:
527 case TS_STATEMENT_LIST:
528 MARK_TS_TYPED (code);
529 break;
531 case TS_IDENTIFIER:
532 case TS_DECL_MINIMAL:
533 case TS_TYPE_COMMON:
534 case TS_LIST:
535 case TS_VEC:
536 case TS_BINFO:
537 case TS_OMP_CLAUSE:
538 MARK_TS_COMMON (code);
539 break;
541 case TS_TYPE_WITH_LANG_SPECIFIC:
542 MARK_TS_TYPE_COMMON (code);
543 break;
545 case TS_TYPE_NON_COMMON:
546 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
547 break;
549 case TS_DECL_COMMON:
550 MARK_TS_DECL_MINIMAL (code);
551 break;
553 case TS_DECL_WRTL:
554 case TS_CONST_DECL:
555 MARK_TS_DECL_COMMON (code);
556 break;
558 case TS_DECL_NON_COMMON:
559 MARK_TS_DECL_WITH_VIS (code);
560 break;
562 case TS_DECL_WITH_VIS:
563 case TS_PARM_DECL:
564 case TS_LABEL_DECL:
565 case TS_RESULT_DECL:
566 MARK_TS_DECL_WRTL (code);
567 break;
569 case TS_FIELD_DECL:
570 MARK_TS_DECL_COMMON (code);
571 break;
573 case TS_VAR_DECL:
574 MARK_TS_DECL_WITH_VIS (code);
575 break;
577 case TS_TYPE_DECL:
578 case TS_FUNCTION_DECL:
579 MARK_TS_DECL_NON_COMMON (code);
580 break;
582 case TS_TRANSLATION_UNIT_DECL:
583 MARK_TS_DECL_COMMON (code);
584 break;
586 default:
587 gcc_unreachable ();
591 /* Basic consistency checks for attributes used in fold. */
592 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
593 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
594 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
595 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
596 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
597 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
598 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
599 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
600 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
601 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
602 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
603 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
604 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
605 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
606 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
607 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
608 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
609 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
610 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
611 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
612 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
613 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
614 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
615 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
616 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
617 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
618 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
619 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
620 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
621 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
622 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
623 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
624 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
625 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
626 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
627 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
628 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
629 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
630 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
631 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
635 /* Init tree.c. */
637 void
638 init_ttree (void)
640 /* Initialize the hash table of types. */
641 type_hash_table
642 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
644 debug_expr_for_decl
645 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
647 value_expr_for_decl
648 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
650 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
652 int_cst_node = make_int_cst (1, 1);
654 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
656 cl_optimization_node = make_node (OPTIMIZATION_NODE);
657 cl_target_option_node = make_node (TARGET_OPTION_NODE);
659 /* Initialize the tree_contains_struct array. */
660 initialize_tree_contains_struct ();
661 lang_hooks.init_ts ();
665 /* The name of the object as the assembler will see it (but before any
666 translations made by ASM_OUTPUT_LABELREF). Often this is the same
667 as DECL_NAME. It is an IDENTIFIER_NODE. */
668 tree
669 decl_assembler_name (tree decl)
671 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
672 lang_hooks.set_decl_assembler_name (decl);
673 return DECL_ASSEMBLER_NAME_RAW (decl);
676 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
677 (either of which may be NULL). Inform the FE if this changes the
678 name. */
680 void
681 overwrite_decl_assembler_name (tree decl, tree name)
683 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
684 lang_hooks.overwrite_decl_assembler_name (decl, name);
687 /* When the target supports COMDAT groups, this indicates which group the
688 DECL is associated with. This can be either an IDENTIFIER_NODE or a
689 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
690 tree
691 decl_comdat_group (const_tree node)
693 struct symtab_node *snode = symtab_node::get (node);
694 if (!snode)
695 return NULL;
696 return snode->get_comdat_group ();
699 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
700 tree
701 decl_comdat_group_id (const_tree node)
703 struct symtab_node *snode = symtab_node::get (node);
704 if (!snode)
705 return NULL;
706 return snode->get_comdat_group_id ();
709 /* When the target supports named sections, return the name of the section
710 NODE is placed in (as a string), or NULL if it is in no section. */
711 const char *
712 decl_section_name (const_tree node)
714 struct symtab_node *snode = symtab_node::get (node);
715 if (!snode)
716 return NULL;
717 return snode->get_section ();
720 /* Set the section name of NODE to the string VALUE, or clear it
721 when VALUE is NULL. */
722 void
723 set_decl_section_name (tree node, const char *value)
725 struct symtab_node *snode;
727 if (value == NULL)
729 snode = symtab_node::get (node);
730 if (!snode)
731 return;
733 else if (VAR_P (node))
734 snode = varpool_node::get_create (node);
735 else
736 snode = cgraph_node::get_create (node);
737 snode->set_section (value);
740 /* Return TLS model of a variable NODE. */
741 enum tls_model
742 decl_tls_model (const_tree node)
744 struct varpool_node *snode = varpool_node::get (node);
745 if (!snode)
746 return TLS_MODEL_NONE;
747 return snode->tls_model;
750 /* Set TLS model of variable NODE to MODEL. */
751 void
752 set_decl_tls_model (tree node, enum tls_model model)
754 struct varpool_node *vnode;
756 if (model == TLS_MODEL_NONE)
758 vnode = varpool_node::get (node);
759 if (!vnode)
760 return;
762 else
763 vnode = varpool_node::get_create (node);
764 vnode->tls_model = model;
767 /* Compute the number of bytes occupied by a tree with code CODE.
768 This function cannot be used for nodes that have variable sizes,
769 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
770 size_t
771 tree_code_size (enum tree_code code)
773 switch (TREE_CODE_CLASS (code))
775 case tcc_declaration: /* A decl node */
776 switch (code)
778 case FIELD_DECL: return sizeof (tree_field_decl);
779 case PARM_DECL: return sizeof (tree_parm_decl);
780 case VAR_DECL: return sizeof (tree_var_decl);
781 case LABEL_DECL: return sizeof (tree_label_decl);
782 case RESULT_DECL: return sizeof (tree_result_decl);
783 case CONST_DECL: return sizeof (tree_const_decl);
784 case TYPE_DECL: return sizeof (tree_type_decl);
785 case FUNCTION_DECL: return sizeof (tree_function_decl);
786 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
787 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
788 case NAMESPACE_DECL:
789 case IMPORTED_DECL:
790 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
791 default:
792 gcc_checking_assert (code >= NUM_TREE_CODES);
793 return lang_hooks.tree_size (code);
796 case tcc_type: /* a type node */
797 switch (code)
799 case OFFSET_TYPE:
800 case ENUMERAL_TYPE:
801 case BOOLEAN_TYPE:
802 case INTEGER_TYPE:
803 case REAL_TYPE:
804 case POINTER_TYPE:
805 case REFERENCE_TYPE:
806 case NULLPTR_TYPE:
807 case FIXED_POINT_TYPE:
808 case COMPLEX_TYPE:
809 case VECTOR_TYPE:
810 case ARRAY_TYPE:
811 case RECORD_TYPE:
812 case UNION_TYPE:
813 case QUAL_UNION_TYPE:
814 case VOID_TYPE:
815 case POINTER_BOUNDS_TYPE:
816 case FUNCTION_TYPE:
817 case METHOD_TYPE:
818 case LANG_TYPE: return sizeof (tree_type_non_common);
819 default:
820 gcc_checking_assert (code >= NUM_TREE_CODES);
821 return lang_hooks.tree_size (code);
824 case tcc_reference: /* a reference */
825 case tcc_expression: /* an expression */
826 case tcc_statement: /* an expression with side effects */
827 case tcc_comparison: /* a comparison expression */
828 case tcc_unary: /* a unary arithmetic expression */
829 case tcc_binary: /* a binary arithmetic expression */
830 return (sizeof (struct tree_exp)
831 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
833 case tcc_constant: /* a constant */
834 switch (code)
836 case VOID_CST: return sizeof (tree_typed);
837 case INTEGER_CST: gcc_unreachable ();
838 case REAL_CST: return sizeof (tree_real_cst);
839 case FIXED_CST: return sizeof (tree_fixed_cst);
840 case COMPLEX_CST: return sizeof (tree_complex);
841 case VECTOR_CST: gcc_unreachable ();
842 case STRING_CST: gcc_unreachable ();
843 default:
844 gcc_checking_assert (code >= NUM_TREE_CODES);
845 return lang_hooks.tree_size (code);
848 case tcc_exceptional: /* something random, like an identifier. */
849 switch (code)
851 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
852 case TREE_LIST: return sizeof (tree_list);
854 case ERROR_MARK:
855 case PLACEHOLDER_EXPR: return sizeof (tree_common);
857 case TREE_VEC: gcc_unreachable ();
858 case OMP_CLAUSE: gcc_unreachable ();
860 case SSA_NAME: return sizeof (tree_ssa_name);
862 case STATEMENT_LIST: return sizeof (tree_statement_list);
863 case BLOCK: return sizeof (struct tree_block);
864 case CONSTRUCTOR: return sizeof (tree_constructor);
865 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
866 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
868 default:
869 gcc_checking_assert (code >= NUM_TREE_CODES);
870 return lang_hooks.tree_size (code);
873 default:
874 gcc_unreachable ();
878 /* Compute the number of bytes occupied by NODE. This routine only
879 looks at TREE_CODE, except for those nodes that have variable sizes. */
880 size_t
881 tree_size (const_tree node)
883 const enum tree_code code = TREE_CODE (node);
884 switch (code)
886 case INTEGER_CST:
887 return (sizeof (struct tree_int_cst)
888 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
890 case TREE_BINFO:
891 return (offsetof (struct tree_binfo, base_binfos)
892 + vec<tree, va_gc>
893 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
895 case TREE_VEC:
896 return (sizeof (struct tree_vec)
897 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
899 case VECTOR_CST:
900 return (sizeof (struct tree_vector)
901 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
903 case STRING_CST:
904 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
906 case OMP_CLAUSE:
907 return (sizeof (struct tree_omp_clause)
908 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
909 * sizeof (tree));
911 default:
912 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
913 return (sizeof (struct tree_exp)
914 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
915 else
916 return tree_code_size (code);
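/* A worked example of the arithmetic above (a sketch; the concrete sizes
   are host dependent): a TREE_VEC with TREE_VEC_LENGTH of 4 occupies
   sizeof (struct tree_vec) + 3 * sizeof (tree) bytes, because one operand
   slot is already embedded in struct tree_vec; likewise an
   OMP_CLAUSE_LASTPRIVATE node, which has two operands, occupies
   sizeof (struct tree_omp_clause) + 1 * sizeof (tree).  */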
920 /* Record interesting allocation statistics for a tree node with CODE
921 and LENGTH. */
923 static void
924 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
925 size_t length ATTRIBUTE_UNUSED)
927 enum tree_code_class type = TREE_CODE_CLASS (code);
928 tree_node_kind kind;
930 if (!GATHER_STATISTICS)
931 return;
933 switch (type)
935 case tcc_declaration: /* A decl node */
936 kind = d_kind;
937 break;
939 case tcc_type: /* a type node */
940 kind = t_kind;
941 break;
943 case tcc_statement: /* an expression with side effects */
944 kind = s_kind;
945 break;
947 case tcc_reference: /* a reference */
948 kind = r_kind;
949 break;
951 case tcc_expression: /* an expression */
952 case tcc_comparison: /* a comparison expression */
953 case tcc_unary: /* a unary arithmetic expression */
954 case tcc_binary: /* a binary arithmetic expression */
955 kind = e_kind;
956 break;
958 case tcc_constant: /* a constant */
959 kind = c_kind;
960 break;
962 case tcc_exceptional: /* something random, like an identifier. */
963 switch (code)
965 case IDENTIFIER_NODE:
966 kind = id_kind;
967 break;
969 case TREE_VEC:
970 kind = vec_kind;
971 break;
973 case TREE_BINFO:
974 kind = binfo_kind;
975 break;
977 case SSA_NAME:
978 kind = ssa_name_kind;
979 break;
981 case BLOCK:
982 kind = b_kind;
983 break;
985 case CONSTRUCTOR:
986 kind = constr_kind;
987 break;
989 case OMP_CLAUSE:
990 kind = omp_clause_kind;
991 break;
993 default:
994 kind = x_kind;
995 break;
997 break;
999 case tcc_vl_exp:
1000 kind = e_kind;
1001 break;
1003 default:
1004 gcc_unreachable ();
1007 tree_code_counts[(int) code]++;
1008 tree_node_counts[(int) kind]++;
1009 tree_node_sizes[(int) kind] += length;
1012 /* Allocate and return a new UID from the DECL_UID namespace. */
1014 int
1015 allocate_decl_uid (void)
1017 return next_decl_uid++;
1020 /* Return a newly allocated node of code CODE. For decl and type
1021 nodes, some other fields are initialized. The rest of the node is
1022 initialized to zero. This function cannot be used for TREE_VEC,
1023 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1024 tree_code_size.
1026 Achoo! I got a code in the node. */
1028 tree
1029 make_node (enum tree_code code MEM_STAT_DECL)
1031 tree t;
1032 enum tree_code_class type = TREE_CODE_CLASS (code);
1033 size_t length = tree_code_size (code);
1035 record_node_allocation_statistics (code, length);
1037 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1038 TREE_SET_CODE (t, code);
1040 switch (type)
1042 case tcc_statement:
1043 if (code != DEBUG_BEGIN_STMT)
1044 TREE_SIDE_EFFECTS (t) = 1;
1045 break;
1047 case tcc_declaration:
1048 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1050 if (code == FUNCTION_DECL)
1052 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1053 SET_DECL_MODE (t, FUNCTION_MODE);
1055 else
1056 SET_DECL_ALIGN (t, 1);
1058 DECL_SOURCE_LOCATION (t) = input_location;
1059 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1060 DECL_UID (t) = --next_debug_decl_uid;
1061 else
1063 DECL_UID (t) = allocate_decl_uid ();
1064 SET_DECL_PT_UID (t, -1);
1066 if (TREE_CODE (t) == LABEL_DECL)
1067 LABEL_DECL_UID (t) = -1;
1069 break;
1071 case tcc_type:
1072 TYPE_UID (t) = next_type_uid++;
1073 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1074 TYPE_USER_ALIGN (t) = 0;
1075 TYPE_MAIN_VARIANT (t) = t;
1076 TYPE_CANONICAL (t) = t;
1078 /* Default to no attributes for type, but let target change that. */
1079 TYPE_ATTRIBUTES (t) = NULL_TREE;
1080 targetm.set_default_type_attributes (t);
1082 /* We have not yet computed the alias set for this type. */
1083 TYPE_ALIAS_SET (t) = -1;
1084 break;
1086 case tcc_constant:
1087 TREE_CONSTANT (t) = 1;
1088 break;
1090 case tcc_expression:
1091 switch (code)
1093 case INIT_EXPR:
1094 case MODIFY_EXPR:
1095 case VA_ARG_EXPR:
1096 case PREDECREMENT_EXPR:
1097 case PREINCREMENT_EXPR:
1098 case POSTDECREMENT_EXPR:
1099 case POSTINCREMENT_EXPR:
1100 /* All of these have side-effects, no matter what their
1101 operands are. */
1102 TREE_SIDE_EFFECTS (t) = 1;
1103 break;
1105 default:
1106 break;
1108 break;
1110 case tcc_exceptional:
1111 switch (code)
1113 case TARGET_OPTION_NODE:
1114 TREE_TARGET_OPTION(t)
1115 = ggc_cleared_alloc<struct cl_target_option> ();
1116 break;
1118 case OPTIMIZATION_NODE:
1119 TREE_OPTIMIZATION (t)
1120 = ggc_cleared_alloc<struct cl_optimization> ();
1121 break;
1123 default:
1124 break;
1126 break;
1128 default:
1129 /* Other classes need no special treatment. */
1130 break;
1133 return t;
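/* A usage sketch (illustrative only; front ends normally go through the
   build_* and build_decl helpers rather than calling make_node directly):

     tree blk = make_node (BLOCK);
     tree var = make_node (VAR_DECL);

   Here BLK is simply a zero-initialized BLOCK, while VAR gets a fresh
   DECL_UID, a DECL_ALIGN of 1 and DECL_SOURCE_LOCATION set to
   input_location, as described above; the caller still has to fill in
   DECL_NAME, TREE_TYPE and so on.  */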
1136 /* Free tree node. */
1138 void
1139 free_node (tree node)
1141 enum tree_code code = TREE_CODE (node);
1142 if (GATHER_STATISTICS)
1144 tree_code_counts[(int) TREE_CODE (node)]--;
1145 tree_node_counts[(int) t_kind]--;
1146 tree_node_sizes[(int) t_kind] -= tree_size (node);
1148 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1149 vec_free (CONSTRUCTOR_ELTS (node));
1150 else if (code == BLOCK)
1151 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1152 else if (code == TREE_BINFO)
1153 vec_free (BINFO_BASE_ACCESSES (node));
1154 ggc_free (node);
1157 /* Return a new node with the same contents as NODE except that its
1158 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1160 tree
1161 copy_node (tree node MEM_STAT_DECL)
1163 tree t;
1164 enum tree_code code = TREE_CODE (node);
1165 size_t length;
1167 gcc_assert (code != STATEMENT_LIST);
1169 length = tree_size (node);
1170 record_node_allocation_statistics (code, length);
1171 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1172 memcpy (t, node, length);
1174 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1175 TREE_CHAIN (t) = 0;
1176 TREE_ASM_WRITTEN (t) = 0;
1177 TREE_VISITED (t) = 0;
1179 if (TREE_CODE_CLASS (code) == tcc_declaration)
1181 if (code == DEBUG_EXPR_DECL)
1182 DECL_UID (t) = --next_debug_decl_uid;
1183 else
1185 DECL_UID (t) = allocate_decl_uid ();
1186 if (DECL_PT_UID_SET_P (node))
1187 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1189 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1190 && DECL_HAS_VALUE_EXPR_P (node))
1192 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1193 DECL_HAS_VALUE_EXPR_P (t) = 1;
1195 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1196 if (VAR_P (node))
1198 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1199 t->decl_with_vis.symtab_node = NULL;
1201 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1203 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1204 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1206 if (TREE_CODE (node) == FUNCTION_DECL)
1208 DECL_STRUCT_FUNCTION (t) = NULL;
1209 t->decl_with_vis.symtab_node = NULL;
1212 else if (TREE_CODE_CLASS (code) == tcc_type)
1214 TYPE_UID (t) = next_type_uid++;
1215 /* The following is so that the debug code for
1216 the copy is different from the original type.
1217 The two statements usually duplicate each other
1218 (because they clear fields of the same union),
1219 but the optimizer should catch that. */
1220 TYPE_SYMTAB_ADDRESS (t) = 0;
1221 TYPE_SYMTAB_DIE (t) = 0;
1223 /* Do not copy the values cache. */
1224 if (TYPE_CACHED_VALUES_P (t))
1226 TYPE_CACHED_VALUES_P (t) = 0;
1227 TYPE_CACHED_VALUES (t) = NULL_TREE;
1230 else if (code == TARGET_OPTION_NODE)
1232 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1233 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1234 sizeof (struct cl_target_option));
1236 else if (code == OPTIMIZATION_NODE)
1238 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1239 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1240 sizeof (struct cl_optimization));
1243 return t;
1246 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1247 For example, this can copy a list made of TREE_LIST nodes. */
1249 tree
1250 copy_list (tree list)
1252 tree head;
1253 tree prev, next;
1255 if (list == 0)
1256 return 0;
1258 head = prev = copy_node (list);
1259 next = TREE_CHAIN (list);
1260 while (next)
1262 TREE_CHAIN (prev) = copy_node (next);
1263 prev = TREE_CHAIN (prev);
1264 next = TREE_CHAIN (next);
1266 return head;
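/* A usage sketch (illustrative only): duplicating a two-element
   TREE_LIST chain.

     tree orig = tree_cons (NULL_TREE, integer_zero_node,
			    tree_cons (NULL_TREE, integer_one_node,
				       NULL_TREE));
     tree dup = copy_list (orig);

   DUP is a fresh chain of two TREE_LIST nodes; the TREE_PURPOSE and
   TREE_VALUE fields are shared with ORIG, not copied.  */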
1270 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1271 INTEGER_CST with value CST and type TYPE. */
1273 static unsigned int
1274 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1276 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1277 /* We need extra HWIs if CST is an unsigned integer with its
1278 upper bit set. */
1279 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1280 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1281 return cst.get_len ();
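/* For example (assuming HOST_BITS_PER_WIDE_INT == 64): for a 64-bit
   unsigned type, the value 0x8000000000000000 has a wide_int length of 1,
   but because the type is unsigned and the top bit is set an extra
   zero element is needed to keep the extended value non-negative, so
   this returns 64 / 64 + 1 == 2; for the value 1 it simply returns 1.  */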
1284 /* Return a new INTEGER_CST with value CST and type TYPE. */
1286 static tree
1287 build_new_int_cst (tree type, const wide_int &cst)
1289 unsigned int len = cst.get_len ();
1290 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1291 tree nt = make_int_cst (len, ext_len);
1293 if (len < ext_len)
1295 --ext_len;
1296 TREE_INT_CST_ELT (nt, ext_len)
1297 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1298 for (unsigned int i = len; i < ext_len; ++i)
1299 TREE_INT_CST_ELT (nt, i) = -1;
1301 else if (TYPE_UNSIGNED (type)
1302 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1304 len--;
1305 TREE_INT_CST_ELT (nt, len)
1306 = zext_hwi (cst.elt (len),
1307 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1310 for (unsigned int i = 0; i < len; i++)
1311 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1312 TREE_TYPE (nt) = type;
1313 return nt;
1316 /* Create an INTEGER_CST node whose value is LOW, sign-extended to the precision of TYPE. */
1318 tree
1319 build_int_cst (tree type, HOST_WIDE_INT low)
1321 /* Support legacy code. */
1322 if (!type)
1323 type = integer_type_node;
1325 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
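/* Usage sketch (illustrative only):

     tree c42  = build_int_cst (integer_type_node, 42);
     tree null = build_int_cst (ptr_type_node, 0);

   Both results are shared nodes coming from wide_int_to_tree's caches
   (see below).  */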
1328 tree
1329 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1331 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1334 /* Create an INTEGER_CST node whose value is LOW, sign-extended to the precision of TYPE. */
1336 tree
1337 build_int_cst_type (tree type, HOST_WIDE_INT low)
1339 gcc_assert (type);
1340 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1343 /* Construct a tree of type TYPE with the value given by CST. The signedness
1344 of CST is assumed to be the same as the signedness of TYPE. */
1346 tree
1347 double_int_to_tree (tree type, double_int cst)
1349 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1352 /* Force the wide_int CST into the range of TYPE by sign- or zero-extending
1353 it. OVERFLOWABLE indicates whether we are interested in overflow of the
1354 value: when >0 we are only interested in signed overflow, when <0 we are
1355 interested in any overflow. OVERFLOWED indicates whether overflow has
1356 already occurred. We force the value to be within range of TYPE (by
1357 setting to 0 or 1 all the bits outside the type's range). We set
1358 TREE_OVERFLOW if
1359 OVERFLOWED is nonzero,
1360 or OVERFLOWABLE is >0 and signed overflow occurs,
1361 or OVERFLOWABLE is <0 and any overflow occurs.
1362 We return a new tree node for the extended wide_int. The node
1363 is shared if no overflow flags are set. */
1367 tree
1368 force_fit_type (tree type, const wide_int_ref &cst,
1369 int overflowable, bool overflowed)
1371 signop sign = TYPE_SIGN (type);
1373 /* If we need to set overflow flags, return a new unshared node. */
1374 if (overflowed || !wi::fits_to_tree_p (cst, type))
1376 if (overflowed
1377 || overflowable < 0
1378 || (overflowable > 0 && sign == SIGNED))
1380 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1381 tree t = build_new_int_cst (type, tmp);
1382 TREE_OVERFLOW (t) = 1;
1383 return t;
1387 /* Else build a shared node. */
1388 return wide_int_to_tree (type, cst);
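/* A worked example (a sketch): forcing the value 300 into signed char,

     force_fit_type (signed_char_type_node, wi::shwi (300, 16), 1, false)

   300 does not fit an 8-bit signed type, and since OVERFLOWABLE > 0 and
   the sign is SIGNED the result is a fresh INTEGER_CST of value 44 (300
   truncated to 8 bits) with TREE_OVERFLOW set.  */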
1391 /* These are the hash table functions for the hash table of INTEGER_CST
1392 nodes of a sizetype. */
1394 /* Return the hash code for X, an INTEGER_CST. */
1396 hashval_t
1397 int_cst_hasher::hash (tree x)
1399 const_tree const t = x;
1400 hashval_t code = TYPE_UID (TREE_TYPE (t));
1401 int i;
1403 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1404 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1406 return code;
1409 /* Return true if the value represented by X (an INTEGER_CST tree node)
1410 is the same as that represented by Y. */
1412 bool
1413 int_cst_hasher::equal (tree x, tree y)
1415 const_tree const xt = x;
1416 const_tree const yt = y;
1418 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1419 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1420 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1421 return false;
1423 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1424 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1425 return false;
1427 return true;
1430 /* Create an INT_CST node of TYPE and value CST.
1431 The returned node is always shared. For small integers we use a
1432 per-type vector cache, for larger ones we use a single hash table.
1433 The value is extended from its precision according to the sign of
1434 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1435 the upper bits and ensures that hashing and value equality based
1436 upon the underlying HOST_WIDE_INTs works without masking. */
1438 tree
1439 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1441 tree t;
1442 int ix = -1;
1443 int limit = 0;
1445 gcc_assert (type);
1446 unsigned int prec = TYPE_PRECISION (type);
1447 signop sgn = TYPE_SIGN (type);
1449 /* Verify that everything is canonical. */
1450 int l = pcst.get_len ();
1451 if (l > 1)
1453 if (pcst.elt (l - 1) == 0)
1454 gcc_checking_assert (pcst.elt (l - 2) < 0);
1455 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1456 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1459 wide_int cst = wide_int::from (pcst, prec, sgn);
1460 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1462 if (ext_len == 1)
1464 /* We just need to store a single HOST_WIDE_INT. */
1465 HOST_WIDE_INT hwi;
1466 if (TYPE_UNSIGNED (type))
1467 hwi = cst.to_uhwi ();
1468 else
1469 hwi = cst.to_shwi ();
1471 switch (TREE_CODE (type))
1473 case NULLPTR_TYPE:
1474 gcc_assert (hwi == 0);
1475 /* Fallthru. */
1477 case POINTER_TYPE:
1478 case REFERENCE_TYPE:
1479 case POINTER_BOUNDS_TYPE:
1480 /* Cache NULL pointer and zero bounds. */
1481 if (hwi == 0)
1483 limit = 1;
1484 ix = 0;
1486 break;
1488 case BOOLEAN_TYPE:
1489 /* Cache false or true. */
1490 limit = 2;
1491 if (IN_RANGE (hwi, 0, 1))
1492 ix = hwi;
1493 break;
1495 case INTEGER_TYPE:
1496 case OFFSET_TYPE:
1497 if (TYPE_SIGN (type) == UNSIGNED)
1499 /* Cache [0, N). */
1500 limit = INTEGER_SHARE_LIMIT;
1501 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1502 ix = hwi;
1504 else
1506 /* Cache [-1, N). */
1507 limit = INTEGER_SHARE_LIMIT + 1;
1508 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1509 ix = hwi + 1;
1511 break;
1513 case ENUMERAL_TYPE:
1514 break;
1516 default:
1517 gcc_unreachable ();
1520 if (ix >= 0)
1522 /* Look for it in the type's vector of small shared ints. */
1523 if (!TYPE_CACHED_VALUES_P (type))
1525 TYPE_CACHED_VALUES_P (type) = 1;
1526 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1529 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1530 if (t)
1531 /* Make sure no one is clobbering the shared constant. */
1532 gcc_checking_assert (TREE_TYPE (t) == type
1533 && TREE_INT_CST_NUNITS (t) == 1
1534 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1535 && TREE_INT_CST_EXT_NUNITS (t) == 1
1536 && TREE_INT_CST_ELT (t, 0) == hwi);
1537 else
1539 /* Create a new shared int. */
1540 t = build_new_int_cst (type, cst);
1541 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1544 else
1546 /* Use the cache of larger shared ints, using int_cst_node as
1547 a temporary. */
1549 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1550 TREE_TYPE (int_cst_node) = type;
1552 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1553 t = *slot;
1554 if (!t)
1556 /* Insert this one into the hash table. */
1557 t = int_cst_node;
1558 *slot = t;
1559 /* Make a new node for next time round. */
1560 int_cst_node = make_int_cst (1, 1);
1564 else
1566 /* The value either hashes properly or we drop it on the floor
1567 for the gc to take care of. There will not be enough of them
1568 to worry about. */
1570 tree nt = build_new_int_cst (type, cst);
1571 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1572 t = *slot;
1573 if (!t)
1575 /* Insert this one into the hash table. */
1576 t = nt;
1577 *slot = t;
1579 else
1580 ggc_free (nt);
1583 return t;
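/* Consequently, building the same small constant twice yields the same
   node (a sketch, assuming 7 is below the target's INTEGER_SHARE_LIMIT):

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 7);

   Here A == B: both come out of TYPE_CACHED_VALUES (integer_type_node).
   Larger values are shared through int_cst_hash_table instead.  */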
1586 void
1587 cache_integer_cst (tree t)
1589 tree type = TREE_TYPE (t);
1590 int ix = -1;
1591 int limit = 0;
1592 int prec = TYPE_PRECISION (type);
1594 gcc_assert (!TREE_OVERFLOW (t));
1596 switch (TREE_CODE (type))
1598 case NULLPTR_TYPE:
1599 gcc_assert (integer_zerop (t));
1600 /* Fallthru. */
1602 case POINTER_TYPE:
1603 case REFERENCE_TYPE:
1604 /* Cache NULL pointer. */
1605 if (integer_zerop (t))
1607 limit = 1;
1608 ix = 0;
1610 break;
1612 case BOOLEAN_TYPE:
1613 /* Cache false or true. */
1614 limit = 2;
1615 if (wi::ltu_p (wi::to_wide (t), 2))
1616 ix = TREE_INT_CST_ELT (t, 0);
1617 break;
1619 case INTEGER_TYPE:
1620 case OFFSET_TYPE:
1621 if (TYPE_UNSIGNED (type))
1623 /* Cache 0..N */
1624 limit = INTEGER_SHARE_LIMIT;
1626 /* This is a little hokey, but if the prec is smaller than
1627 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1628 obvious test will not get the correct answer. */
1629 if (prec < HOST_BITS_PER_WIDE_INT)
1631 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1632 ix = tree_to_uhwi (t);
1634 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1635 ix = tree_to_uhwi (t);
1637 else
1639 /* Cache -1..N */
1640 limit = INTEGER_SHARE_LIMIT + 1;
1642 if (integer_minus_onep (t))
1643 ix = 0;
1644 else if (!wi::neg_p (wi::to_wide (t)))
1646 if (prec < HOST_BITS_PER_WIDE_INT)
1648 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1649 ix = tree_to_shwi (t) + 1;
1651 else if (wi::ltu_p (wi::to_wide (t), INTEGER_SHARE_LIMIT))
1652 ix = tree_to_shwi (t) + 1;
1655 break;
1657 case ENUMERAL_TYPE:
1658 break;
1660 default:
1661 gcc_unreachable ();
1664 if (ix >= 0)
1666 /* Look for it in the type's vector of small shared ints. */
1667 if (!TYPE_CACHED_VALUES_P (type))
1669 TYPE_CACHED_VALUES_P (type) = 1;
1670 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1673 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1674 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1676 else
1678 /* Use the cache of larger shared ints. */
1679 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1680 /* If there is already an entry for the number verify it's the
1681 same. */
1682 if (*slot)
1683 gcc_assert (wi::to_wide (tree (*slot)) == wi::to_wide (t));
1684 else
1685 /* Otherwise insert this one into the hash table. */
1686 *slot = t;
1691 /* Build an integer constant of type TYPE in which the lowest BITS bits
1692 are ones and the rest are zeros. */
1694 tree
1695 build_low_bits_mask (tree type, unsigned bits)
1697 gcc_assert (bits <= TYPE_PRECISION (type));
1699 return wide_int_to_tree (type, wi::mask (bits, false,
1700 TYPE_PRECISION (type)));
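/* For example, build_low_bits_mask (unsigned_type_node, 4) yields the
   INTEGER_CST 0xf in that type.  */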
1703 /* Return true if X is an integer constant that can be represented in a
1704 HOST_WIDE_INT (signed or unsigned) without loss of precision. */
1706 bool
1707 cst_and_fits_in_hwi (const_tree x)
1709 return (TREE_CODE (x) == INTEGER_CST
1710 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1713 /* Return a newly constructed VECTOR_CST with the given values of
1714 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1716 tree
1717 make_vector (unsigned log2_npatterns,
1718 unsigned int nelts_per_pattern MEM_STAT_DECL)
1720 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
1721 tree t;
1722 unsigned npatterns = 1 << log2_npatterns;
1723 unsigned encoded_nelts = npatterns * nelts_per_pattern;
1724 unsigned length = (sizeof (struct tree_vector)
1725 + (encoded_nelts - 1) * sizeof (tree));
1727 record_node_allocation_statistics (VECTOR_CST, length);
1729 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1731 TREE_SET_CODE (t, VECTOR_CST);
1732 TREE_CONSTANT (t) = 1;
1733 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
1734 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
1736 return t;
1739 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1740 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1742 tree
1743 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1745 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1746 unsigned HOST_WIDE_INT idx;
1747 tree value;
1749 tree_vector_builder vec (type, nelts, 1);
1750 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1752 if (TREE_CODE (value) == VECTOR_CST)
1753 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
1754 vec.quick_push (VECTOR_CST_ELT (value, i));
1755 else
1756 vec.quick_push (value);
1758 while (vec.length () < nelts)
1759 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
1761 return vec.build ();
1764 /* Build a vector of type VECTYPE in which every element is SC. */
1765 tree
1766 build_vector_from_val (tree vectype, tree sc)
1768 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1770 if (sc == error_mark_node)
1771 return sc;
1773 /* Verify that the vector type is suitable for SC. Note that there
1774 is some inconsistency in the type-system with respect to restrict
1775 qualifications of pointers. Vector types always have a main-variant
1776 element type and the qualification is applied to the vector-type.
1777 So TREE_TYPE (vector-type) does not return a properly qualified
1778 vector element-type. */
1779 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1780 TREE_TYPE (vectype)));
1782 if (CONSTANT_CLASS_P (sc))
1784 tree_vector_builder v (vectype, 1, 1);
1785 v.quick_push (sc);
1786 return v.build ();
1788 else if (0)
1789 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
1790 else
1792 vec<constructor_elt, va_gc> *v;
1793 vec_alloc (v, nunits);
1794 for (i = 0; i < nunits; ++i)
1795 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1796 return build_constructor (vectype, v);
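/* Usage sketch (illustrative only; V4SI here is just a placeholder name
   for a 4-element integer vector type):

     tree v4si = build_vector_type (integer_type_node, 4);
     tree ones = build_vector_from_val (v4si, integer_one_node);

   Since the element is a constant, ONES is the VECTOR_CST { 1, 1, 1, 1 };
   a non-constant SC would instead produce a CONSTRUCTOR holding four
   copies of SC.  */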
1800 /* Build a vector series of type TYPE in which element I has the value
1801 BASE + I * STEP. The result is a constant if BASE and STEP are constant
1802 and a VEC_SERIES_EXPR otherwise. */
1804 tree
1805 build_vec_series (tree type, tree base, tree step)
1807 if (integer_zerop (step))
1808 return build_vector_from_val (type, base);
1809 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
1811 tree_vector_builder builder (type, 1, 3);
1812 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
1813 wi::to_wide (base) + wi::to_wide (step));
1814 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
1815 wi::to_wide (elt1) + wi::to_wide (step));
1816 builder.quick_push (base);
1817 builder.quick_push (elt1);
1818 builder.quick_push (elt2);
1819 return builder.build ();
1821 return build2 (VEC_SERIES_EXPR, type, base, step);
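/* Usage sketch (VT stands for some integer vector type, hypothetical
   here):

     tree base = build_int_cst (integer_type_node, 0);
     tree step = build_int_cst (integer_type_node, 2);
     tree iota = build_vec_series (VT, base, step);

   With constant operands IOTA is a VECTOR_CST encoding { 0, 2, 4, ... }
   with three encoded elements per pattern; otherwise a VEC_SERIES_EXPR
   is built.  */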
1824 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1825 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1827 void
1828 recompute_constructor_flags (tree c)
1830 unsigned int i;
1831 tree val;
1832 bool constant_p = true;
1833 bool side_effects_p = false;
1834 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1836 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1838 /* Mostly ctors will have elts that don't have side-effects, so
1839 the usual case is to scan all the elements. Hence a single
1840 loop for both const and side effects, rather than one loop
1841 each (with early outs). */
1842 if (!TREE_CONSTANT (val))
1843 constant_p = false;
1844 if (TREE_SIDE_EFFECTS (val))
1845 side_effects_p = true;
1848 TREE_SIDE_EFFECTS (c) = side_effects_p;
1849 TREE_CONSTANT (c) = constant_p;
1852 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1853 CONSTRUCTOR C. */
1855 void
1856 verify_constructor_flags (tree c)
1858 unsigned int i;
1859 tree val;
1860 bool constant_p = TREE_CONSTANT (c);
1861 bool side_effects_p = TREE_SIDE_EFFECTS (c);
1862 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1864 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1866 if (constant_p && !TREE_CONSTANT (val))
1867 internal_error ("non-constant element in constant CONSTRUCTOR");
1868 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
1869 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
1873 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1874 are in the vec pointed to by VALS. */
1875 tree
1876 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1878 tree c = make_node (CONSTRUCTOR);
1880 TREE_TYPE (c) = type;
1881 CONSTRUCTOR_ELTS (c) = vals;
1883 recompute_constructor_flags (c);
1885 return c;
1888 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1889 INDEX and VALUE. */
1890 tree
1891 build_constructor_single (tree type, tree index, tree value)
1893 vec<constructor_elt, va_gc> *v;
1894 constructor_elt elt = {index, value};
1896 vec_alloc (v, 1);
1897 v->quick_push (elt);
1899 return build_constructor (type, v);
1903 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1904 are in a list pointed to by VALS. */
1905 tree
1906 build_constructor_from_list (tree type, tree vals)
1908 tree t;
1909 vec<constructor_elt, va_gc> *v = NULL;
1911 if (vals)
1913 vec_alloc (v, list_length (vals));
1914 for (t = vals; t; t = TREE_CHAIN (t))
1915 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1918 return build_constructor (type, v);
1921 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1922 of elements, provided as index/value pairs. */
1924 tree
1925 build_constructor_va (tree type, int nelts, ...)
1927 vec<constructor_elt, va_gc> *v = NULL;
1928 va_list p;
1930 va_start (p, nelts);
1931 vec_alloc (v, nelts);
1932 while (nelts--)
1934 tree index = va_arg (p, tree);
1935 tree value = va_arg (p, tree);
1936 CONSTRUCTOR_APPEND_ELT (v, index, value);
1938 va_end (p);
1939 return build_constructor (type, v);
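/* Usage sketch (ATYPE stands for some array-of-int type, hypothetical
   here): an initializer equivalent to { [0] = 0, [1] = 1 } can be built
   as

     build_constructor_va (atype, 2,
			   size_int (0), integer_zero_node,
			   size_int (1), integer_one_node);  */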
1942 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1944 tree
1945 build_fixed (tree type, FIXED_VALUE_TYPE f)
1947 tree v;
1948 FIXED_VALUE_TYPE *fp;
1950 v = make_node (FIXED_CST);
1951 fp = ggc_alloc<fixed_value> ();
1952 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1954 TREE_TYPE (v) = type;
1955 TREE_FIXED_CST_PTR (v) = fp;
1956 return v;
1959 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1961 tree
1962 build_real (tree type, REAL_VALUE_TYPE d)
1964 tree v;
1965 REAL_VALUE_TYPE *dp;
1966 int overflow = 0;
1968 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1969 Consider doing it via real_convert now. */
1971 v = make_node (REAL_CST);
1972 dp = ggc_alloc<real_value> ();
1973 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1975 TREE_TYPE (v) = type;
1976 TREE_REAL_CST_PTR (v) = dp;
1977 TREE_OVERFLOW (v) = overflow;
1978 return v;
1981 /* Like build_real, but first truncate D to the type. */
1983 tree
1984 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1986 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1989 /* Return a REAL_VALUE_TYPE holding the value of the INTEGER_CST node I,
1990 converted to floating-point type TYPE. */
1992 REAL_VALUE_TYPE
1993 real_value_from_int_cst (const_tree type, const_tree i)
1995 REAL_VALUE_TYPE d;
1997 /* Clear all bits of the real value type so that we can later do
1998 bitwise comparisons to see if two values are the same. */
1999 memset (&d, 0, sizeof d);
2001 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2002 TYPE_SIGN (TREE_TYPE (i)));
2003 return d;
2006 /* Given a tree representing an integer constant I, return a tree
2007 representing the same value as a floating-point constant of type TYPE. */
2009 tree
2010 build_real_from_int_cst (tree type, const_tree i)
2012 tree v;
2013 int overflow = TREE_OVERFLOW (i);
2015 v = build_real (type, real_value_from_int_cst (type, i));
2017 TREE_OVERFLOW (v) |= overflow;
2018 return v;
2021 /* Return a newly constructed STRING_CST node whose value is
2022 the LEN characters at STR.
2023 Note that for a C string literal, LEN should include the trailing NUL.
2024 The TREE_TYPE is not initialized. */
2026 tree
2027 build_string (int len, const char *str)
2029 tree s;
2030 size_t length;
2032 /* Do not waste bytes provided by padding of struct tree_string. */
2033 length = len + offsetof (struct tree_string, str) + 1;
2035 record_node_allocation_statistics (STRING_CST, length);
2037 s = (tree) ggc_internal_alloc (length);
2039 memset (s, 0, sizeof (struct tree_typed));
2040 TREE_SET_CODE (s, STRING_CST);
2041 TREE_CONSTANT (s) = 1;
2042 TREE_STRING_LENGTH (s) = len;
2043 memcpy (s->string.str, str, len);
2044 s->string.str[len] = '\0';
2046 return s;
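/* Usage sketch: for the C literal "hello", LEN is 6 (five characters
   plus the trailing NUL):

     tree s = build_string (6, "hello");

   The caller must still set TREE_TYPE (s), e.g. to an array of six
   char_type_node elements built with build_array_type.  */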
2049 /* Return a newly constructed COMPLEX_CST node whose value is
2050 specified by the real and imaginary parts REAL and IMAG.
2051 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2052 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2054 tree
2055 build_complex (tree type, tree real, tree imag)
2057 tree t = make_node (COMPLEX_CST);
2059 TREE_REALPART (t) = real;
2060 TREE_IMAGPART (t) = imag;
2061 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2062 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2063 return t;
2066 /* Build a complex (inf +- 0i), such as for the result of cproj.
2067 TYPE is the complex tree type of the result. If NEG is true, the
2068 imaginary zero is negative. */
2070 tree
2071 build_complex_inf (tree type, bool neg)
2073 REAL_VALUE_TYPE rinf, rzero = dconst0;
2075 real_inf (&rinf);
2076 rzero.sign = neg;
2077 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2078 build_real (TREE_TYPE (type), rzero));
2081 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2082 element is set to 1. In particular, this is 1 + i for complex types. */
2084 tree
2085 build_each_one_cst (tree type)
2087 if (TREE_CODE (type) == COMPLEX_TYPE)
2089 tree scalar = build_one_cst (TREE_TYPE (type));
2090 return build_complex (type, scalar, scalar);
2092 else
2093 return build_one_cst (type);
2096 /* Return a constant of arithmetic type TYPE which is the
2097 multiplicative identity of the set TYPE. */
2099 tree
2100 build_one_cst (tree type)
2102 switch (TREE_CODE (type))
2104 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2105 case POINTER_TYPE: case REFERENCE_TYPE:
2106 case OFFSET_TYPE:
2107 return build_int_cst (type, 1);
2109 case REAL_TYPE:
2110 return build_real (type, dconst1);
2112 case FIXED_POINT_TYPE:
2113 /* We can only generate 1 for accum types. */
2114 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2115 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2117 case VECTOR_TYPE:
2119 tree scalar = build_one_cst (TREE_TYPE (type));
2121 return build_vector_from_val (type, scalar);
2124 case COMPLEX_TYPE:
2125 return build_complex (type,
2126 build_one_cst (TREE_TYPE (type)),
2127 build_zero_cst (TREE_TYPE (type)));
2129 default:
2130 gcc_unreachable ();
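/* Editorial sketch (not part of the original tree.c): the multiplicative
   identity in a few representative types, checked with predicates defined
   later in this file.  */

static void ATTRIBUTE_UNUSED
example_build_one_cst (void)
{
  gcc_checking_assert (integer_onep (build_one_cst (integer_type_node)));
  gcc_checking_assert (real_onep (build_one_cst (double_type_node)));
  /* For a complex type the identity is 1 + 0i, so only the real part is 1.  */
  tree c = build_one_cst (complex_double_type_node);
  gcc_checking_assert (real_onep (TREE_REALPART (c))
		       && real_zerop (TREE_IMAGPART (c)));
}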
2134 /* Return an integer of type TYPE containing all 1's in as much precision as
2135 it contains, or a complex or vector whose subparts are such integers. */
2137 tree
2138 build_all_ones_cst (tree type)
2140 if (TREE_CODE (type) == COMPLEX_TYPE)
2142 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2143 return build_complex (type, scalar, scalar);
2145 else
2146 return build_minus_one_cst (type);
2149 /* Return a constant of arithmetic type TYPE which is the
2150 opposite of the multiplicative identity of the set TYPE. */
2152 tree
2153 build_minus_one_cst (tree type)
2155 switch (TREE_CODE (type))
2157 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2158 case POINTER_TYPE: case REFERENCE_TYPE:
2159 case OFFSET_TYPE:
2160 return build_int_cst (type, -1);
2162 case REAL_TYPE:
2163 return build_real (type, dconstm1);
2165 case FIXED_POINT_TYPE:
2166 /* We can only generate -1 for accum types. */
2167 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2168 return build_fixed (type,
2169 fixed_from_double_int (double_int_minus_one,
2170 SCALAR_TYPE_MODE (type)));
2172 case VECTOR_TYPE:
2174 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2176 return build_vector_from_val (type, scalar);
2179 case COMPLEX_TYPE:
2180 return build_complex (type,
2181 build_minus_one_cst (TREE_TYPE (type)),
2182 build_zero_cst (TREE_TYPE (type)));
2184 default:
2185 gcc_unreachable ();
2189 /* Build 0 constant of type TYPE. This is used by constructor folding
2190 and thus the constant should be represented in memory by
2191 zero(es). */
2193 tree
2194 build_zero_cst (tree type)
2196 switch (TREE_CODE (type))
2198 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2199 case POINTER_TYPE: case REFERENCE_TYPE:
2200 case OFFSET_TYPE: case NULLPTR_TYPE:
2201 return build_int_cst (type, 0);
2203 case REAL_TYPE:
2204 return build_real (type, dconst0);
2206 case FIXED_POINT_TYPE:
2207 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2209 case VECTOR_TYPE:
2211 tree scalar = build_zero_cst (TREE_TYPE (type));
2213 return build_vector_from_val (type, scalar);
2216 case COMPLEX_TYPE:
2218 tree zero = build_zero_cst (TREE_TYPE (type));
2220 return build_complex (type, zero, zero);
2223 default:
2224 if (!AGGREGATE_TYPE_P (type))
2225 return fold_convert (type, integer_zero_node);
2226 return build_constructor (type, NULL);
2231 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2233 tree
2234 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2236 tree t;
2237 size_t length = (offsetof (struct tree_binfo, base_binfos)
2238 + vec<tree, va_gc>::embedded_size (base_binfos));
2240 record_node_allocation_statistics (TREE_BINFO, length);
2242 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2244 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2246 TREE_SET_CODE (t, TREE_BINFO);
2248 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2250 return t;
2253 /* Create a CASE_LABEL_EXPR tree node and return it. */
2255 tree
2256 build_case_label (tree low_value, tree high_value, tree label_decl)
2258 tree t = make_node (CASE_LABEL_EXPR);
2260 TREE_TYPE (t) = void_type_node;
2261 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2263 CASE_LOW (t) = low_value;
2264 CASE_HIGH (t) = high_value;
2265 CASE_LABEL (t) = label_decl;
2266 CASE_CHAIN (t) = NULL_TREE;
2268 return t;
2271 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2272 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2273 The latter determines the length of the HOST_WIDE_INT vector. */
2275 tree
2276 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2278 tree t;
2279 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2280 + sizeof (struct tree_int_cst));
2282 gcc_assert (len);
2283 record_node_allocation_statistics (INTEGER_CST, length);
2285 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2287 TREE_SET_CODE (t, INTEGER_CST);
2288 TREE_INT_CST_NUNITS (t) = len;
2289 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2290 /* to_offset can only be applied to trees that are offset_int-sized
2291 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2292 must be exactly the precision of offset_int and so LEN is correct. */
2293 if (ext_len <= OFFSET_INT_ELTS)
2294 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2295 else
2296 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2298 TREE_CONSTANT (t) = 1;
2300 return t;
2303 /* Build a newly constructed TREE_VEC node of length LEN. */
2305 tree
2306 make_tree_vec (int len MEM_STAT_DECL)
2308 tree t;
2309 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2311 record_node_allocation_statistics (TREE_VEC, length);
2313 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2315 TREE_SET_CODE (t, TREE_VEC);
2316 TREE_VEC_LENGTH (t) = len;
2318 return t;
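/* Editorial sketch (not part of the original tree.c): a TREE_VEC is a
   fixed-length vector whose slots are filled in by the caller.  */

static tree ATTRIBUTE_UNUSED
example_make_tree_vec (void)
{
  tree v = make_tree_vec (3);
  for (int i = 0; i < 3; i++)
    TREE_VEC_ELT (v, i) = build_int_cst (integer_type_node, i);
  gcc_checking_assert (TREE_VEC_LENGTH (v) == 3);
  return v;
}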
2321 /* Grow a TREE_VEC node to new length LEN. */
2323 tree
2324 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2326 gcc_assert (TREE_CODE (v) == TREE_VEC);
2328 int oldlen = TREE_VEC_LENGTH (v);
2329 gcc_assert (len > oldlen);
2331 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2332 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2334 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2336 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2338 TREE_VEC_LENGTH (v) = len;
2340 return v;
2343 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2344 fixed, and scalar, complex or vector. */
2347 zerop (const_tree expr)
2349 return (integer_zerop (expr)
2350 || real_zerop (expr)
2351 || fixed_zerop (expr));
2354 /* Return 1 if EXPR is the integer constant zero or a complex constant
2355 of zero. */
2358 integer_zerop (const_tree expr)
2360 switch (TREE_CODE (expr))
2362 case INTEGER_CST:
2363 return wi::to_wide (expr) == 0;
2364 case COMPLEX_CST:
2365 return (integer_zerop (TREE_REALPART (expr))
2366 && integer_zerop (TREE_IMAGPART (expr)));
2367 case VECTOR_CST:
2368 return (VECTOR_CST_NPATTERNS (expr) == 1
2369 && VECTOR_CST_DUPLICATE_P (expr)
2370 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2371 default:
2372 return false;
2376 /* Return 1 if EXPR is the integer constant one or the corresponding
2377 complex constant. */
2380 integer_onep (const_tree expr)
2382 switch (TREE_CODE (expr))
2384 case INTEGER_CST:
2385 return wi::eq_p (wi::to_widest (expr), 1);
2386 case COMPLEX_CST:
2387 return (integer_onep (TREE_REALPART (expr))
2388 && integer_zerop (TREE_IMAGPART (expr)));
2389 case VECTOR_CST:
2390 return (VECTOR_CST_NPATTERNS (expr) == 1
2391 && VECTOR_CST_DUPLICATE_P (expr)
2392 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2393 default:
2394 return false;
2398 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2399 return 1 if every piece is the integer constant one. */
2402 integer_each_onep (const_tree expr)
2404 if (TREE_CODE (expr) == COMPLEX_CST)
2405 return (integer_onep (TREE_REALPART (expr))
2406 && integer_onep (TREE_IMAGPART (expr)));
2407 else
2408 return integer_onep (expr);
2411 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2412 it contains, or a complex or vector whose subparts are such integers. */
2415 integer_all_onesp (const_tree expr)
2417 if (TREE_CODE (expr) == COMPLEX_CST
2418 && integer_all_onesp (TREE_REALPART (expr))
2419 && integer_all_onesp (TREE_IMAGPART (expr)))
2420 return 1;
2422 else if (TREE_CODE (expr) == VECTOR_CST)
2423 return (VECTOR_CST_NPATTERNS (expr) == 1
2424 && VECTOR_CST_DUPLICATE_P (expr)
2425 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2427 else if (TREE_CODE (expr) != INTEGER_CST)
2428 return 0;
2430 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2431 == wi::to_wide (expr));
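/* Editorial sketch (not part of the original tree.c): "all ones" means
   every bit of the type's precision is set, so for the usual 8-bit
   unsigned char type (an assumption of this example) that is the value
   255; build_all_ones_cst produces such a constant directly.  */

static void ATTRIBUTE_UNUSED
example_integer_all_onesp (void)
{
  tree t = build_int_cst (unsigned_char_type_node, 255);
  gcc_checking_assert (integer_all_onesp (t));
  gcc_checking_assert
    (integer_all_onesp (build_all_ones_cst (unsigned_char_type_node)));
}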
2434 /* Return 1 if EXPR is the integer constant minus one. */
2437 integer_minus_onep (const_tree expr)
2439 if (TREE_CODE (expr) == COMPLEX_CST)
2440 return (integer_all_onesp (TREE_REALPART (expr))
2441 && integer_zerop (TREE_IMAGPART (expr)));
2442 else
2443 return integer_all_onesp (expr);
2446 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2447 one bit on). */
2450 integer_pow2p (const_tree expr)
2452 if (TREE_CODE (expr) == COMPLEX_CST
2453 && integer_pow2p (TREE_REALPART (expr))
2454 && integer_zerop (TREE_IMAGPART (expr)))
2455 return 1;
2457 if (TREE_CODE (expr) != INTEGER_CST)
2458 return 0;
2460 return wi::popcount (wi::to_wide (expr)) == 1;
2463 /* Return 1 if EXPR is an integer constant other than zero or a
2464 complex constant other than zero. */
2467 integer_nonzerop (const_tree expr)
2469 return ((TREE_CODE (expr) == INTEGER_CST
2470 && wi::to_wide (expr) != 0)
2471 || (TREE_CODE (expr) == COMPLEX_CST
2472 && (integer_nonzerop (TREE_REALPART (expr))
2473 || integer_nonzerop (TREE_IMAGPART (expr)))));
2476 /* Return 1 if EXPR is the integer constant one. For vector,
2477 return 1 if every piece is the integer constant minus one
2478 (representing the value TRUE). */
2481 integer_truep (const_tree expr)
2483 if (TREE_CODE (expr) == VECTOR_CST)
2484 return integer_all_onesp (expr);
2485 return integer_onep (expr);
2488 /* Return 1 if EXPR is the fixed-point constant zero. */
2491 fixed_zerop (const_tree expr)
2493 return (TREE_CODE (expr) == FIXED_CST
2494 && TREE_FIXED_CST (expr).data.is_zero ());
2497 /* Return the power of two represented by a tree node known to be a
2498 power of two. */
2501 tree_log2 (const_tree expr)
2503 if (TREE_CODE (expr) == COMPLEX_CST)
2504 return tree_log2 (TREE_REALPART (expr));
2506 return wi::exact_log2 (wi::to_wide (expr));
2509 /* Similar, but return the largest integer Y such that 2 ** Y is less
2510 than or equal to EXPR. */
2513 tree_floor_log2 (const_tree expr)
2515 if (TREE_CODE (expr) == COMPLEX_CST)
2516 return tree_log2 (TREE_REALPART (expr));
2518 return wi::floor_log2 (wi::to_wide (expr));
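/* Editorial sketch (not part of the original tree.c): tree_log2 is only
   meaningful for exact powers of two, while tree_floor_log2 rounds down
   for everything else.  */

static void ATTRIBUTE_UNUSED
example_tree_log2 (void)
{
  tree eight = build_int_cst (integer_type_node, 8);
  tree ten = build_int_cst (integer_type_node, 10);
  gcc_checking_assert (tree_log2 (eight) == 3);
  gcc_checking_assert (tree_floor_log2 (ten) == 3); /* 2**3 <= 10 < 2**4 */
}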
2521 /* Return number of known trailing zero bits in EXPR, or, if the value of
2522 EXPR is known to be zero, the precision of its type. */
2524 unsigned int
2525 tree_ctz (const_tree expr)
2527 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2528 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2529 return 0;
2531 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2532 switch (TREE_CODE (expr))
2534 case INTEGER_CST:
2535 ret1 = wi::ctz (wi::to_wide (expr));
2536 return MIN (ret1, prec);
2537 case SSA_NAME:
2538 ret1 = wi::ctz (get_nonzero_bits (expr));
2539 return MIN (ret1, prec);
2540 case PLUS_EXPR:
2541 case MINUS_EXPR:
2542 case BIT_IOR_EXPR:
2543 case BIT_XOR_EXPR:
2544 case MIN_EXPR:
2545 case MAX_EXPR:
2546 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2547 if (ret1 == 0)
2548 return ret1;
2549 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2550 return MIN (ret1, ret2);
2551 case POINTER_PLUS_EXPR:
2552 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2553 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2554 /* Second operand is sizetype, which could in theory be
2555 wider than the pointer's precision. Make sure we never
2556 return more than prec. */
2557 ret2 = MIN (ret2, prec);
2558 return MIN (ret1, ret2);
2559 case BIT_AND_EXPR:
2560 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2561 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2562 return MAX (ret1, ret2);
2563 case MULT_EXPR:
2564 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2565 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2566 return MIN (ret1 + ret2, prec);
2567 case LSHIFT_EXPR:
2568 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2569 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2570 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2572 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2573 return MIN (ret1 + ret2, prec);
2575 return ret1;
2576 case RSHIFT_EXPR:
2577 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2578 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2580 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2581 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2582 if (ret1 > ret2)
2583 return ret1 - ret2;
2585 return 0;
2586 case TRUNC_DIV_EXPR:
2587 case CEIL_DIV_EXPR:
2588 case FLOOR_DIV_EXPR:
2589 case ROUND_DIV_EXPR:
2590 case EXACT_DIV_EXPR:
2591 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2592 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2594 int l = tree_log2 (TREE_OPERAND (expr, 1));
2595 if (l >= 0)
2597 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2598 ret2 = l;
2599 if (ret1 > ret2)
2600 return ret1 - ret2;
2603 return 0;
2604 CASE_CONVERT:
2605 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2606 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2607 ret1 = prec;
2608 return MIN (ret1, prec);
2609 case SAVE_EXPR:
2610 return tree_ctz (TREE_OPERAND (expr, 0));
2611 case COND_EXPR:
2612 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2613 if (ret1 == 0)
2614 return 0;
2615 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2616 return MIN (ret1, ret2);
2617 case COMPOUND_EXPR:
2618 return tree_ctz (TREE_OPERAND (expr, 1));
2619 case ADDR_EXPR:
2620 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2621 if (ret1 > BITS_PER_UNIT)
2623 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2624 return MIN (ret1, prec);
2626 return 0;
2627 default:
2628 return 0;
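/* Editorial sketch (not part of the original tree.c): for a plain
   INTEGER_CST the result is just the number of trailing zero bits of the
   value; the cases above propagate that information through arithmetic,
   e.g. a multiplication adds the counts of its operands.  */

static void ATTRIBUTE_UNUSED
example_tree_ctz (void)
{
  /* 40 is 0b101000, so it has three trailing zero bits.  */
  tree forty = build_int_cst (integer_type_node, 40);
  gcc_checking_assert (tree_ctz (forty) == 3);
}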
2632 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2633 decimal float constants, so don't return 1 for them. */
2636 real_zerop (const_tree expr)
2638 switch (TREE_CODE (expr))
2640 case REAL_CST:
2641 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2642 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2643 case COMPLEX_CST:
2644 return real_zerop (TREE_REALPART (expr))
2645 && real_zerop (TREE_IMAGPART (expr));
2646 case VECTOR_CST:
2648 /* Don't simply check for a duplicate because the predicate
2649 accepts both +0.0 and -0.0. */
2650 unsigned count = vector_cst_encoded_nelts (expr);
2651 for (unsigned int i = 0; i < count; ++i)
2652 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
2653 return false;
2654 return true;
2656 default:
2657 return false;
2661 /* Return 1 if EXPR is the real constant one in real or complex form.
2662 Trailing zeroes matter for decimal float constants, so don't return
2663 1 for them. */
2666 real_onep (const_tree expr)
2668 switch (TREE_CODE (expr))
2670 case REAL_CST:
2671 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2672 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2673 case COMPLEX_CST:
2674 return real_onep (TREE_REALPART (expr))
2675 && real_zerop (TREE_IMAGPART (expr));
2676 case VECTOR_CST:
2677 return (VECTOR_CST_NPATTERNS (expr) == 1
2678 && VECTOR_CST_DUPLICATE_P (expr)
2679 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2680 default:
2681 return false;
2685 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2686 matter for decimal float constants, so don't return 1 for them. */
2689 real_minus_onep (const_tree expr)
2691 switch (TREE_CODE (expr))
2693 case REAL_CST:
2694 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2695 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2696 case COMPLEX_CST:
2697 return real_minus_onep (TREE_REALPART (expr))
2698 && real_zerop (TREE_IMAGPART (expr));
2699 case VECTOR_CST:
2700 return (VECTOR_CST_NPATTERNS (expr) == 1
2701 && VECTOR_CST_DUPLICATE_P (expr)
2702 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2703 default:
2704 return false;
2708 /* Nonzero if EXP is a constant or a cast of a constant. */
2711 really_constant_p (const_tree exp)
2713 /* This is not quite the same as STRIP_NOPS. It does more. */
2714 while (CONVERT_EXPR_P (exp)
2715 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2716 exp = TREE_OPERAND (exp, 0);
2717 return TREE_CONSTANT (exp);
2720 /* Return first list element whose TREE_VALUE is ELEM.
2721 Return 0 if ELEM is not in LIST. */
2723 tree
2724 value_member (tree elem, tree list)
2726 while (list)
2728 if (elem == TREE_VALUE (list))
2729 return list;
2730 list = TREE_CHAIN (list);
2732 return NULL_TREE;
2735 /* Return first list element whose TREE_PURPOSE is ELEM.
2736 Return 0 if ELEM is not in LIST. */
2738 tree
2739 purpose_member (const_tree elem, tree list)
2741 while (list)
2743 if (elem == TREE_PURPOSE (list))
2744 return list;
2745 list = TREE_CHAIN (list);
2747 return NULL_TREE;
2750 /* Return true if ELEM is in V. */
2752 bool
2753 vec_member (const_tree elem, vec<tree, va_gc> *v)
2755 unsigned ix;
2756 tree t;
2757 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2758 if (elem == t)
2759 return true;
2760 return false;
2763 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2764 NULL_TREE. */
2766 tree
2767 chain_index (int idx, tree chain)
2769 for (; chain && idx > 0; --idx)
2770 chain = TREE_CHAIN (chain);
2771 return chain;
2774 /* Return nonzero if ELEM is part of the chain CHAIN. */
2777 chain_member (const_tree elem, const_tree chain)
2779 while (chain)
2781 if (elem == chain)
2782 return 1;
2783 chain = DECL_CHAIN (chain);
2786 return 0;
2789 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2790 We expect a null pointer to mark the end of the chain.
2791 This is the Lisp primitive `length'. */
2794 list_length (const_tree t)
2796 const_tree p = t;
2797 #ifdef ENABLE_TREE_CHECKING
2798 const_tree q = t;
2799 #endif
2800 int len = 0;
2802 while (p)
2804 p = TREE_CHAIN (p);
2805 #ifdef ENABLE_TREE_CHECKING
2806 if (len % 2)
2807 q = TREE_CHAIN (q);
2808 gcc_assert (p != q);
2809 #endif
2810 len++;
2813 return len;
2816 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2817 UNION_TYPE TYPE, or NULL_TREE if none. */
2819 tree
2820 first_field (const_tree type)
2822 tree t = TYPE_FIELDS (type);
2823 while (t && TREE_CODE (t) != FIELD_DECL)
2824 t = TREE_CHAIN (t);
2825 return t;
2828 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2829 by modifying the last node in chain 1 to point to chain 2.
2830 This is the Lisp primitive `nconc'. */
2832 tree
2833 chainon (tree op1, tree op2)
2835 tree t1;
2837 if (!op1)
2838 return op2;
2839 if (!op2)
2840 return op1;
2842 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2843 continue;
2844 TREE_CHAIN (t1) = op2;
2846 #ifdef ENABLE_TREE_CHECKING
2848 tree t2;
2849 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2850 gcc_assert (t2 != t1);
2852 #endif
2854 return op1;
2857 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2859 tree
2860 tree_last (tree chain)
2862 tree next;
2863 if (chain)
2864 while ((next = TREE_CHAIN (chain)))
2865 chain = next;
2866 return chain;
2869 /* Reverse the order of elements in the chain T,
2870 and return the new head of the chain (old last element). */
2872 tree
2873 nreverse (tree t)
2875 tree prev = 0, decl, next;
2876 for (decl = t; decl; decl = next)
2878 /* We shouldn't be using this function to reverse BLOCK chains; we
2879 have blocks_nreverse for that. */
2880 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2881 next = TREE_CHAIN (decl);
2882 TREE_CHAIN (decl) = prev;
2883 prev = decl;
2885 return prev;
2888 /* Return a newly created TREE_LIST node whose
2889 purpose and value fields are PARM and VALUE. */
2891 tree
2892 build_tree_list (tree parm, tree value MEM_STAT_DECL)
2894 tree t = make_node (TREE_LIST PASS_MEM_STAT);
2895 TREE_PURPOSE (t) = parm;
2896 TREE_VALUE (t) = value;
2897 return t;
2900 /* Build a chain of TREE_LIST nodes from a vector. */
2902 tree
2903 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2905 tree ret = NULL_TREE;
2906 tree *pp = &ret;
2907 unsigned int i;
2908 tree t;
2909 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2911 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
2912 pp = &TREE_CHAIN (*pp);
2914 return ret;
2917 /* Return a newly created TREE_LIST node whose
2918 purpose and value fields are PURPOSE and VALUE
2919 and whose TREE_CHAIN is CHAIN. */
2921 tree
2922 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
2924 tree node;
2926 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2927 memset (node, 0, sizeof (struct tree_common));
2929 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2931 TREE_SET_CODE (node, TREE_LIST);
2932 TREE_CHAIN (node) = chain;
2933 TREE_PURPOSE (node) = purpose;
2934 TREE_VALUE (node) = value;
2935 return node;
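/* Editorial sketch (not part of the original tree.c): the TREE_LIST
   helpers above used together.  tree_cons pushes onto the front, nreverse
   restores insertion order, and chainon splices a second chain onto the
   end.  */

static void ATTRIBUTE_UNUSED
example_tree_list_helpers (void)
{
  tree list = NULL_TREE;
  for (int i = 1; i <= 3; i++)
    list = tree_cons (NULL_TREE, build_int_cst (integer_type_node, i), list);
  /* The list currently reads 3, 2, 1.  */
  list = nreverse (list);
  gcc_checking_assert (integer_onep (TREE_VALUE (list)));
  list = chainon (list, build_tree_list (NULL_TREE,
					 build_int_cst (integer_type_node, 4)));
  gcc_checking_assert (list_length (list) == 4);
}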
2938 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2939 trees. */
2941 vec<tree, va_gc> *
2942 ctor_to_vec (tree ctor)
2944 vec<tree, va_gc> *vec;
2945 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2946 unsigned int ix;
2947 tree val;
2949 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2950 vec->quick_push (val);
2952 return vec;
2955 /* Return the size nominally occupied by an object of type TYPE
2956 when it resides in memory. The value is measured in units of bytes,
2957 and its data type is that normally used for type sizes
2958 (which is the first type created by make_signed_type or
2959 make_unsigned_type). */
2961 tree
2962 size_in_bytes_loc (location_t loc, const_tree type)
2964 tree t;
2966 if (type == error_mark_node)
2967 return integer_zero_node;
2969 type = TYPE_MAIN_VARIANT (type);
2970 t = TYPE_SIZE_UNIT (type);
2972 if (t == 0)
2974 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
2975 return size_zero_node;
2978 return t;
2981 /* Return the size of TYPE (in bytes) as a wide integer
2982 or return -1 if the size can vary or is larger than an integer. */
2984 HOST_WIDE_INT
2985 int_size_in_bytes (const_tree type)
2987 tree t;
2989 if (type == error_mark_node)
2990 return 0;
2992 type = TYPE_MAIN_VARIANT (type);
2993 t = TYPE_SIZE_UNIT (type);
2995 if (t && tree_fits_uhwi_p (t))
2996 return TREE_INT_CST_LOW (t);
2997 else
2998 return -1;
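/* Editorial sketch (not part of the original tree.c): a complete scalar
   type has a known constant size, while a type without TYPE_SIZE_UNIT
   (for instance a freshly made, still incomplete RECORD_TYPE) reports
   -1.  */

static void ATTRIBUTE_UNUSED
example_int_size_in_bytes (void)
{
  gcc_checking_assert (int_size_in_bytes (char_type_node) == 1);
  tree incomplete = make_node (RECORD_TYPE);
  gcc_checking_assert (int_size_in_bytes (incomplete) == -1);
}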
3001 /* Return the maximum size of TYPE (in bytes) as a wide integer
3002 or return -1 if the size can vary or is larger than an integer. */
3004 HOST_WIDE_INT
3005 max_int_size_in_bytes (const_tree type)
3007 HOST_WIDE_INT size = -1;
3008 tree size_tree;
3010 /* If this is an array type, check for a possible MAX_SIZE attached. */
3012 if (TREE_CODE (type) == ARRAY_TYPE)
3014 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3016 if (size_tree && tree_fits_uhwi_p (size_tree))
3017 size = tree_to_uhwi (size_tree);
3020 /* If we still haven't been able to get a size, see if the language
3021 can compute a maximum size. */
3023 if (size == -1)
3025 size_tree = lang_hooks.types.max_size (type);
3027 if (size_tree && tree_fits_uhwi_p (size_tree))
3028 size = tree_to_uhwi (size_tree);
3031 return size;
3034 /* Return the bit position of FIELD, in bits from the start of the record.
3035 This is a tree of type bitsizetype. */
3037 tree
3038 bit_position (const_tree field)
3040 return bit_from_pos (DECL_FIELD_OFFSET (field),
3041 DECL_FIELD_BIT_OFFSET (field));
3044 /* Return the byte position of FIELD, in bytes from the start of the record.
3045 This is a tree of type sizetype. */
3047 tree
3048 byte_position (const_tree field)
3050 return byte_from_pos (DECL_FIELD_OFFSET (field),
3051 DECL_FIELD_BIT_OFFSET (field));
3054 /* Likewise, but return as an integer. It must be representable in
3055 that way (since it could be a signed value, we don't have the
3056 option of returning -1 like int_size_in_bytes can). */
3058 HOST_WIDE_INT
3059 int_byte_position (const_tree field)
3061 return tree_to_shwi (byte_position (field));
3064 /* Return the strictest alignment, in bits, that T is known to have. */
3066 unsigned int
3067 expr_align (const_tree t)
3069 unsigned int align0, align1;
3071 switch (TREE_CODE (t))
3073 CASE_CONVERT: case NON_LVALUE_EXPR:
3074 /* If we have conversions, we know that the alignment of the
3075 object must meet each of the alignments of the types. */
3076 align0 = expr_align (TREE_OPERAND (t, 0));
3077 align1 = TYPE_ALIGN (TREE_TYPE (t));
3078 return MAX (align0, align1);
3080 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3081 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3082 case CLEANUP_POINT_EXPR:
3083 /* These don't change the alignment of an object. */
3084 return expr_align (TREE_OPERAND (t, 0));
3086 case COND_EXPR:
3087 /* The best we can do is say that the alignment is the least aligned
3088 of the two arms. */
3089 align0 = expr_align (TREE_OPERAND (t, 1));
3090 align1 = expr_align (TREE_OPERAND (t, 2));
3091 return MIN (align0, align1);
3093 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3094 meaningfully, it's always 1. */
3095 case LABEL_DECL: case CONST_DECL:
3096 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3097 case FUNCTION_DECL:
3098 gcc_assert (DECL_ALIGN (t) != 0);
3099 return DECL_ALIGN (t);
3101 default:
3102 break;
3105 /* Otherwise take the alignment from that of the type. */
3106 return TYPE_ALIGN (TREE_TYPE (t));
3109 /* Return, as a tree node, the number of elements for TYPE (which is an
3110 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3112 tree
3113 array_type_nelts (const_tree type)
3115 tree index_type, min, max;
3117 /* If they did it with unspecified bounds, then we should have already
3118 given an error about it before we got here. */
3119 if (! TYPE_DOMAIN (type))
3120 return error_mark_node;
3122 index_type = TYPE_DOMAIN (type);
3123 min = TYPE_MIN_VALUE (index_type);
3124 max = TYPE_MAX_VALUE (index_type);
3126 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3127 if (!max)
3128 return error_mark_node;
3130 return (integer_zerop (min)
3131 ? max
3132 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
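/* Editorial sketch (not part of the original tree.c): for char[10] the
   domain is 0 .. 9, so array_type_nelts returns 9, i.e. the element count
   minus one, not the count itself.  */

static void ATTRIBUTE_UNUSED
example_array_type_nelts (void)
{
  tree domain = build_index_type (size_int (9));
  tree arr = build_array_type (char_type_node, domain);
  gcc_checking_assert (tree_to_shwi (array_type_nelts (arr)) == 9);
}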
3135 /* If arg is static -- a reference to an object in static storage -- then
3136 return the object. This is not the same as the C meaning of `static'.
3137 If arg isn't static, return NULL. */
3139 tree
3140 staticp (tree arg)
3142 switch (TREE_CODE (arg))
3144 case FUNCTION_DECL:
3145 /* Nested functions are static, even though taking their address will
3146 involve a trampoline as we unnest the nested function and create
3147 the trampoline on the tree level. */
3148 return arg;
3150 case VAR_DECL:
3151 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3152 && ! DECL_THREAD_LOCAL_P (arg)
3153 && ! DECL_DLLIMPORT_P (arg)
3154 ? arg : NULL);
3156 case CONST_DECL:
3157 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3158 ? arg : NULL);
3160 case CONSTRUCTOR:
3161 return TREE_STATIC (arg) ? arg : NULL;
3163 case LABEL_DECL:
3164 case STRING_CST:
3165 return arg;
3167 case COMPONENT_REF:
3168 /* If the thing being referenced is not a field, then it is
3169 something language specific. */
3170 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3172 /* If we are referencing a bitfield, we can't evaluate an
3173 ADDR_EXPR at compile time and so it isn't a constant. */
3174 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3175 return NULL;
3177 return staticp (TREE_OPERAND (arg, 0));
3179 case BIT_FIELD_REF:
3180 return NULL;
3182 case INDIRECT_REF:
3183 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3185 case ARRAY_REF:
3186 case ARRAY_RANGE_REF:
3187 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3188 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3189 return staticp (TREE_OPERAND (arg, 0));
3190 else
3191 return NULL;
3193 case COMPOUND_LITERAL_EXPR:
3194 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3196 default:
3197 return NULL;
3204 /* Return whether OP is a DECL whose address is function-invariant. */
3206 bool
3207 decl_address_invariant_p (const_tree op)
3209 /* The conditions below are slightly less strict than the one in
3210 staticp. */
3212 switch (TREE_CODE (op))
3214 case PARM_DECL:
3215 case RESULT_DECL:
3216 case LABEL_DECL:
3217 case FUNCTION_DECL:
3218 return true;
3220 case VAR_DECL:
3221 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3222 || DECL_THREAD_LOCAL_P (op)
3223 || DECL_CONTEXT (op) == current_function_decl
3224 || decl_function_context (op) == current_function_decl)
3225 return true;
3226 break;
3228 case CONST_DECL:
3229 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3230 || decl_function_context (op) == current_function_decl)
3231 return true;
3232 break;
3234 default:
3235 break;
3238 return false;
3241 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3243 bool
3244 decl_address_ip_invariant_p (const_tree op)
3246 /* The conditions below are slightly less strict than the one in
3247 staticp. */
3249 switch (TREE_CODE (op))
3251 case LABEL_DECL:
3252 case FUNCTION_DECL:
3253 case STRING_CST:
3254 return true;
3256 case VAR_DECL:
3257 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3258 && !DECL_DLLIMPORT_P (op))
3259 || DECL_THREAD_LOCAL_P (op))
3260 return true;
3261 break;
3263 case CONST_DECL:
3264 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3265 return true;
3266 break;
3268 default:
3269 break;
3272 return false;
3276 /* Return true if T is function-invariant (internal function, does
3277 not handle arithmetic; that's handled in skip_simple_arithmetic and
3278 tree_invariant_p). */
3280 static bool
3281 tree_invariant_p_1 (tree t)
3283 tree op;
3285 if (TREE_CONSTANT (t)
3286 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3287 return true;
3289 switch (TREE_CODE (t))
3291 case SAVE_EXPR:
3292 return true;
3294 case ADDR_EXPR:
3295 op = TREE_OPERAND (t, 0);
3296 while (handled_component_p (op))
3298 switch (TREE_CODE (op))
3300 case ARRAY_REF:
3301 case ARRAY_RANGE_REF:
3302 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3303 || TREE_OPERAND (op, 2) != NULL_TREE
3304 || TREE_OPERAND (op, 3) != NULL_TREE)
3305 return false;
3306 break;
3308 case COMPONENT_REF:
3309 if (TREE_OPERAND (op, 2) != NULL_TREE)
3310 return false;
3311 break;
3313 default:;
3315 op = TREE_OPERAND (op, 0);
3318 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3320 default:
3321 break;
3324 return false;
3327 /* Return true if T is function-invariant. */
3329 bool
3330 tree_invariant_p (tree t)
3332 tree inner = skip_simple_arithmetic (t);
3333 return tree_invariant_p_1 (inner);
3336 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3337 Do this to any expression which may be used in more than one place,
3338 but must be evaluated only once.
3340 Normally, expand_expr would reevaluate the expression each time.
3341 Calling save_expr produces something that is evaluated and recorded
3342 the first time expand_expr is called on it. Subsequent calls to
3343 expand_expr just reuse the recorded value.
3345 The call to expand_expr that generates code that actually computes
3346 the value is the first call *at compile time*. Subsequent calls
3347 *at compile time* generate code to use the saved value.
3348 This produces correct result provided that *at run time* control
3349 always flows through the insns made by the first expand_expr
3350 before reaching the other places where the save_expr was evaluated.
3351 You, the caller of save_expr, must make sure this is so.
3353 Constants, and certain read-only nodes, are returned with no
3354 SAVE_EXPR because that is safe. Expressions containing placeholders
3355 are not touched; see tree.def for an explanation of what these
3356 are used for. */
3358 tree
3359 save_expr (tree expr)
3361 tree inner;
3363 /* If the tree evaluates to a constant, then we don't want to hide that
3364 fact (i.e. this allows further folding, and direct checks for constants).
3365 However, a read-only object that has side effects cannot be bypassed.
3366 Since it is no problem to reevaluate literals, we just return the
3367 literal node. */
3368 inner = skip_simple_arithmetic (expr);
3369 if (TREE_CODE (inner) == ERROR_MARK)
3370 return inner;
3372 if (tree_invariant_p_1 (inner))
3373 return expr;
3375 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3376 it means that the size or offset of some field of an object depends on
3377 the value within another field.
3379 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3380 and some variable since it would then need to be both evaluated once and
3381 evaluated more than once. Front-ends must ensure this case cannot
3382 happen by surrounding any such subexpressions in their own SAVE_EXPR
3383 and forcing evaluation at the proper time. */
3384 if (contains_placeholder_p (inner))
3385 return expr;
3387 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3389 /* This expression might be placed ahead of a jump to ensure that the
3390 value was computed on both sides of the jump. So make sure it isn't
3391 eliminated as dead. */
3392 TREE_SIDE_EFFECTS (expr) = 1;
3393 return expr;
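/* Editorial sketch (not part of the original tree.c): invariant operands
   come back unchanged, and anything else is wrapped so that reusing the
   result does not re-evaluate the operand.  This example assumes EXPR has
   arithmetic type.  */

static tree ATTRIBUTE_UNUSED
example_save_expr (tree expr)
{
  gcc_checking_assert (save_expr (integer_one_node) == integer_one_node);
  tree saved = save_expr (expr);
  /* Both uses of SAVED refer to a single evaluation of EXPR.  */
  return build2 (PLUS_EXPR, TREE_TYPE (saved), saved, saved);
}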
3396 /* Look inside EXPR into any simple arithmetic operations. Return the
3397 outermost non-arithmetic or non-invariant node. */
3399 tree
3400 skip_simple_arithmetic (tree expr)
3402 /* We don't care about whether this can be used as an lvalue in this
3403 context. */
3404 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3405 expr = TREE_OPERAND (expr, 0);
3407 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3408 a constant, it will be more efficient to not make another SAVE_EXPR since
3409 it will allow better simplification and GCSE will be able to merge the
3410 computations if they actually occur. */
3411 while (true)
3413 if (UNARY_CLASS_P (expr))
3414 expr = TREE_OPERAND (expr, 0);
3415 else if (BINARY_CLASS_P (expr))
3417 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3418 expr = TREE_OPERAND (expr, 0);
3419 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3420 expr = TREE_OPERAND (expr, 1);
3421 else
3422 break;
3424 else
3425 break;
3428 return expr;
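/* Editorial sketch (not part of the original tree.c): for "v + 4" the
   constant addend is simple arithmetic, so the walk returns the decl
   itself.  The decl built here is purely illustrative.  */

static void ATTRIBUTE_UNUSED
example_skip_simple_arithmetic (void)
{
  tree v = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier ("v"),
		       integer_type_node);
  tree sum = build2 (PLUS_EXPR, integer_type_node, v,
		     build_int_cst (integer_type_node, 4));
  gcc_checking_assert (skip_simple_arithmetic (sum) == v);
}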
3431 /* Look inside EXPR into simple arithmetic operations involving constants.
3432 Return the outermost non-arithmetic or non-constant node. */
3434 tree
3435 skip_simple_constant_arithmetic (tree expr)
3437 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3438 expr = TREE_OPERAND (expr, 0);
3440 while (true)
3442 if (UNARY_CLASS_P (expr))
3443 expr = TREE_OPERAND (expr, 0);
3444 else if (BINARY_CLASS_P (expr))
3446 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3447 expr = TREE_OPERAND (expr, 0);
3448 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3449 expr = TREE_OPERAND (expr, 1);
3450 else
3451 break;
3453 else
3454 break;
3457 return expr;
3460 /* Return which tree structure is used by T. */
3462 enum tree_node_structure_enum
3463 tree_node_structure (const_tree t)
3465 const enum tree_code code = TREE_CODE (t);
3466 return tree_node_structure_for_code (code);
3469 /* Set various status flags when building a CALL_EXPR object T. */
3471 static void
3472 process_call_operands (tree t)
3474 bool side_effects = TREE_SIDE_EFFECTS (t);
3475 bool read_only = false;
3476 int i = call_expr_flags (t);
3478 /* Calls have side-effects, except those to const or pure functions. */
3479 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3480 side_effects = true;
3481 /* Propagate TREE_READONLY of arguments for const functions. */
3482 if (i & ECF_CONST)
3483 read_only = true;
3485 if (!side_effects || read_only)
3486 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3488 tree op = TREE_OPERAND (t, i);
3489 if (op && TREE_SIDE_EFFECTS (op))
3490 side_effects = true;
3491 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3492 read_only = false;
3495 TREE_SIDE_EFFECTS (t) = side_effects;
3496 TREE_READONLY (t) = read_only;
3499 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3500 size or offset that depends on a field within a record. */
3502 bool
3503 contains_placeholder_p (const_tree exp)
3505 enum tree_code code;
3507 if (!exp)
3508 return 0;
3510 code = TREE_CODE (exp);
3511 if (code == PLACEHOLDER_EXPR)
3512 return 1;
3514 switch (TREE_CODE_CLASS (code))
3516 case tcc_reference:
3517 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3518 position computations since they will be converted into a
3519 WITH_RECORD_EXPR involving the reference, which we assume
3520 here will be valid. */
3521 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3523 case tcc_exceptional:
3524 if (code == TREE_LIST)
3525 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3526 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3527 break;
3529 case tcc_unary:
3530 case tcc_binary:
3531 case tcc_comparison:
3532 case tcc_expression:
3533 switch (code)
3535 case COMPOUND_EXPR:
3536 /* Ignoring the first operand isn't quite right, but works best. */
3537 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3539 case COND_EXPR:
3540 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3541 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3542 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3544 case SAVE_EXPR:
3545 /* The save_expr function never wraps anything containing
3546 a PLACEHOLDER_EXPR. */
3547 return 0;
3549 default:
3550 break;
3553 switch (TREE_CODE_LENGTH (code))
3555 case 1:
3556 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3557 case 2:
3558 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3559 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3560 default:
3561 return 0;
3564 case tcc_vl_exp:
3565 switch (code)
3567 case CALL_EXPR:
3569 const_tree arg;
3570 const_call_expr_arg_iterator iter;
3571 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3572 if (CONTAINS_PLACEHOLDER_P (arg))
3573 return 1;
3574 return 0;
3576 default:
3577 return 0;
3580 default:
3581 return 0;
3583 return 0;
3586 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3587 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3588 field positions. */
3590 static bool
3591 type_contains_placeholder_1 (const_tree type)
3593 /* If the size contains a placeholder or the parent type (component type in
3594 the case of arrays) type involves a placeholder, this type does. */
3595 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3596 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3597 || (!POINTER_TYPE_P (type)
3598 && TREE_TYPE (type)
3599 && type_contains_placeholder_p (TREE_TYPE (type))))
3600 return true;
3602 /* Now do type-specific checks. Note that the last part of the check above
3603 greatly limits what we have to do below. */
3604 switch (TREE_CODE (type))
3606 case VOID_TYPE:
3607 case POINTER_BOUNDS_TYPE:
3608 case COMPLEX_TYPE:
3609 case ENUMERAL_TYPE:
3610 case BOOLEAN_TYPE:
3611 case POINTER_TYPE:
3612 case OFFSET_TYPE:
3613 case REFERENCE_TYPE:
3614 case METHOD_TYPE:
3615 case FUNCTION_TYPE:
3616 case VECTOR_TYPE:
3617 case NULLPTR_TYPE:
3618 return false;
3620 case INTEGER_TYPE:
3621 case REAL_TYPE:
3622 case FIXED_POINT_TYPE:
3623 /* Here we just check the bounds. */
3624 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3625 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3627 case ARRAY_TYPE:
3628 /* We have already checked the component type above, so just check
3629 the domain type. Flexible array members have a null domain. */
3630 return TYPE_DOMAIN (type) ?
3631 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3633 case RECORD_TYPE:
3634 case UNION_TYPE:
3635 case QUAL_UNION_TYPE:
3637 tree field;
3639 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3640 if (TREE_CODE (field) == FIELD_DECL
3641 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3642 || (TREE_CODE (type) == QUAL_UNION_TYPE
3643 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3644 || type_contains_placeholder_p (TREE_TYPE (field))))
3645 return true;
3647 return false;
3650 default:
3651 gcc_unreachable ();
3655 /* Wrapper around above function used to cache its result. */
3657 bool
3658 type_contains_placeholder_p (tree type)
3660 bool result;
3662 /* If the contains_placeholder_bits field has been initialized,
3663 then we know the answer. */
3664 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3665 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3667 /* Indicate that we've seen this type node, and the answer is false.
3668 This is what we want to return if we run into recursion via fields. */
3669 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3671 /* Compute the real value. */
3672 result = type_contains_placeholder_1 (type);
3674 /* Store the real value. */
3675 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3677 return result;
3680 /* Push tree EXP onto vector QUEUE if it is not already present. */
3682 static void
3683 push_without_duplicates (tree exp, vec<tree> *queue)
3685 unsigned int i;
3686 tree iter;
3688 FOR_EACH_VEC_ELT (*queue, i, iter)
3689 if (simple_cst_equal (iter, exp) == 1)
3690 break;
3692 if (!iter)
3693 queue->safe_push (exp);
3696 /* Given a tree EXP, find all occurrences of references to fields
3697 in a PLACEHOLDER_EXPR and place them in vector REFS without
3698 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3699 we assume here that EXP contains only arithmetic expressions
3700 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3701 argument list. */
3703 void
3704 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3706 enum tree_code code = TREE_CODE (exp);
3707 tree inner;
3708 int i;
3710 /* We handle TREE_LIST and COMPONENT_REF separately. */
3711 if (code == TREE_LIST)
3713 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3714 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3716 else if (code == COMPONENT_REF)
3718 for (inner = TREE_OPERAND (exp, 0);
3719 REFERENCE_CLASS_P (inner);
3720 inner = TREE_OPERAND (inner, 0))
3723 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3724 push_without_duplicates (exp, refs);
3725 else
3726 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3728 else
3729 switch (TREE_CODE_CLASS (code))
3731 case tcc_constant:
3732 break;
3734 case tcc_declaration:
3735 /* Variables allocated to static storage can stay. */
3736 if (!TREE_STATIC (exp))
3737 push_without_duplicates (exp, refs);
3738 break;
3740 case tcc_expression:
3741 /* This is the pattern built in ada/make_aligning_type. */
3742 if (code == ADDR_EXPR
3743 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3745 push_without_duplicates (exp, refs);
3746 break;
3749 /* Fall through. */
3751 case tcc_exceptional:
3752 case tcc_unary:
3753 case tcc_binary:
3754 case tcc_comparison:
3755 case tcc_reference:
3756 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3757 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3758 break;
3760 case tcc_vl_exp:
3761 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3762 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3763 break;
3765 default:
3766 gcc_unreachable ();
3770 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3771 return a tree with all occurrences of references to F in a
3772 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3773 CONST_DECLs. Note that we assume here that EXP contains only
3774 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3775 occurring only in their argument list. */
3777 tree
3778 substitute_in_expr (tree exp, tree f, tree r)
3780 enum tree_code code = TREE_CODE (exp);
3781 tree op0, op1, op2, op3;
3782 tree new_tree;
3784 /* We handle TREE_LIST and COMPONENT_REF separately. */
3785 if (code == TREE_LIST)
3787 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3788 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3789 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3790 return exp;
3792 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3794 else if (code == COMPONENT_REF)
3796 tree inner;
3798 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3799 and it is the right field, replace it with R. */
3800 for (inner = TREE_OPERAND (exp, 0);
3801 REFERENCE_CLASS_P (inner);
3802 inner = TREE_OPERAND (inner, 0))
3805 /* The field. */
3806 op1 = TREE_OPERAND (exp, 1);
3808 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3809 return r;
3811 /* If this expression hasn't been completed yet, leave it alone. */
3812 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3813 return exp;
3815 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3816 if (op0 == TREE_OPERAND (exp, 0))
3817 return exp;
3819 new_tree
3820 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3822 else
3823 switch (TREE_CODE_CLASS (code))
3825 case tcc_constant:
3826 return exp;
3828 case tcc_declaration:
3829 if (exp == f)
3830 return r;
3831 else
3832 return exp;
3834 case tcc_expression:
3835 if (exp == f)
3836 return r;
3838 /* Fall through. */
3840 case tcc_exceptional:
3841 case tcc_unary:
3842 case tcc_binary:
3843 case tcc_comparison:
3844 case tcc_reference:
3845 switch (TREE_CODE_LENGTH (code))
3847 case 0:
3848 return exp;
3850 case 1:
3851 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3852 if (op0 == TREE_OPERAND (exp, 0))
3853 return exp;
3855 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3856 break;
3858 case 2:
3859 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3860 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3862 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3863 return exp;
3865 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3866 break;
3868 case 3:
3869 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3870 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3871 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3873 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3874 && op2 == TREE_OPERAND (exp, 2))
3875 return exp;
3877 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3878 break;
3880 case 4:
3881 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3882 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3883 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3884 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3886 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3887 && op2 == TREE_OPERAND (exp, 2)
3888 && op3 == TREE_OPERAND (exp, 3))
3889 return exp;
3891 new_tree
3892 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3893 break;
3895 default:
3896 gcc_unreachable ();
3898 break;
3900 case tcc_vl_exp:
3902 int i;
3904 new_tree = NULL_TREE;
3906 /* If we are trying to replace F with a constant or with another
3907 instance of one of the arguments of the call, inline back
3908 functions that do nothing other than compute a value from
3909 the arguments they are passed. This makes it possible to
3910 fold partially or entirely the replacement expression. */
3911 if (code == CALL_EXPR)
3913 bool maybe_inline = false;
3914 if (CONSTANT_CLASS_P (r))
3915 maybe_inline = true;
3916 else
3917 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
3918 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
3920 maybe_inline = true;
3921 break;
3923 if (maybe_inline)
3925 tree t = maybe_inline_call_in_expr (exp);
3926 if (t)
3927 return SUBSTITUTE_IN_EXPR (t, f, r);
3931 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3933 tree op = TREE_OPERAND (exp, i);
3934 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3935 if (new_op != op)
3937 if (!new_tree)
3938 new_tree = copy_node (exp);
3939 TREE_OPERAND (new_tree, i) = new_op;
3943 if (new_tree)
3945 new_tree = fold (new_tree);
3946 if (TREE_CODE (new_tree) == CALL_EXPR)
3947 process_call_operands (new_tree);
3949 else
3950 return exp;
3952 break;
3954 default:
3955 gcc_unreachable ();
3958 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3960 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3961 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3963 return new_tree;
3966 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3967 for it within OBJ, a tree that is an object or a chain of references. */
3969 tree
3970 substitute_placeholder_in_expr (tree exp, tree obj)
3972 enum tree_code code = TREE_CODE (exp);
3973 tree op0, op1, op2, op3;
3974 tree new_tree;
3976 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3977 in the chain of OBJ. */
3978 if (code == PLACEHOLDER_EXPR)
3980 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3981 tree elt;
3983 for (elt = obj; elt != 0;
3984 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3985 || TREE_CODE (elt) == COND_EXPR)
3986 ? TREE_OPERAND (elt, 1)
3987 : (REFERENCE_CLASS_P (elt)
3988 || UNARY_CLASS_P (elt)
3989 || BINARY_CLASS_P (elt)
3990 || VL_EXP_CLASS_P (elt)
3991 || EXPRESSION_CLASS_P (elt))
3992 ? TREE_OPERAND (elt, 0) : 0))
3993 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3994 return elt;
3996 for (elt = obj; elt != 0;
3997 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3998 || TREE_CODE (elt) == COND_EXPR)
3999 ? TREE_OPERAND (elt, 1)
4000 : (REFERENCE_CLASS_P (elt)
4001 || UNARY_CLASS_P (elt)
4002 || BINARY_CLASS_P (elt)
4003 || VL_EXP_CLASS_P (elt)
4004 || EXPRESSION_CLASS_P (elt))
4005 ? TREE_OPERAND (elt, 0) : 0))
4006 if (POINTER_TYPE_P (TREE_TYPE (elt))
4007 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4008 == need_type))
4009 return fold_build1 (INDIRECT_REF, need_type, elt);
4011 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4012 survives until RTL generation, there will be an error. */
4013 return exp;
4016 /* TREE_LIST is special because we need to look at TREE_VALUE
4017 and TREE_CHAIN, not TREE_OPERANDS. */
4018 else if (code == TREE_LIST)
4020 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4021 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4022 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4023 return exp;
4025 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4027 else
4028 switch (TREE_CODE_CLASS (code))
4030 case tcc_constant:
4031 case tcc_declaration:
4032 return exp;
4034 case tcc_exceptional:
4035 case tcc_unary:
4036 case tcc_binary:
4037 case tcc_comparison:
4038 case tcc_expression:
4039 case tcc_reference:
4040 case tcc_statement:
4041 switch (TREE_CODE_LENGTH (code))
4043 case 0:
4044 return exp;
4046 case 1:
4047 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4048 if (op0 == TREE_OPERAND (exp, 0))
4049 return exp;
4051 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4052 break;
4054 case 2:
4055 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4056 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4058 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4059 return exp;
4061 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4062 break;
4064 case 3:
4065 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4066 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4067 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4069 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4070 && op2 == TREE_OPERAND (exp, 2))
4071 return exp;
4073 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4074 break;
4076 case 4:
4077 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4078 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4079 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4080 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4082 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4083 && op2 == TREE_OPERAND (exp, 2)
4084 && op3 == TREE_OPERAND (exp, 3))
4085 return exp;
4087 new_tree
4088 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4089 break;
4091 default:
4092 gcc_unreachable ();
4094 break;
4096 case tcc_vl_exp:
4098 int i;
4100 new_tree = NULL_TREE;
4102 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4104 tree op = TREE_OPERAND (exp, i);
4105 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4106 if (new_op != op)
4108 if (!new_tree)
4109 new_tree = copy_node (exp);
4110 TREE_OPERAND (new_tree, i) = new_op;
4114 if (new_tree)
4116 new_tree = fold (new_tree);
4117 if (TREE_CODE (new_tree) == CALL_EXPR)
4118 process_call_operands (new_tree);
4120 else
4121 return exp;
4123 break;
4125 default:
4126 gcc_unreachable ();
4129 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4131 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4132 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4134 return new_tree;
4138 /* Subroutine of stabilize_reference; this is called for subtrees of
4139 references. Any expression with side-effects must be put in a SAVE_EXPR
4140 to ensure that it is only evaluated once.
4142 We don't put SAVE_EXPR nodes around everything, because assigning very
4143 simple expressions to temporaries causes us to miss good opportunities
4144 for optimizations. Among other things, the opportunity to fold in the
4145 addition of a constant into an addressing mode often gets lost, e.g.
4146 "y[i+1] += x;". In general, we take the approach that we should not make
4147 an assignment unless we are forced into it - i.e., that any non-side effect
4148 operator should be allowed, and that cse should take care of coalescing
4149 multiple utterances of the same expression should that prove fruitful. */
4151 static tree
4152 stabilize_reference_1 (tree e)
4154 tree result;
4155 enum tree_code code = TREE_CODE (e);
4157 /* We cannot ignore const expressions, because the expression might be
4158 a reference to a const array whose index contains side-effects. But we
4159 can ignore things that are actually constant or that have already been
4160 handled by this function. */
4162 if (tree_invariant_p (e))
4163 return e;
4165 switch (TREE_CODE_CLASS (code))
4167 case tcc_exceptional:
4168 case tcc_type:
4169 case tcc_declaration:
4170 case tcc_comparison:
4171 case tcc_statement:
4172 case tcc_expression:
4173 case tcc_reference:
4174 case tcc_vl_exp:
4175 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4176 so that it will only be evaluated once. */
4177 /* The reference (r) and comparison (<) classes could be handled as
4178 below, but it is generally faster to only evaluate them once. */
4179 if (TREE_SIDE_EFFECTS (e))
4180 return save_expr (e);
4181 return e;
4183 case tcc_constant:
4184 /* Constants need no processing. In fact, we should never reach
4185 here. */
4186 return e;
4188 case tcc_binary:
4189 /* Division is slow and tends to be compiled with jumps,
4190 especially the division by powers of 2 that is often
4191 found inside of an array reference. So do it just once. */
4192 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4193 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4194 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4195 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4196 return save_expr (e);
4197 /* Recursively stabilize each operand. */
4198 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4199 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4200 break;
4202 case tcc_unary:
4203 /* Recursively stabilize each operand. */
4204 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4205 break;
4207 default:
4208 gcc_unreachable ();
4211 TREE_TYPE (result) = TREE_TYPE (e);
4212 TREE_READONLY (result) = TREE_READONLY (e);
4213 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4214 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4216 return result;
4219 /* Stabilize a reference so that we can use it any number of times
4220 without causing its operands to be evaluated more than once.
4221 Returns the stabilized reference. This works by means of save_expr,
4222 so see the caveats in the comments about save_expr.
4224 Also allows conversion expressions whose operands are references.
4225 Any other kind of expression is returned unchanged. */
4227 tree
4228 stabilize_reference (tree ref)
4230 tree result;
4231 enum tree_code code = TREE_CODE (ref);
4233 switch (code)
4235 case VAR_DECL:
4236 case PARM_DECL:
4237 case RESULT_DECL:
4238 /* No action is needed in this case. */
4239 return ref;
4241 CASE_CONVERT:
4242 case FLOAT_EXPR:
4243 case FIX_TRUNC_EXPR:
4244 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4245 break;
4247 case INDIRECT_REF:
4248 result = build_nt (INDIRECT_REF,
4249 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4250 break;
4252 case COMPONENT_REF:
4253 result = build_nt (COMPONENT_REF,
4254 stabilize_reference (TREE_OPERAND (ref, 0)),
4255 TREE_OPERAND (ref, 1), NULL_TREE);
4256 break;
4258 case BIT_FIELD_REF:
4259 result = build_nt (BIT_FIELD_REF,
4260 stabilize_reference (TREE_OPERAND (ref, 0)),
4261 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4262 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4263 break;
4265 case ARRAY_REF:
4266 result = build_nt (ARRAY_REF,
4267 stabilize_reference (TREE_OPERAND (ref, 0)),
4268 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4269 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4270 break;
4272 case ARRAY_RANGE_REF:
4273 result = build_nt (ARRAY_RANGE_REF,
4274 stabilize_reference (TREE_OPERAND (ref, 0)),
4275 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4276 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4277 break;
4279 case COMPOUND_EXPR:
4280 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4281 it wouldn't be ignored. This matters when dealing with
4282 volatiles. */
4283 return stabilize_reference_1 (ref);
4285 /* If arg isn't a kind of lvalue we recognize, make no change.
4286 Caller should recognize the error for an invalid lvalue. */
4287 default:
4288 return ref;
4290 case ERROR_MARK:
4291 return error_mark_node;
4294 TREE_TYPE (result) = TREE_TYPE (ref);
4295 TREE_READONLY (result) = TREE_READONLY (ref);
4296 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4297 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4299 return result;
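/* Illustrative usage sketch ("lhs" and "x" are hypothetical trees; "lhs" is
   imagined as an ARRAY_REF whose index has side effects, e.g. a[i++]):

     tree stable = stabilize_reference (lhs);
     tree rmw = build2 (MODIFY_EXPR, TREE_TYPE (stable), stable,
			build2 (PLUS_EXPR, TREE_TYPE (stable), stable, x));

   Because the side-effecting index was wrapped in a SAVE_EXPR, using STABLE
   twice in the read-modify-write evaluates i++ only once.  */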
4302 /* Low-level constructors for expressions. */
4304 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4305 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4307 void
4308 recompute_tree_invariant_for_addr_expr (tree t)
4310 tree node;
4311 bool tc = true, se = false;
4313 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4315 /* We started out assuming this address is both invariant and constant, and
4316 that it does not have side effects. Now go down any handled components
4317 and see if any of them involve offsets that are either non-constant or
4318 non-invariant. Also check for side-effects.
4320 ??? Note that this code makes no attempt to deal with the case where
4321 taking the address of something causes a copy due to misalignment. */
4323 #define UPDATE_FLAGS(NODE) \
4324 do { tree _node = (NODE); \
4325 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4326 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4328 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4329 node = TREE_OPERAND (node, 0))
4331 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4332 array reference (probably made temporarily by the G++ front end),
4333 so ignore all the operands. */
4334 if ((TREE_CODE (node) == ARRAY_REF
4335 || TREE_CODE (node) == ARRAY_RANGE_REF)
4336 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4338 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4339 if (TREE_OPERAND (node, 2))
4340 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4341 if (TREE_OPERAND (node, 3))
4342 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4344 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4345 FIELD_DECL, apparently. The G++ front end can put something else
4346 there, at least temporarily. */
4347 else if (TREE_CODE (node) == COMPONENT_REF
4348 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4350 if (TREE_OPERAND (node, 2))
4351 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4355 node = lang_hooks.expr_to_decl (node, &tc, &se);
4357 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4358 the address, since &(*a)->b is a form of addition. If it's a constant, the
4359 address is constant too. If it's a decl, its address is constant if the
4360 decl is static. Everything else is not constant and, furthermore,
4361 taking the address of a volatile variable is not volatile. */
4362 if (TREE_CODE (node) == INDIRECT_REF
4363 || TREE_CODE (node) == MEM_REF)
4364 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4365 else if (CONSTANT_CLASS_P (node))
4367 else if (DECL_P (node))
4368 tc &= (staticp (node) != NULL_TREE);
4369 else
4371 tc = false;
4372 se |= TREE_SIDE_EFFECTS (node);
4376 TREE_CONSTANT (t) = tc;
4377 TREE_SIDE_EFFECTS (t) = se;
4378 #undef UPDATE_FLAGS
4381 /* Build an expression of code CODE, data type TYPE, and operands as
4382 specified. Expressions and reference nodes can be created this way.
4383 Constants, decls, types and misc nodes cannot be.
4385 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4386 enough for all extant tree codes. */
4388 tree
4389 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4391 tree t;
4393 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4395 t = make_node (code PASS_MEM_STAT);
4396 TREE_TYPE (t) = tt;
4398 return t;
4401 tree
4402 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4404 int length = sizeof (struct tree_exp);
4405 tree t;
4407 record_node_allocation_statistics (code, length);
4409 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4411 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4413 memset (t, 0, sizeof (struct tree_common));
4415 TREE_SET_CODE (t, code);
4417 TREE_TYPE (t) = type;
4418 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4419 TREE_OPERAND (t, 0) = node;
4420 if (node && !TYPE_P (node))
4422 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4423 TREE_READONLY (t) = TREE_READONLY (node);
4426 if (TREE_CODE_CLASS (code) == tcc_statement)
4428 if (code != DEBUG_BEGIN_STMT)
4429 TREE_SIDE_EFFECTS (t) = 1;
4431 else switch (code)
4433 case VA_ARG_EXPR:
4434 /* All of these have side-effects, no matter what their
4435 operands are. */
4436 TREE_SIDE_EFFECTS (t) = 1;
4437 TREE_READONLY (t) = 0;
4438 break;
4440 case INDIRECT_REF:
4441 /* Whether a dereference is readonly has nothing to do with whether
4442 its operand is readonly. */
4443 TREE_READONLY (t) = 0;
4444 break;
4446 case ADDR_EXPR:
4447 if (node)
4448 recompute_tree_invariant_for_addr_expr (t);
4449 break;
4451 default:
4452 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4453 && node && !TYPE_P (node)
4454 && TREE_CONSTANT (node))
4455 TREE_CONSTANT (t) = 1;
4456 if (TREE_CODE_CLASS (code) == tcc_reference
4457 && node && TREE_THIS_VOLATILE (node))
4458 TREE_THIS_VOLATILE (t) = 1;
4459 break;
4462 return t;
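/* Illustrative usage sketch ("var" is a hypothetical VAR_DECL):

     tree addr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (var)), var);

   Because the code is ADDR_EXPR, build1 calls
   recompute_tree_invariant_for_addr_expr, so TREE_CONSTANT (addr) ends up
   set only if "var" has a static address.  */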
4465 #define PROCESS_ARG(N) \
4466 do { \
4467 TREE_OPERAND (t, N) = arg##N; \
4468 if (arg##N && !TYPE_P (arg##N)) \
4470 if (TREE_SIDE_EFFECTS (arg##N)) \
4471 side_effects = 1; \
4472 if (!TREE_READONLY (arg##N) \
4473 && !CONSTANT_CLASS_P (arg##N)) \
4474 (void) (read_only = 0); \
4475 if (!TREE_CONSTANT (arg##N)) \
4476 (void) (constant = 0); \
4478 } while (0)
4480 tree
4481 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4483 bool constant, read_only, side_effects, div_by_zero;
4484 tree t;
4486 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4488 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4489 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4490 /* When sizetype precision doesn't match that of pointers
4491 we need to be able to build explicit extensions or truncations
4492 of the offset argument. */
4493 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4494 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4495 && TREE_CODE (arg1) == INTEGER_CST);
4497 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4498 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4499 && ptrofftype_p (TREE_TYPE (arg1)));
4501 t = make_node (code PASS_MEM_STAT);
4502 TREE_TYPE (t) = tt;
4504 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4505 result based on those same flags for the arguments. But if the
4506 arguments aren't really even `tree' expressions, we shouldn't be trying
4507 to do this. */
4509 /* Expressions without side effects may be constant if their
4510 arguments are as well. */
4511 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4512 || TREE_CODE_CLASS (code) == tcc_binary);
4513 read_only = 1;
4514 side_effects = TREE_SIDE_EFFECTS (t);
4516 switch (code)
4518 case TRUNC_DIV_EXPR:
4519 case CEIL_DIV_EXPR:
4520 case FLOOR_DIV_EXPR:
4521 case ROUND_DIV_EXPR:
4522 case EXACT_DIV_EXPR:
4523 case CEIL_MOD_EXPR:
4524 case FLOOR_MOD_EXPR:
4525 case ROUND_MOD_EXPR:
4526 case TRUNC_MOD_EXPR:
4527 div_by_zero = integer_zerop (arg1);
4528 break;
4529 default:
4530 div_by_zero = false;
4533 PROCESS_ARG (0);
4534 PROCESS_ARG (1);
4536 TREE_SIDE_EFFECTS (t) = side_effects;
4537 if (code == MEM_REF)
4539 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4541 tree o = TREE_OPERAND (arg0, 0);
4542 TREE_READONLY (t) = TREE_READONLY (o);
4543 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4546 else
4548 TREE_READONLY (t) = read_only;
4549 /* Don't mark X / 0 as constant. */
4550 TREE_CONSTANT (t) = constant && !div_by_zero;
4551 TREE_THIS_VOLATILE (t)
4552 = (TREE_CODE_CLASS (code) == tcc_reference
4553 && arg0 && TREE_THIS_VOLATILE (arg0));
4556 return t;
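/* Illustrative usage sketch ("ptr" and "idx" are hypothetical trees):

     tree off = fold_convert (sizetype, idx);
     tree sum = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);

   The offset operand has sizetype, so it satisfies the ptrofftype_p
   assertion at the top of this function.  */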
4560 tree
4561 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
4562 tree arg2 MEM_STAT_DECL)
4564 bool constant, read_only, side_effects;
4565 tree t;
4567 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4568 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4570 t = make_node (code PASS_MEM_STAT);
4571 TREE_TYPE (t) = tt;
4573 read_only = 1;
4575 /* As a special exception, if COND_EXPR has NULL branches, we
4576 assume that it is a gimple statement and always consider
4577 it to have side effects. */
4578 if (code == COND_EXPR
4579 && tt == void_type_node
4580 && arg1 == NULL_TREE
4581 && arg2 == NULL_TREE)
4582 side_effects = true;
4583 else
4584 side_effects = TREE_SIDE_EFFECTS (t);
4586 PROCESS_ARG (0);
4587 PROCESS_ARG (1);
4588 PROCESS_ARG (2);
4590 if (code == COND_EXPR)
4591 TREE_READONLY (t) = read_only;
4593 TREE_SIDE_EFFECTS (t) = side_effects;
4594 TREE_THIS_VOLATILE (t)
4595 = (TREE_CODE_CLASS (code) == tcc_reference
4596 && arg0 && TREE_THIS_VOLATILE (arg0));
4598 return t;
4601 tree
4602 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
4603 tree arg2, tree arg3 MEM_STAT_DECL)
4605 bool constant, read_only, side_effects;
4606 tree t;
4608 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4610 t = make_node (code PASS_MEM_STAT);
4611 TREE_TYPE (t) = tt;
4613 side_effects = TREE_SIDE_EFFECTS (t);
4615 PROCESS_ARG (0);
4616 PROCESS_ARG (1);
4617 PROCESS_ARG (2);
4618 PROCESS_ARG (3);
4620 TREE_SIDE_EFFECTS (t) = side_effects;
4621 TREE_THIS_VOLATILE (t)
4622 = (TREE_CODE_CLASS (code) == tcc_reference
4623 && arg0 && TREE_THIS_VOLATILE (arg0));
4625 return t;
4628 tree
4629 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
4630 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4632 bool constant, read_only, side_effects;
4633 tree t;
4635 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4637 t = make_node (code PASS_MEM_STAT);
4638 TREE_TYPE (t) = tt;
4640 side_effects = TREE_SIDE_EFFECTS (t);
4642 PROCESS_ARG (0);
4643 PROCESS_ARG (1);
4644 PROCESS_ARG (2);
4645 PROCESS_ARG (3);
4646 PROCESS_ARG (4);
4648 TREE_SIDE_EFFECTS (t) = side_effects;
4649 if (code == TARGET_MEM_REF)
4651 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4653 tree o = TREE_OPERAND (arg0, 0);
4654 TREE_READONLY (t) = TREE_READONLY (o);
4655 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4658 else
4659 TREE_THIS_VOLATILE (t)
4660 = (TREE_CODE_CLASS (code) == tcc_reference
4661 && arg0 && TREE_THIS_VOLATILE (arg0));
4663 return t;
4666 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4667 on the pointer PTR. */
4669 tree
4670 build_simple_mem_ref_loc (location_t loc, tree ptr)
4672 HOST_WIDE_INT offset = 0;
4673 tree ptype = TREE_TYPE (ptr);
4674 tree tem;
4675 /* For convenience allow addresses that collapse to a simple base
4676 and offset. */
4677 if (TREE_CODE (ptr) == ADDR_EXPR
4678 && (handled_component_p (TREE_OPERAND (ptr, 0))
4679 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4681 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4682 gcc_assert (ptr);
4683 if (TREE_CODE (ptr) == MEM_REF)
4685 offset += mem_ref_offset (ptr).to_short_addr ();
4686 ptr = TREE_OPERAND (ptr, 0);
4688 else
4689 ptr = build_fold_addr_expr (ptr);
4690 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4692 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4693 ptr, build_int_cst (ptype, offset));
4694 SET_EXPR_LOCATION (tem, loc);
4695 return tem;
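/* Illustrative usage sketch ("p" is a hypothetical pointer-valued decl or
   SSA name):

     tree deref = build_simple_mem_ref_loc (loc, p);

   behaves like *p; if "p" is instead an ADDR_EXPR such as &a.b, the address
   is collapsed to a base pointer plus a constant offset as described
   above.  */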
4698 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4700 offset_int
4701 mem_ref_offset (const_tree t)
4703 return offset_int::from (wi::to_wide (TREE_OPERAND (t, 1)), SIGNED);
4706 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4707 offsetted by OFFSET units. */
4709 tree
4710 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4712 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4713 build_fold_addr_expr (base),
4714 build_int_cst (ptr_type_node, offset));
4715 tree addr = build1 (ADDR_EXPR, type, ref);
4716 recompute_tree_invariant_for_addr_expr (addr);
4717 return addr;
4720 /* Similar except don't specify the TREE_TYPE
4721 and leave the TREE_SIDE_EFFECTS as 0.
4722 It is permissible for arguments to be null,
4723 or even garbage if their values do not matter. */
4725 tree
4726 build_nt (enum tree_code code, ...)
4728 tree t;
4729 int length;
4730 int i;
4731 va_list p;
4733 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4735 va_start (p, code);
4737 t = make_node (code);
4738 length = TREE_CODE_LENGTH (code);
4740 for (i = 0; i < length; i++)
4741 TREE_OPERAND (t, i) = va_arg (p, tree);
4743 va_end (p);
4744 return t;
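/* Illustrative usage sketch ("base" and "index" are hypothetical trees):

     tree ref = build_nt (ARRAY_REF, base, index, NULL_TREE, NULL_TREE);

   Unlike build4, build_nt leaves TREE_TYPE and the TREE_SIDE_EFFECTS /
   TREE_READONLY flags for the caller to fill in, as stabilize_reference
   does above.  */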
4747 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4748 tree vec. */
4750 tree
4751 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4753 tree ret, t;
4754 unsigned int ix;
4756 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4757 CALL_EXPR_FN (ret) = fn;
4758 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4759 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4760 CALL_EXPR_ARG (ret, ix) = t;
4761 return ret;
4764 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4765 We do NOT enter this node in any sort of symbol table.
4767 LOC is the location of the decl.
4769 layout_decl is used to set up the decl's storage layout.
4770 Other slots are initialized to 0 or null pointers. */
4772 tree
4773 build_decl (location_t loc, enum tree_code code, tree name,
4774 tree type MEM_STAT_DECL)
4776 tree t;
4778 t = make_node (code PASS_MEM_STAT);
4779 DECL_SOURCE_LOCATION (t) = loc;
4781 /* if (type == error_mark_node)
4782 type = integer_type_node; */
4783 /* That is not done, deliberately, so that having error_mark_node
4784 as the type can suppress useless errors in the use of this variable. */
4786 DECL_NAME (t) = name;
4787 TREE_TYPE (t) = type;
4789 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4790 layout_decl (t, 0);
4792 return t;
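/* Illustrative usage sketch (the identifier name "tmp" is arbitrary):

     tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			    get_identifier ("tmp"), integer_type_node);

   Since the code is VAR_DECL, layout_decl has already filled in the size,
   mode and alignment of the new decl.  */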
4795 /* Builds and returns function declaration with NAME and TYPE. */
4797 tree
4798 build_fn_decl (const char *name, tree type)
4800 tree id = get_identifier (name);
4801 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4803 DECL_EXTERNAL (decl) = 1;
4804 TREE_PUBLIC (decl) = 1;
4805 DECL_ARTIFICIAL (decl) = 1;
4806 TREE_NOTHROW (decl) = 1;
4808 return decl;
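/* Illustrative usage sketch (the name "__example_helper" is hypothetical):

     tree fntype = build_function_type_list (void_type_node, ptr_type_node,
					     NULL_TREE);
     tree fndecl = build_fn_decl ("__example_helper", fntype);

   The resulting FUNCTION_DECL is external, public, artificial and marked
   TREE_NOTHROW, matching the flags set above.  */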
4811 vec<tree, va_gc> *all_translation_units;
4813 /* Builds a new translation-unit decl with name NAME, queues it in the
4814 global list of translation-unit decls and returns it. */
4816 tree
4817 build_translation_unit_decl (tree name)
4819 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4820 name, NULL_TREE);
4821 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4822 vec_safe_push (all_translation_units, tu);
4823 return tu;
4827 /* BLOCK nodes are used to represent the structure of binding contours
4828 and declarations, once those contours have been exited and their contents
4829 compiled. This information is used for outputting debugging info. */
4831 tree
4832 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4834 tree block = make_node (BLOCK);
4836 BLOCK_VARS (block) = vars;
4837 BLOCK_SUBBLOCKS (block) = subblocks;
4838 BLOCK_SUPERCONTEXT (block) = supercontext;
4839 BLOCK_CHAIN (block) = chain;
4840 return block;
4844 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4846 LOC is the location to use in tree T. */
4848 void
4849 protected_set_expr_location (tree t, location_t loc)
4851 if (CAN_HAVE_LOCATION_P (t))
4852 SET_EXPR_LOCATION (t, loc);
4855 /* Reset the expression *EXPR_P, a size or position.
4857 ??? We could reset all non-constant sizes or positions. But it's cheap
4858 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4860 We need to reset self-referential sizes or positions because they cannot
4861 be gimplified and thus can contain a CALL_EXPR after the gimplification
4862 is finished, which will run afoul of LTO streaming. And they need to be
4863 reset to something essentially dummy but not constant, so as to preserve
4864 the properties of the object they are attached to. */
4866 static inline void
4867 free_lang_data_in_one_sizepos (tree *expr_p)
4869 tree expr = *expr_p;
4870 if (CONTAINS_PLACEHOLDER_P (expr))
4871 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4875 /* Reset all the fields in a binfo node BINFO. We only keep
4876 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4878 static void
4879 free_lang_data_in_binfo (tree binfo)
4881 unsigned i;
4882 tree t;
4884 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4886 BINFO_VIRTUALS (binfo) = NULL_TREE;
4887 BINFO_BASE_ACCESSES (binfo) = NULL;
4888 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4889 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4891 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4892 free_lang_data_in_binfo (t);
4896 /* Reset all language specific information still present in TYPE. */
4898 static void
4899 free_lang_data_in_type (tree type)
4901 gcc_assert (TYPE_P (type));
4903 /* Give the FE a chance to remove its own data first. */
4904 lang_hooks.free_lang_data (type);
4906 TREE_LANG_FLAG_0 (type) = 0;
4907 TREE_LANG_FLAG_1 (type) = 0;
4908 TREE_LANG_FLAG_2 (type) = 0;
4909 TREE_LANG_FLAG_3 (type) = 0;
4910 TREE_LANG_FLAG_4 (type) = 0;
4911 TREE_LANG_FLAG_5 (type) = 0;
4912 TREE_LANG_FLAG_6 (type) = 0;
4914 if (TREE_CODE (type) == FUNCTION_TYPE)
4916 /* Remove the const and volatile qualifiers from arguments. The
4917 C++ front end removes them, but the C front end does not,
4918 leading to false ODR violation errors when merging two
4919 instances of the same function signature compiled by
4920 different front ends. */
4921 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4923 tree arg_type = TREE_VALUE (p);
4925 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4927 int quals = TYPE_QUALS (arg_type)
4928 & ~TYPE_QUAL_CONST
4929 & ~TYPE_QUAL_VOLATILE;
4930 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4931 free_lang_data_in_type (TREE_VALUE (p));
4933 /* C++ FE uses TREE_PURPOSE to store initial values. */
4934 TREE_PURPOSE (p) = NULL;
4937 else if (TREE_CODE (type) == METHOD_TYPE)
4938 for (tree p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4939 /* C++ FE uses TREE_PURPOSE to store initial values. */
4940 TREE_PURPOSE (p) = NULL;
4941 else if (RECORD_OR_UNION_TYPE_P (type))
4943 /* Remove members that are not FIELD_DECLs (and maybe
4944 TYPE_DECLs) from the field list of an aggregate. These occur
4945 in C++. */
4946 for (tree *prev = &TYPE_FIELDS (type), member; (member = *prev);)
4947 if (TREE_CODE (member) == FIELD_DECL
4948 || (TREE_CODE (member) == TYPE_DECL
4949 && !DECL_IGNORED_P (member)
4950 && debug_info_level > DINFO_LEVEL_TERSE
4951 && !is_redundant_typedef (member)))
4952 prev = &DECL_CHAIN (member);
4953 else
4954 *prev = DECL_CHAIN (member);
4956 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
4957 and dangles the pointer from time to time. */
4958 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
4959 TYPE_VFIELD (type) = NULL_TREE;
4961 if (TYPE_BINFO (type))
4963 free_lang_data_in_binfo (TYPE_BINFO (type));
4964 /* We need to preserve the link to bases and the virtual table for all
4965 polymorphic types to keep the devirtualization machinery working.
4966 Debug output cares only about bases, but we also output virtual
4967 table pointers so that merging -fdevirtualize and
4968 -fno-devirtualize units is easier. */
4969 if ((!BINFO_VTABLE (TYPE_BINFO (type))
4970 || !flag_devirtualize)
4971 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
4972 && !BINFO_VTABLE (TYPE_BINFO (type)))
4973 || debug_info_level != DINFO_LEVEL_NONE))
4974 TYPE_BINFO (type) = NULL;
4977 else if (INTEGRAL_TYPE_P (type)
4978 || SCALAR_FLOAT_TYPE_P (type)
4979 || FIXED_POINT_TYPE_P (type))
4981 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4982 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4985 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4987 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4988 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4990 if (TYPE_CONTEXT (type)
4991 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4993 tree ctx = TYPE_CONTEXT (type);
4996 ctx = BLOCK_SUPERCONTEXT (ctx);
4998 while (ctx && TREE_CODE (ctx) == BLOCK);
4999 TYPE_CONTEXT (type) = ctx;
5004 /* Return true if DECL may need an assembler name to be set. */
5006 static inline bool
5007 need_assembler_name_p (tree decl)
5009 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5010 Rule merging. This makes type_odr_p return true on those types during
5011 LTO, and by comparing the mangled names we can tell which types are
5012 intended to be equivalent across compilation units.
5014 We do not store names of types that are type_in_anonymous_namespace_p.
5016 Record, union and enumeration types have linkage that allows us
5017 to check type_in_anonymous_namespace_p. We do not mangle compound types
5018 that can always be compared structurally.
5020 Similarly for builtin types, we compare properties of their main variant.
5021 Integer types are a special case, where mangling does distinguish
5022 between char/signed char/unsigned char etc. Storing names for these lets
5023 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
5024 See cp/mangle.c:write_builtin_type for details. */
5026 if (flag_lto_odr_type_mering
5027 && TREE_CODE (decl) == TYPE_DECL
5028 && DECL_NAME (decl)
5029 && decl == TYPE_NAME (TREE_TYPE (decl))
5030 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5031 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5032 && (type_with_linkage_p (TREE_TYPE (decl))
5033 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5034 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5035 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5036 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5037 if (!VAR_OR_FUNCTION_DECL_P (decl))
5038 return false;
5040 /* If DECL already has its assembler name set, it does not need a
5041 new one. */
5042 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5043 || DECL_ASSEMBLER_NAME_SET_P (decl))
5044 return false;
5046 /* Abstract decls do not need an assembler name. */
5047 if (DECL_ABSTRACT_P (decl))
5048 return false;
5050 /* For VAR_DECLs, only static, public and external symbols need an
5051 assembler name. */
5052 if (VAR_P (decl)
5053 && !TREE_STATIC (decl)
5054 && !TREE_PUBLIC (decl)
5055 && !DECL_EXTERNAL (decl))
5056 return false;
5058 if (TREE_CODE (decl) == FUNCTION_DECL)
5060 /* Do not set assembler name on builtins. Allow RTL expansion to
5061 decide whether to expand inline or via a regular call. */
5062 if (DECL_BUILT_IN (decl)
5063 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5064 return false;
5066 /* Functions represented in the callgraph need an assembler name. */
5067 if (cgraph_node::get (decl) != NULL)
5068 return true;
5070 /* Unused and not public functions don't need an assembler name. */
5071 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5072 return false;
5075 return true;
5079 /* Reset all language specific information still present in symbol
5080 DECL. */
5082 static void
5083 free_lang_data_in_decl (tree decl)
5085 gcc_assert (DECL_P (decl));
5087 /* Give the FE a chance to remove its own data first. */
5088 lang_hooks.free_lang_data (decl);
5090 TREE_LANG_FLAG_0 (decl) = 0;
5091 TREE_LANG_FLAG_1 (decl) = 0;
5092 TREE_LANG_FLAG_2 (decl) = 0;
5093 TREE_LANG_FLAG_3 (decl) = 0;
5094 TREE_LANG_FLAG_4 (decl) = 0;
5095 TREE_LANG_FLAG_5 (decl) = 0;
5096 TREE_LANG_FLAG_6 (decl) = 0;
5098 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5099 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5100 if (TREE_CODE (decl) == FIELD_DECL)
5102 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5103 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5104 DECL_QUALIFIER (decl) = NULL_TREE;
5107 if (TREE_CODE (decl) == FUNCTION_DECL)
5109 struct cgraph_node *node;
5110 if (!(node = cgraph_node::get (decl))
5111 || (!node->definition && !node->clones))
5113 if (node)
5114 node->release_body ();
5115 else
5117 release_function_body (decl);
5118 DECL_ARGUMENTS (decl) = NULL;
5119 DECL_RESULT (decl) = NULL;
5120 DECL_INITIAL (decl) = error_mark_node;
5123 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5125 tree t;
5127 /* If DECL has a gimple body, then the context for its
5128 arguments must be DECL. Otherwise, it doesn't really
5129 matter, as we will not be emitting any code for DECL. In
5130 general, there may be other instances of DECL created by
5131 the front end and since PARM_DECLs are generally shared,
5132 their DECL_CONTEXT changes as the replicas of DECL are
5133 created. The only time where DECL_CONTEXT is important
5134 is for the FUNCTION_DECLs that have a gimple body (since
5135 the PARM_DECL will be used in the function's body). */
5136 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5137 DECL_CONTEXT (t) = decl;
5138 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5139 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5140 = target_option_default_node;
5141 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5142 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5143 = optimization_default_node;
5146 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5147 At this point, it is not needed anymore. */
5148 DECL_SAVED_TREE (decl) = NULL_TREE;
5150 /* Clear the abstract origin if it refers to a method.
5151 Otherwise dwarf2out.c will ICE as we splice functions out of
5152 TYPE_FIELDS and thus the origin will not be output
5153 correctly. */
5154 if (DECL_ABSTRACT_ORIGIN (decl)
5155 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5156 && RECORD_OR_UNION_TYPE_P
5157 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5158 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5160 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5161 DECL_VINDEX referring to itself into a vtable slot number as it
5162 should. Happens with functions that are copied and then forgotten
5163 about. Just clear it, it won't matter anymore. */
5164 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5165 DECL_VINDEX (decl) = NULL_TREE;
5167 else if (VAR_P (decl))
5169 if ((DECL_EXTERNAL (decl)
5170 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5171 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5172 DECL_INITIAL (decl) = NULL_TREE;
5174 else if (TREE_CODE (decl) == TYPE_DECL)
5176 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5177 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5178 DECL_INITIAL (decl) = NULL_TREE;
5180 else if (TREE_CODE (decl) == FIELD_DECL)
5181 DECL_INITIAL (decl) = NULL_TREE;
5182 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5183 && DECL_INITIAL (decl)
5184 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5186 /* Strip builtins from the translation-unit BLOCK. We still have targets
5187 without builtin_decl_explicit support, and builtins are also shared
5188 nodes, so we can't use TREE_CHAIN in multiple lists. */
5189 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5190 while (*nextp)
5192 tree var = *nextp;
5193 if (TREE_CODE (var) == FUNCTION_DECL
5194 && DECL_BUILT_IN (var))
5195 *nextp = TREE_CHAIN (var);
5196 else
5197 nextp = &TREE_CHAIN (var);
5203 /* Data used when collecting DECLs and TYPEs for language data removal. */
5205 struct free_lang_data_d
5207 free_lang_data_d () : decls (100), types (100) {}
5209 /* Worklist to avoid excessive recursion. */
5210 auto_vec<tree> worklist;
5212 /* Set of traversed objects. Used to avoid duplicate visits. */
5213 hash_set<tree> pset;
5215 /* Array of symbols to process with free_lang_data_in_decl. */
5216 auto_vec<tree> decls;
5218 /* Array of types to process with free_lang_data_in_type. */
5219 auto_vec<tree> types;
5223 /* Save all language fields needed to generate proper debug information
5224 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5226 static void
5227 save_debug_info_for_decl (tree t)
5229 /*struct saved_debug_info_d *sdi;*/
5231 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5233 /* FIXME. Partial implementation for saving debug info removed. */
5237 /* Save all language fields needed to generate proper debug information
5238 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5240 static void
5241 save_debug_info_for_type (tree t)
5243 /*struct saved_debug_info_d *sdi;*/
5245 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5247 /* FIXME. Partial implementation for saving debug info removed. */
5251 /* Add type or decl T to one of the lists of tree nodes that need their
5252 language data removed. The lists are held inside FLD. */
5254 static void
5255 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5257 if (DECL_P (t))
5259 fld->decls.safe_push (t);
5260 if (debug_info_level > DINFO_LEVEL_TERSE)
5261 save_debug_info_for_decl (t);
5263 else if (TYPE_P (t))
5265 fld->types.safe_push (t);
5266 if (debug_info_level > DINFO_LEVEL_TERSE)
5267 save_debug_info_for_type (t);
5269 else
5270 gcc_unreachable ();
5273 /* Push tree node T into FLD->WORKLIST. */
5275 static inline void
5276 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5278 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5279 fld->worklist.safe_push ((t));
5283 /* Operand callback helper for free_lang_data_in_node. *TP is the
5284 subtree operand being considered. */
5286 static tree
5287 find_decls_types_r (tree *tp, int *ws, void *data)
5289 tree t = *tp;
5290 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5292 if (TREE_CODE (t) == TREE_LIST)
5293 return NULL_TREE;
5295 /* Language specific nodes will be removed, so there is no need
5296 to gather anything under them. */
5297 if (is_lang_specific (t))
5299 *ws = 0;
5300 return NULL_TREE;
5303 if (DECL_P (t))
5305 /* Note that walk_tree does not traverse every possible field in
5306 decls, so we have to do our own traversals here. */
5307 add_tree_to_fld_list (t, fld);
5309 fld_worklist_push (DECL_NAME (t), fld);
5310 fld_worklist_push (DECL_CONTEXT (t), fld);
5311 fld_worklist_push (DECL_SIZE (t), fld);
5312 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5314 /* We are going to remove everything under DECL_INITIAL for
5315 TYPE_DECLs. No point walking them. */
5316 if (TREE_CODE (t) != TYPE_DECL)
5317 fld_worklist_push (DECL_INITIAL (t), fld);
5319 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5320 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5322 if (TREE_CODE (t) == FUNCTION_DECL)
5324 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5325 fld_worklist_push (DECL_RESULT (t), fld);
5327 else if (TREE_CODE (t) == TYPE_DECL)
5329 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5331 else if (TREE_CODE (t) == FIELD_DECL)
5333 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5334 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5335 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5336 fld_worklist_push (DECL_FCONTEXT (t), fld);
5339 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
5340 && DECL_HAS_VALUE_EXPR_P (t))
5341 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5343 if (TREE_CODE (t) != FIELD_DECL
5344 && TREE_CODE (t) != TYPE_DECL)
5345 fld_worklist_push (TREE_CHAIN (t), fld);
5346 *ws = 0;
5348 else if (TYPE_P (t))
5350 /* Note that walk_tree does not traverse every possible field in
5351 types, so we have to do our own traversals here. */
5352 add_tree_to_fld_list (t, fld);
5354 if (!RECORD_OR_UNION_TYPE_P (t))
5355 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5356 fld_worklist_push (TYPE_SIZE (t), fld);
5357 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5358 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5359 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5360 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5361 fld_worklist_push (TYPE_NAME (t), fld);
5362 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5363 them and thus do not, and do not want to, reach unused pointer types
5364 this way. */
5365 if (!POINTER_TYPE_P (t))
5366 fld_worklist_push (TYPE_MIN_VALUE_RAW (t), fld);
5367 /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types. */
5368 if (!RECORD_OR_UNION_TYPE_P (t))
5369 fld_worklist_push (TYPE_MAX_VALUE_RAW (t), fld);
5370 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5371 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5372 do not, and do not want to, reach unused variants this way. */
5373 if (TYPE_CONTEXT (t))
5375 tree ctx = TYPE_CONTEXT (t);
5376 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5377 So push that instead. */
5378 while (ctx && TREE_CODE (ctx) == BLOCK)
5379 ctx = BLOCK_SUPERCONTEXT (ctx);
5380 fld_worklist_push (ctx, fld);
5382 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not,
5383 and do not want to, reach unused types this way. */
5385 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5387 unsigned i;
5388 tree tem;
5389 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5390 fld_worklist_push (TREE_TYPE (tem), fld);
5391 fld_worklist_push (BINFO_VIRTUALS (TYPE_BINFO (t)), fld);
5393 if (RECORD_OR_UNION_TYPE_P (t))
5395 tree tem;
5396 /* Push all TYPE_FIELDS - there can be interleaving interesting
5397 and non-interesting things. */
5398 tem = TYPE_FIELDS (t);
5399 while (tem)
5401 if (TREE_CODE (tem) == FIELD_DECL
5402 || (TREE_CODE (tem) == TYPE_DECL
5403 && !DECL_IGNORED_P (tem)
5404 && debug_info_level > DINFO_LEVEL_TERSE
5405 && !is_redundant_typedef (tem)))
5406 fld_worklist_push (tem, fld);
5407 tem = TREE_CHAIN (tem);
5411 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5412 *ws = 0;
5414 else if (TREE_CODE (t) == BLOCK)
5416 tree tem;
5417 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5418 fld_worklist_push (tem, fld);
5419 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5420 fld_worklist_push (tem, fld);
5421 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5424 if (TREE_CODE (t) != IDENTIFIER_NODE
5425 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5426 fld_worklist_push (TREE_TYPE (t), fld);
5428 return NULL_TREE;
5432 /* Find decls and types in T. */
5434 static void
5435 find_decls_types (tree t, struct free_lang_data_d *fld)
5437 while (1)
5439 if (!fld->pset.contains (t))
5440 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
5441 if (fld->worklist.is_empty ())
5442 break;
5443 t = fld->worklist.pop ();
5447 /* Translate all the types in LIST with the corresponding runtime
5448 types. */
5450 static tree
5451 get_eh_types_for_runtime (tree list)
5453 tree head, prev;
5455 if (list == NULL_TREE)
5456 return NULL_TREE;
5458 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5459 prev = head;
5460 list = TREE_CHAIN (list);
5461 while (list)
5463 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5464 TREE_CHAIN (prev) = n;
5465 prev = TREE_CHAIN (prev);
5466 list = TREE_CHAIN (list);
5469 return head;
5473 /* Find decls and types referenced in EH region R and store them in
5474 FLD->DECLS and FLD->TYPES. */
5476 static void
5477 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5479 switch (r->type)
5481 case ERT_CLEANUP:
5482 break;
5484 case ERT_TRY:
5486 eh_catch c;
5488 /* The types referenced in each catch must first be changed to the
5489 EH types used at runtime. This removes references to FE types
5490 in the region. */
5491 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5493 c->type_list = get_eh_types_for_runtime (c->type_list);
5494 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
5497 break;
5499 case ERT_ALLOWED_EXCEPTIONS:
5500 r->u.allowed.type_list
5501 = get_eh_types_for_runtime (r->u.allowed.type_list);
5502 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
5503 break;
5505 case ERT_MUST_NOT_THROW:
5506 walk_tree (&r->u.must_not_throw.failure_decl,
5507 find_decls_types_r, fld, &fld->pset);
5508 break;
5513 /* Find decls and types referenced in cgraph node N and store them in
5514 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5515 look for *every* kind of DECL and TYPE node reachable from N,
5516 including those embedded inside types and decls (i.e., TYPE_DECLs,
5517 NAMESPACE_DECLs, etc). */
5519 static void
5520 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5522 basic_block bb;
5523 struct function *fn;
5524 unsigned ix;
5525 tree t;
5527 find_decls_types (n->decl, fld);
5529 if (!gimple_has_body_p (n->decl))
5530 return;
5532 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5534 fn = DECL_STRUCT_FUNCTION (n->decl);
5536 /* Traverse locals. */
5537 FOR_EACH_LOCAL_DECL (fn, ix, t)
5538 find_decls_types (t, fld);
5540 /* Traverse EH regions in FN. */
5542 eh_region r;
5543 FOR_ALL_EH_REGION_FN (r, fn)
5544 find_decls_types_in_eh_region (r, fld);
5547 /* Traverse every statement in FN. */
5548 FOR_EACH_BB_FN (bb, fn)
5550 gphi_iterator psi;
5551 gimple_stmt_iterator si;
5552 unsigned i;
5554 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5556 gphi *phi = psi.phi ();
5558 for (i = 0; i < gimple_phi_num_args (phi); i++)
5560 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5561 find_decls_types (*arg_p, fld);
5565 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5567 gimple *stmt = gsi_stmt (si);
5569 if (is_gimple_call (stmt))
5570 find_decls_types (gimple_call_fntype (stmt), fld);
5572 for (i = 0; i < gimple_num_ops (stmt); i++)
5574 tree arg = gimple_op (stmt, i);
5575 find_decls_types (arg, fld);
5582 /* Find decls and types referenced in varpool node N and store them in
5583 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5584 look for *every* kind of DECL and TYPE node reachable from N,
5585 including those embedded inside types and decls (i.e., TYPE_DECLs,
5586 NAMESPACE_DECLs, etc). */
5588 static void
5589 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5591 find_decls_types (v->decl, fld);
5594 /* If T needs an assembler name, have one created for it. */
5596 void
5597 assign_assembler_name_if_needed (tree t)
5599 if (need_assembler_name_p (t))
5601 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5602 diagnostics that use input_location to show locus
5603 information. The problem here is that, at this point,
5604 input_location is generally anchored to the end of the file
5605 (since the parser is long gone), so we don't have a good
5606 position to pin it to.
5608 To alleviate this problem, this uses the location of T's
5609 declaration. Examples of this are
5610 testsuite/g++.dg/template/cond2.C and
5611 testsuite/g++.dg/template/pr35240.C. */
5612 location_t saved_location = input_location;
5613 input_location = DECL_SOURCE_LOCATION (t);
5615 decl_assembler_name (t);
5617 input_location = saved_location;
5622 /* Free language specific information for every operand and expression
5623 in every node of the call graph. This process operates in three stages:
5625 1- Every callgraph node and varpool node is traversed looking for
5626 decls and types embedded in them. This is a more exhaustive
5627 search than that done by find_referenced_vars, because it will
5628 also collect individual fields, decls embedded in types, etc.
5630 2- All the decls found are sent to free_lang_data_in_decl.
5632 3- All the types found are sent to free_lang_data_in_type.
5634 The ordering between decls and types is important because
5635 free_lang_data_in_decl sets assembler names, which includes
5636 mangling. So types cannot be freed up until assembler names have
5637 been set up. */
5639 static void
5640 free_lang_data_in_cgraph (void)
5642 struct cgraph_node *n;
5643 varpool_node *v;
5644 struct free_lang_data_d fld;
5645 tree t;
5646 unsigned i;
5647 alias_pair *p;
5649 /* Find decls and types in the body of every function in the callgraph. */
5650 FOR_EACH_FUNCTION (n)
5651 find_decls_types_in_node (n, &fld);
5653 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5654 find_decls_types (p->decl, &fld);
5656 /* Find decls and types in every varpool symbol. */
5657 FOR_EACH_VARIABLE (v)
5658 find_decls_types_in_var (v, &fld);
5660 /* Set the assembler name on every decl found. We need to do this
5661 now because free_lang_data_in_decl will invalidate data needed
5662 for mangling. This breaks mangling on interdependent decls. */
5663 FOR_EACH_VEC_ELT (fld.decls, i, t)
5664 assign_assembler_name_if_needed (t);
5666 /* Traverse every decl found freeing its language data. */
5667 FOR_EACH_VEC_ELT (fld.decls, i, t)
5668 free_lang_data_in_decl (t);
5670 /* Traverse every type found freeing its language data. */
5671 FOR_EACH_VEC_ELT (fld.types, i, t)
5672 free_lang_data_in_type (t);
5673 if (flag_checking)
5675 FOR_EACH_VEC_ELT (fld.types, i, t)
5676 verify_type (t);
5681 /* Free resources that are used by the FE but are not needed once it is done. */
5683 static unsigned
5684 free_lang_data (void)
5686 unsigned i;
5688 /* If we are the LTO frontend we have freed lang-specific data already. */
5689 if (in_lto_p
5690 || (!flag_generate_lto && !flag_generate_offload))
5691 return 0;
5693 /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one. */
5694 if (vec_safe_is_empty (all_translation_units))
5695 build_translation_unit_decl (NULL_TREE);
5697 /* Allocate and assign alias sets to the standard integer types
5698 while the slots are still the way the frontends generated them. */
5699 for (i = 0; i < itk_none; ++i)
5700 if (integer_types[i])
5701 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5703 /* Traverse the IL resetting language specific information for
5704 operands, expressions, etc. */
5705 free_lang_data_in_cgraph ();
5707 /* Create gimple variants for common types. */
5708 for (unsigned i = 0;
5709 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
5710 ++i)
5711 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
5713 /* Reset some langhooks. Do not reset types_compatible_p, it may
5714 still be used indirectly via the get_alias_set langhook. */
5715 lang_hooks.dwarf_name = lhd_dwarf_name;
5716 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5717 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5719 /* We do not want the default decl_assembler_name implementation,
5720 rather if we have fixed everything we want a wrapper around it
5721 asserting that all non-local symbols already got their assembler
5722 name and only produce assembler names for local symbols. Or rather
5723 make sure we never call decl_assembler_name on local symbols and
5724 devise a separate, middle-end private scheme for it. */
5726 /* Reset diagnostic machinery. */
5727 tree_diagnostics_defaults (global_dc);
5729 return 0;
5733 namespace {
5735 const pass_data pass_data_ipa_free_lang_data =
5737 SIMPLE_IPA_PASS, /* type */
5738 "*free_lang_data", /* name */
5739 OPTGROUP_NONE, /* optinfo_flags */
5740 TV_IPA_FREE_LANG_DATA, /* tv_id */
5741 0, /* properties_required */
5742 0, /* properties_provided */
5743 0, /* properties_destroyed */
5744 0, /* todo_flags_start */
5745 0, /* todo_flags_finish */
5748 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5750 public:
5751 pass_ipa_free_lang_data (gcc::context *ctxt)
5752 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5755 /* opt_pass methods: */
5756 virtual unsigned int execute (function *) { return free_lang_data (); }
5758 }; // class pass_ipa_free_lang_data
5760 } // anon namespace
5762 simple_ipa_opt_pass *
5763 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5765 return new pass_ipa_free_lang_data (ctxt);
5768 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5769 of the various TYPE_QUAL values. */
5771 static void
5772 set_type_quals (tree type, int type_quals)
5774 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5775 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5776 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5777 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5778 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5781 /* Returns true iff CAND and BASE have equivalent language-specific
5782 qualifiers. */
5784 bool
5785 check_lang_type (const_tree cand, const_tree base)
5787 if (lang_hooks.types.type_hash_eq == NULL)
5788 return true;
5789 /* type_hash_eq currently only applies to these types. */
5790 if (TREE_CODE (cand) != FUNCTION_TYPE
5791 && TREE_CODE (cand) != METHOD_TYPE)
5792 return true;
5793 return lang_hooks.types.type_hash_eq (cand, base);
5796 /* Returns true iff unqualified CAND and BASE are equivalent. */
5798 bool
5799 check_base_type (const_tree cand, const_tree base)
5801 return (TYPE_NAME (cand) == TYPE_NAME (base)
5802 /* Apparently this is needed for Objective-C. */
5803 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5804 /* Check alignment. */
5805 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5806 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5807 TYPE_ATTRIBUTES (base)));
5810 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5812 bool
5813 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5815 return (TYPE_QUALS (cand) == type_quals
5816 && check_base_type (cand, base)
5817 && check_lang_type (cand, base));
5820 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5822 static bool
5823 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5825 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5826 && TYPE_NAME (cand) == TYPE_NAME (base)
5827 /* Apparently this is needed for Objective-C. */
5828 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5829 /* Check alignment. */
5830 && TYPE_ALIGN (cand) == align
5831 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5832 TYPE_ATTRIBUTES (base))
5833 && check_lang_type (cand, base));
5836 /* This function checks to see if TYPE matches the size of one of the
5837 built-in atomic types, and if so returns that core atomic type. */
5839 static tree
5840 find_atomic_core_type (tree type)
5842 tree base_atomic_type;
5844 /* Only handle complete types. */
5845 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5846 return NULL_TREE;
5848 switch (tree_to_uhwi (TYPE_SIZE (type)))
5850 case 8:
5851 base_atomic_type = atomicQI_type_node;
5852 break;
5854 case 16:
5855 base_atomic_type = atomicHI_type_node;
5856 break;
5858 case 32:
5859 base_atomic_type = atomicSI_type_node;
5860 break;
5862 case 64:
5863 base_atomic_type = atomicDI_type_node;
5864 break;
5866 case 128:
5867 base_atomic_type = atomicTI_type_node;
5868 break;
5870 default:
5871 base_atomic_type = NULL_TREE;
5874 return base_atomic_type;
5877 /* Return a version of the TYPE, qualified as indicated by the
5878 TYPE_QUALS, if one exists. If no qualified version exists yet,
5879 return NULL_TREE. */
5881 tree
5882 get_qualified_type (tree type, int type_quals)
5884 tree t;
5886 if (TYPE_QUALS (type) == type_quals)
5887 return type;
5889 /* Search the chain of variants to see if there is already one there just
5890 like the one we need to have. If so, use that existing one. We must
5891 preserve the TYPE_NAME, since there is code that depends on this. */
5892 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5893 if (check_qualified_type (t, type, type_quals))
5894 return t;
5896 return NULL_TREE;
5899 /* Like get_qualified_type, but creates the type if it does not
5900 exist. This function never returns NULL_TREE. */
5902 tree
5903 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5905 tree t;
5907 /* See if we already have the appropriate qualified variant. */
5908 t = get_qualified_type (type, type_quals);
5910 /* If not, build it. */
5911 if (!t)
5913 t = build_variant_type_copy (type PASS_MEM_STAT);
5914 set_type_quals (t, type_quals);
5916 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5918 /* See if this object can map to a basic atomic type. */
5919 tree atomic_type = find_atomic_core_type (type);
5920 if (atomic_type)
5922 /* Ensure the alignment of this type is compatible with
5923 the required alignment of the atomic type. */
5924 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5925 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5929 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5930 /* Propagate structural equality. */
5931 SET_TYPE_STRUCTURAL_EQUALITY (t);
5932 else if (TYPE_CANONICAL (type) != type)
5933 /* Build the underlying canonical type, since it is different
5934 from TYPE. */
5936 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5937 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5939 else
5940 /* T is its own canonical type. */
5941 TYPE_CANONICAL (t) = t;
5945 return t;
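/* Illustrative usage sketch:

     tree cv_int = build_qualified_type (integer_type_node,
					 TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   This returns the existing "const volatile int" variant if one is already
   on the variant chain of integer_type_node, and otherwise builds a new
   variant with those qualifiers.  */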
5948 /* Create a variant of type T with alignment ALIGN. */
5950 tree
5951 build_aligned_type (tree type, unsigned int align)
5953 tree t;
5955 if (TYPE_PACKED (type)
5956 || TYPE_ALIGN (type) == align)
5957 return type;
5959 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5960 if (check_aligned_type (t, type, align))
5961 return t;
5963 t = build_variant_type_copy (type);
5964 SET_TYPE_ALIGN (t, align);
5965 TYPE_USER_ALIGN (t) = 1;
5967 return t;
5970 /* Create a new distinct copy of TYPE. The new type is made its own
5971 MAIN_VARIANT. If TYPE requires structural equality checks, the
5972 resulting type requires structural equality checks; otherwise, its
5973 TYPE_CANONICAL points to itself. */
5975 tree
5976 build_distinct_type_copy (tree type MEM_STAT_DECL)
5978 tree t = copy_node (type PASS_MEM_STAT);
5980 TYPE_POINTER_TO (t) = 0;
5981 TYPE_REFERENCE_TO (t) = 0;
5983 /* Set the canonical type either to a new equivalence class, or
5984 propagate the need for structural equality checks. */
5985 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5986 SET_TYPE_STRUCTURAL_EQUALITY (t);
5987 else
5988 TYPE_CANONICAL (t) = t;
5990 /* Make it its own variant. */
5991 TYPE_MAIN_VARIANT (t) = t;
5992 TYPE_NEXT_VARIANT (t) = 0;
5994 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5995 whose TREE_TYPE is not t. This can also happen in the Ada
5996 frontend when using subtypes. */
5998 return t;
6001 /* Create a new variant of TYPE, equivalent but distinct. This is so
6002 the caller can modify it. TYPE_CANONICAL for the return type will
6003 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6004 are considered equal by the language itself (or that both types
6005 require structural equality checks). */
6007 tree
6008 build_variant_type_copy (tree type MEM_STAT_DECL)
6010 tree t, m = TYPE_MAIN_VARIANT (type);
6012 t = build_distinct_type_copy (type PASS_MEM_STAT);
6014 /* Since we're building a variant, assume that it is a non-semantic
6015 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6016 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6017 /* Type variants have no alias set defined. */
6018 TYPE_ALIAS_SET (t) = -1;
6020 /* Add the new type to the chain of variants of TYPE. */
6021 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6022 TYPE_NEXT_VARIANT (m) = t;
6023 TYPE_MAIN_VARIANT (t) = m;
6025 return t;
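/* Illustrative contrast between the two copy routines ("t" is a hypothetical
   type):

     tree d = build_distinct_type_copy (t);
     tree v = build_variant_type_copy (t);

   D starts a new main variant and its own equivalence class (unless T
   requires structural equality), whereas V stays on T's variant chain and
   shares TYPE_CANONICAL with T, so the middle end treats V and T as the
   same type.  */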
6028 /* Return true if the from trees in both tree maps are equal. */
6031 tree_map_base_eq (const void *va, const void *vb)
6033 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6034 *const b = (const struct tree_map_base *) vb;
6035 return (a->from == b->from);
6038 /* Hash a from tree in a tree_base_map. */
6040 unsigned int
6041 tree_map_base_hash (const void *item)
6043 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6046 /* Return true if this tree map structure is marked for garbage collection
6047 purposes. We simply return true if the from tree is marked, so that this
6048 structure goes away when the from tree goes away. */
6051 tree_map_base_marked_p (const void *p)
6053 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6056 /* Hash a from tree in a tree_map. */
6058 unsigned int
6059 tree_map_hash (const void *item)
6061 return (((const struct tree_map *) item)->hash);
6064 /* Hash a from tree in a tree_decl_map. */
6066 unsigned int
6067 tree_decl_map_hash (const void *item)
6069 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6072 /* Return the initialization priority for DECL. */
6074 priority_type
6075 decl_init_priority_lookup (tree decl)
6077 symtab_node *snode = symtab_node::get (decl);
6079 if (!snode)
6080 return DEFAULT_INIT_PRIORITY;
6081 return
6082 snode->get_init_priority ();
6085 /* Return the finalization priority for DECL. */
6087 priority_type
6088 decl_fini_priority_lookup (tree decl)
6090 cgraph_node *node = cgraph_node::get (decl);
6092 if (!node)
6093 return DEFAULT_INIT_PRIORITY;
6094 return
6095 node->get_fini_priority ();
6098 /* Set the initialization priority for DECL to PRIORITY. */
6100 void
6101 decl_init_priority_insert (tree decl, priority_type priority)
6103 struct symtab_node *snode;
6105 if (priority == DEFAULT_INIT_PRIORITY)
6107 snode = symtab_node::get (decl);
6108 if (!snode)
6109 return;
6111 else if (VAR_P (decl))
6112 snode = varpool_node::get_create (decl);
6113 else
6114 snode = cgraph_node::get_create (decl);
6115 snode->set_init_priority (priority);
6118 /* Set the finalization priority for DECL to PRIORITY. */
6120 void
6121 decl_fini_priority_insert (tree decl, priority_type priority)
6123 struct cgraph_node *node;
6125 if (priority == DEFAULT_INIT_PRIORITY)
6127 node = cgraph_node::get (decl);
6128 if (!node)
6129 return;
6131 else
6132 node = cgraph_node::get_create (decl);
6133 node->set_fini_priority (priority);
6136 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6138 static void
6139 print_debug_expr_statistics (void)
6141 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6142 (long) debug_expr_for_decl->size (),
6143 (long) debug_expr_for_decl->elements (),
6144 debug_expr_for_decl->collisions ());
6147 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6149 static void
6150 print_value_expr_statistics (void)
6152 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6153 (long) value_expr_for_decl->size (),
6154 (long) value_expr_for_decl->elements (),
6155 value_expr_for_decl->collisions ());
6158 /* Lookup a debug expression for FROM, and return it if we find one. */
6160 tree
6161 decl_debug_expr_lookup (tree from)
6163 struct tree_decl_map *h, in;
6164 in.base.from = from;
6166 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6167 if (h)
6168 return h->to;
6169 return NULL_TREE;
6172 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6174 void
6175 decl_debug_expr_insert (tree from, tree to)
6177 struct tree_decl_map *h;
6179 h = ggc_alloc<tree_decl_map> ();
6180 h->base.from = from;
6181 h->to = to;
6182 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6185 /* Lookup a value expression for FROM, and return it if we find one. */
6187 tree
6188 decl_value_expr_lookup (tree from)
6190 struct tree_decl_map *h, in;
6191 in.base.from = from;
6193 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6194 if (h)
6195 return h->to;
6196 return NULL_TREE;
6199 /* Insert a mapping FROM->TO in the value expression hashtable. */
6201 void
6202 decl_value_expr_insert (tree from, tree to)
6204 struct tree_decl_map *h;
6206 h = ggc_alloc<tree_decl_map> ();
6207 h->base.from = from;
6208 h->to = to;
6209 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
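/* Illustrative usage sketch ("decl" and "expr" are hypothetical trees):

     decl_value_expr_insert (decl, expr);
     gcc_assert (decl_value_expr_lookup (decl) == expr);

   Both tables are keyed on DECL_UID; an entry is kept alive only while the
   key decl itself is marked (see tree_map_base_marked_p above).  */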
6212 /* Lookup a vector of debug arguments for FROM, and return it if we
6213 find one. */
6215 vec<tree, va_gc> **
6216 decl_debug_args_lookup (tree from)
6218 struct tree_vec_map *h, in;
6220 if (!DECL_HAS_DEBUG_ARGS_P (from))
6221 return NULL;
6222 gcc_checking_assert (debug_args_for_decl != NULL);
6223 in.base.from = from;
6224 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6225 if (h)
6226 return &h->to;
6227 return NULL;
6230 /* Insert a mapping FROM->empty vector of debug arguments in the value
6231 expression hashtable. */
6233 vec<tree, va_gc> **
6234 decl_debug_args_insert (tree from)
6236 struct tree_vec_map *h;
6237 tree_vec_map **loc;
6239 if (DECL_HAS_DEBUG_ARGS_P (from))
6240 return decl_debug_args_lookup (from);
6241 if (debug_args_for_decl == NULL)
6242 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6243 h = ggc_alloc<tree_vec_map> ();
6244 h->base.from = from;
6245 h->to = NULL;
6246 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6247 *loc = h;
6248 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6249 return &h->to;
6252 /* Hashing of types so that we don't make duplicates.
6253 The entry point is `type_hash_canon'. */
6255 /* Generate the default hash code for TYPE. This is designed for
6256 speed, rather than maximum entropy. */
6258 hashval_t
6259 type_hash_canon_hash (tree type)
6261 inchash::hash hstate;
6263 hstate.add_int (TREE_CODE (type));
6265 if (TREE_TYPE (type))
6266 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6268 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6269 /* Just the identifier is adequate to distinguish. */
6270 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6272 switch (TREE_CODE (type))
6274 case METHOD_TYPE:
6275 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6276 /* FALLTHROUGH. */
6277 case FUNCTION_TYPE:
6278 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6279 if (TREE_VALUE (t) != error_mark_node)
6280 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6281 break;
6283 case OFFSET_TYPE:
6284 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6285 break;
6287 case ARRAY_TYPE:
6289 if (TYPE_DOMAIN (type))
6290 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6291 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6293 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6294 hstate.add_object (typeless);
6297 break;
6299 case INTEGER_TYPE:
6301 tree t = TYPE_MAX_VALUE (type);
6302 if (!t)
6303 t = TYPE_MIN_VALUE (type);
6304 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6305 hstate.add_object (TREE_INT_CST_ELT (t, i));
6306 break;
6309 case REAL_TYPE:
6310 case FIXED_POINT_TYPE:
6312 unsigned prec = TYPE_PRECISION (type);
6313 hstate.add_object (prec);
6314 break;
6317 case VECTOR_TYPE:
6319 unsigned nunits = TYPE_VECTOR_SUBPARTS (type);
6320 hstate.add_object (nunits);
6321 break;
6324 default:
6325 break;
6328 return hstate.end ();
6331 /* These are the Hashtable callback functions. */
6333 /* Returns true iff the types are equivalent. */
6335 bool
6336 type_cache_hasher::equal (type_hash *a, type_hash *b)
6338 /* First test the things that are the same for all types. */
6339 if (a->hash != b->hash
6340 || TREE_CODE (a->type) != TREE_CODE (b->type)
6341 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6342 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6343 TYPE_ATTRIBUTES (b->type))
6344 || (TREE_CODE (a->type) != COMPLEX_TYPE
6345 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6346 return 0;
6348 /* Be careful about comparing arrays before and after the element type
6349 has been completed; don't compare TYPE_ALIGN unless both types are
6350 complete. */
6351 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6352 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6353 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6354 return 0;
6356 switch (TREE_CODE (a->type))
6358 case VOID_TYPE:
6359 case COMPLEX_TYPE:
6360 case POINTER_TYPE:
6361 case REFERENCE_TYPE:
6362 case NULLPTR_TYPE:
6363 return 1;
6365 case VECTOR_TYPE:
6366 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6368 case ENUMERAL_TYPE:
6369 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6370 && !(TYPE_VALUES (a->type)
6371 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6372 && TYPE_VALUES (b->type)
6373 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6374 && type_list_equal (TYPE_VALUES (a->type),
6375 TYPE_VALUES (b->type))))
6376 return 0;
6378 /* fall through */
6380 case INTEGER_TYPE:
6381 case REAL_TYPE:
6382 case BOOLEAN_TYPE:
6383 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6384 return false;
6385 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6386 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6387 TYPE_MAX_VALUE (b->type)))
6388 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6389 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6390 TYPE_MIN_VALUE (b->type))));
6392 case FIXED_POINT_TYPE:
6393 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6395 case OFFSET_TYPE:
6396 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6398 case METHOD_TYPE:
6399 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6400 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6401 || (TYPE_ARG_TYPES (a->type)
6402 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6403 && TYPE_ARG_TYPES (b->type)
6404 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6405 && type_list_equal (TYPE_ARG_TYPES (a->type),
6406 TYPE_ARG_TYPES (b->type)))))
6407 break;
6408 return 0;
6409 case ARRAY_TYPE:
6410 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6411 where the flag should be inherited from the element type
6412 and can change after ARRAY_TYPEs are created; on non-aggregates
6413 compare and hash it, since scalars never have that flag set
6414 and we need to differentiate between arrays created by different
6415 front ends and middle-end-created arrays. */
6416 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6417 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6418 || (TYPE_TYPELESS_STORAGE (a->type)
6419 == TYPE_TYPELESS_STORAGE (b->type))));
6421 case RECORD_TYPE:
6422 case UNION_TYPE:
6423 case QUAL_UNION_TYPE:
6424 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6425 || (TYPE_FIELDS (a->type)
6426 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6427 && TYPE_FIELDS (b->type)
6428 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6429 && type_list_equal (TYPE_FIELDS (a->type),
6430 TYPE_FIELDS (b->type))));
6432 case FUNCTION_TYPE:
6433 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6434 || (TYPE_ARG_TYPES (a->type)
6435 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6436 && TYPE_ARG_TYPES (b->type)
6437 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6438 && type_list_equal (TYPE_ARG_TYPES (a->type),
6439 TYPE_ARG_TYPES (b->type))))
6440 break;
6441 return 0;
6443 default:
6444 return 0;
6447 if (lang_hooks.types.type_hash_eq != NULL)
6448 return lang_hooks.types.type_hash_eq (a->type, b->type);
6450 return 1;
6453 /* Given TYPE, and HASHCODE its hash code, return the canonical
6454 object for an identical type if one already exists.
6455 Otherwise, return TYPE, and record it as the canonical object.
6457 To use this function, first create a type of the sort you want.
6458 Then compute its hash code from the fields of the type that
6459 make it different from other similar types.
6460 Then call this function and use the value. */
6462 tree
6463 type_hash_canon (unsigned int hashcode, tree type)
6465 type_hash in;
6466 type_hash **loc;
6468 /* The hash table only contains main variants, so ensure that's what we're
6469 being passed. */
6470 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6472 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6473 must call that routine before comparing TYPE_ALIGNs. */
6474 layout_type (type);
6476 in.hash = hashcode;
6477 in.type = type;
6479 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6480 if (*loc)
6482 tree t1 = ((type_hash *) *loc)->type;
6483 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6484 if (TYPE_UID (type) + 1 == next_type_uid)
6485 --next_type_uid;
6486 /* Also free the min/max values and the cache for integer
6487 types. This can't be done in free_node, as LTO frees
6488 those on its own. */
6489 if (TREE_CODE (type) == INTEGER_TYPE)
6491 if (TYPE_MIN_VALUE (type)
6492 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6494 /* Zero is always in TYPE_CACHED_VALUES. */
6495 if (! TYPE_UNSIGNED (type))
6496 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6497 ggc_free (TYPE_MIN_VALUE (type));
6499 if (TYPE_MAX_VALUE (type)
6500 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6502 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6503 ggc_free (TYPE_MAX_VALUE (type));
6505 if (TYPE_CACHED_VALUES_P (type))
6506 ggc_free (TYPE_CACHED_VALUES (type));
6508 free_node (type);
6509 return t1;
6511 else
6513 struct type_hash *h;
6515 h = ggc_alloc<type_hash> ();
6516 h->hash = hashcode;
6517 h->type = type;
6518 *loc = h;
6520 return type;
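
/* Illustrative sketch of the protocol documented above (not part of
   tree.c); it mirrors what build_offset_type does further down.  Build a
   candidate node, hash it with type_hash_canon_hash, and let
   type_hash_canon either return an existing equivalent type (freeing the
   candidate) or register the candidate as the canonical one.  */

static tree
example_canonicalized_offset_type (tree basetype, tree type)
{
  tree t = make_node (OFFSET_TYPE);
  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  hashval_t hash = type_hash_canon_hash (t);
  return type_hash_canon (hash, t);
}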
6524 static void
6525 print_type_hash_statistics (void)
6527 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6528 (long) type_hash_table->size (),
6529 (long) type_hash_table->elements (),
6530 type_hash_table->collisions ());
6533 /* Given two lists of types
6534 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6535 return 1 if the lists contain the same types in the same order.
6536 Also, the TREE_PURPOSEs must match. */
6538 int
6539 type_list_equal (const_tree l1, const_tree l2)
6541 const_tree t1, t2;
6543 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6544 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6545 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6546 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6547 && (TREE_TYPE (TREE_PURPOSE (t1))
6548 == TREE_TYPE (TREE_PURPOSE (t2))))))
6549 return 0;
6551 return t1 == t2;
6554 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6555 given by TYPE. If the argument list accepts variable arguments,
6556 then this function counts only the ordinary arguments. */
6558 int
6559 type_num_arguments (const_tree type)
6561 int i = 0;
6562 tree t;
6564 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6565 /* If the function does not take a variable number of arguments,
6566 the last element in the list will have type `void'. */
6567 if (VOID_TYPE_P (TREE_VALUE (t)))
6568 break;
6569 else
6570 ++i;
6572 return i;
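
/* For example, for a prototype `int f (int, char, ...)' the TYPE_ARG_TYPES
   list is (int, char) with no terminating void node, so type_num_arguments
   returns 2; for `int g (void)' the list is just the void terminator and
   the result is 0.  */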
6575 /* Nonzero if integer constants T1 and T2
6576 represent the same constant value. */
6578 int
6579 tree_int_cst_equal (const_tree t1, const_tree t2)
6581 if (t1 == t2)
6582 return 1;
6584 if (t1 == 0 || t2 == 0)
6585 return 0;
6587 if (TREE_CODE (t1) == INTEGER_CST
6588 && TREE_CODE (t2) == INTEGER_CST
6589 && wi::to_widest (t1) == wi::to_widest (t2))
6590 return 1;
6592 return 0;
6595 /* Return true if T is an INTEGER_CST whose numerical value (extended
6596 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6598 bool
6599 tree_fits_shwi_p (const_tree t)
6601 return (t != NULL_TREE
6602 && TREE_CODE (t) == INTEGER_CST
6603 && wi::fits_shwi_p (wi::to_widest (t)));
6606 /* Return true if T is an INTEGER_CST whose numerical value (extended
6607 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6609 bool
6610 tree_fits_uhwi_p (const_tree t)
6612 return (t != NULL_TREE
6613 && TREE_CODE (t) == INTEGER_CST
6614 && wi::fits_uhwi_p (wi::to_widest (t)));
6617 /* T is an INTEGER_CST whose numerical value (extended according to
6618 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6619 HOST_WIDE_INT. */
6621 HOST_WIDE_INT
6622 tree_to_shwi (const_tree t)
6624 gcc_assert (tree_fits_shwi_p (t));
6625 return TREE_INT_CST_LOW (t);
6628 /* T is an INTEGER_CST whose numerical value (extended according to
6629 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6630 HOST_WIDE_INT. */
6632 unsigned HOST_WIDE_INT
6633 tree_to_uhwi (const_tree t)
6635 gcc_assert (tree_fits_uhwi_p (t));
6636 return TREE_INT_CST_LOW (t);
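
/* Illustrative usage sketch (not part of tree.c): the fits_* predicate must
   guard the conversion, since tree_to_uhwi asserts tree_fits_uhwi_p.  SIZE
   is a hypothetical INTEGER_CST (or NULL_TREE).  */

static unsigned HOST_WIDE_INT
example_size_or_fallback (tree size, unsigned HOST_WIDE_INT fallback)
{
  if (tree_fits_uhwi_p (size))
    return tree_to_uhwi (size);
  return fallback;
}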
6639 /* Return the most significant (sign) bit of T. */
6641 int
6642 tree_int_cst_sign_bit (const_tree t)
6644 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6646 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6649 /* Return an indication of the sign of the integer constant T.
6650 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6651 Note that -1 will never be returned if T's type is unsigned. */
6653 int
6654 tree_int_cst_sgn (const_tree t)
6656 if (wi::to_wide (t) == 0)
6657 return 0;
6658 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6659 return 1;
6660 else if (wi::neg_p (wi::to_wide (t)))
6661 return -1;
6662 else
6663 return 1;
6666 /* Return the minimum number of bits needed to represent VALUE in a
6667 signed or unsigned type; SGN says which. */
6669 unsigned int
6670 tree_int_cst_min_precision (tree value, signop sgn)
6672 /* If the value is negative, compute its negative minus 1. The latter
6673 adjustment is because the absolute value of the largest negative value
6674 is one larger than the largest positive value. This is equivalent to
6675 a bit-wise negation, so use that operation instead. */
6677 if (tree_int_cst_sgn (value) < 0)
6678 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6680 /* Return the number of bits needed, taking into account the fact
6681 that we need one more bit for a signed than unsigned type.
6682 If value is 0 or -1, the minimum precision is 1 no matter
6683 whether SGN is SIGNED or UNSIGNED. */
6685 if (integer_zerop (value))
6686 return 1;
6687 else
6688 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
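
/* Worked examples of the above: 5 needs tree_floor_log2 (5) + 1 = 3 bits
   when SGN is UNSIGNED and 4 bits when SGN is SIGNED; -3 is first
   bit-not'ed to 2, giving tree_floor_log2 (2) + 1 + 1 = 3 signed bits
   (enough for the range [-4, 3]); 0 and -1 both report a minimum precision
   of 1.  */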
6691 /* Return truthvalue of whether T1 is the same tree structure as T2.
6692 Return 1 if they are the same.
6693 Return 0 if they are different in a way this function understands.
6694 Return -1 if either contains tree structure not understood by
6695 this function. */
6697 int
6698 simple_cst_equal (const_tree t1, const_tree t2)
6700 enum tree_code code1, code2;
6701 int cmp;
6702 int i;
6704 if (t1 == t2)
6705 return 1;
6706 if (t1 == 0 || t2 == 0)
6707 return 0;
6709 code1 = TREE_CODE (t1);
6710 code2 = TREE_CODE (t2);
6712 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6714 if (CONVERT_EXPR_CODE_P (code2)
6715 || code2 == NON_LVALUE_EXPR)
6716 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6717 else
6718 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6721 else if (CONVERT_EXPR_CODE_P (code2)
6722 || code2 == NON_LVALUE_EXPR)
6723 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6725 if (code1 != code2)
6726 return 0;
6728 switch (code1)
6730 case INTEGER_CST:
6731 return wi::to_widest (t1) == wi::to_widest (t2);
6733 case REAL_CST:
6734 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6736 case FIXED_CST:
6737 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6739 case STRING_CST:
6740 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6741 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6742 TREE_STRING_LENGTH (t1)));
6744 case CONSTRUCTOR:
6746 unsigned HOST_WIDE_INT idx;
6747 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6748 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6750 if (vec_safe_length (v1) != vec_safe_length (v2))
6751 return false;
6753 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6754 /* ??? Should we also handle fields here? */
6755 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6756 return false;
6757 return true;
6760 case SAVE_EXPR:
6761 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6763 case CALL_EXPR:
6764 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6765 if (cmp <= 0)
6766 return cmp;
6767 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6768 return 0;
6770 const_tree arg1, arg2;
6771 const_call_expr_arg_iterator iter1, iter2;
6772 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6773 arg2 = first_const_call_expr_arg (t2, &iter2);
6774 arg1 && arg2;
6775 arg1 = next_const_call_expr_arg (&iter1),
6776 arg2 = next_const_call_expr_arg (&iter2))
6778 cmp = simple_cst_equal (arg1, arg2);
6779 if (cmp <= 0)
6780 return cmp;
6782 return arg1 == arg2;
6785 case TARGET_EXPR:
6786 /* Special case: if either target is an unallocated VAR_DECL,
6787 it means that it's going to be unified with whatever the
6788 TARGET_EXPR is really supposed to initialize, so treat it
6789 as being equivalent to anything. */
6790 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6791 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6792 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6793 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6794 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6795 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6796 cmp = 1;
6797 else
6798 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6800 if (cmp <= 0)
6801 return cmp;
6803 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6805 case WITH_CLEANUP_EXPR:
6806 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6807 if (cmp <= 0)
6808 return cmp;
6810 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6812 case COMPONENT_REF:
6813 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6814 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6816 return 0;
6818 case VAR_DECL:
6819 case PARM_DECL:
6820 case CONST_DECL:
6821 case FUNCTION_DECL:
6822 return 0;
6824 default:
6825 break;
6828 /* This general rule works for most tree codes. All exceptions should be
6829 handled above. If this is a language-specific tree code, we can't
6830 trust what might be in the operand, so say we don't know
6831 the situation. */
6832 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6833 return -1;
6835 switch (TREE_CODE_CLASS (code1))
6837 case tcc_unary:
6838 case tcc_binary:
6839 case tcc_comparison:
6840 case tcc_expression:
6841 case tcc_reference:
6842 case tcc_statement:
6843 cmp = 1;
6844 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6846 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6847 if (cmp <= 0)
6848 return cmp;
6851 return cmp;
6853 default:
6854 return -1;
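
/* Illustrative usage sketch (not part of tree.c): because the result is
   tri-state, only an exact 1 may be treated as "known equal"; 0 means known
   different and -1 means the walk gave up.  */

static bool
example_known_equal_p (const_tree a, const_tree b)
{
  return simple_cst_equal (a, b) == 1;
}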
6858 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6859 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6860 than U, respectively. */
6862 int
6863 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6865 if (tree_int_cst_sgn (t) < 0)
6866 return -1;
6867 else if (!tree_fits_uhwi_p (t))
6868 return 1;
6869 else if (TREE_INT_CST_LOW (t) == u)
6870 return 0;
6871 else if (TREE_INT_CST_LOW (t) < u)
6872 return -1;
6873 else
6874 return 1;
6877 /* Return true if SIZE represents a constant size that is in bounds of
6878 what the middle-end and the backend accept (covering not more than
6879 half of the address-space). */
6881 bool
6882 valid_constant_size_p (const_tree size)
6884 if (! tree_fits_uhwi_p (size)
6885 || TREE_OVERFLOW (size)
6886 || tree_int_cst_sign_bit (size) != 0)
6887 return false;
6888 return true;
6891 /* Return the precision of the type, or for a complex or vector type the
6892 precision of the type of its elements. */
6894 unsigned int
6895 element_precision (const_tree type)
6897 if (!TYPE_P (type))
6898 type = TREE_TYPE (type);
6899 enum tree_code code = TREE_CODE (type);
6900 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6901 type = TREE_TYPE (type);
6903 return TYPE_PRECISION (type);
6906 /* Return true if CODE represents an associative tree code. Otherwise
6907 return false. */
6908 bool
6909 associative_tree_code (enum tree_code code)
6911 switch (code)
6913 case BIT_IOR_EXPR:
6914 case BIT_AND_EXPR:
6915 case BIT_XOR_EXPR:
6916 case PLUS_EXPR:
6917 case MULT_EXPR:
6918 case MIN_EXPR:
6919 case MAX_EXPR:
6920 return true;
6922 default:
6923 break;
6925 return false;
6928 /* Return true if CODE represents a commutative tree code. Otherwise
6929 return false. */
6930 bool
6931 commutative_tree_code (enum tree_code code)
6933 switch (code)
6935 case PLUS_EXPR:
6936 case MULT_EXPR:
6937 case MULT_HIGHPART_EXPR:
6938 case MIN_EXPR:
6939 case MAX_EXPR:
6940 case BIT_IOR_EXPR:
6941 case BIT_XOR_EXPR:
6942 case BIT_AND_EXPR:
6943 case NE_EXPR:
6944 case EQ_EXPR:
6945 case UNORDERED_EXPR:
6946 case ORDERED_EXPR:
6947 case UNEQ_EXPR:
6948 case LTGT_EXPR:
6949 case TRUTH_AND_EXPR:
6950 case TRUTH_XOR_EXPR:
6951 case TRUTH_OR_EXPR:
6952 case WIDEN_MULT_EXPR:
6953 case VEC_WIDEN_MULT_HI_EXPR:
6954 case VEC_WIDEN_MULT_LO_EXPR:
6955 case VEC_WIDEN_MULT_EVEN_EXPR:
6956 case VEC_WIDEN_MULT_ODD_EXPR:
6957 return true;
6959 default:
6960 break;
6962 return false;
6965 /* Return true if CODE represents a ternary tree code for which the
6966 first two operands are commutative. Otherwise return false. */
6967 bool
6968 commutative_ternary_tree_code (enum tree_code code)
6970 switch (code)
6972 case WIDEN_MULT_PLUS_EXPR:
6973 case WIDEN_MULT_MINUS_EXPR:
6974 case DOT_PROD_EXPR:
6975 case FMA_EXPR:
6976 return true;
6978 default:
6979 break;
6981 return false;
6984 /* Returns true if CODE can overflow. */
6986 bool
6987 operation_can_overflow (enum tree_code code)
6989 switch (code)
6991 case PLUS_EXPR:
6992 case MINUS_EXPR:
6993 case MULT_EXPR:
6994 case LSHIFT_EXPR:
6995 /* Can overflow in various ways. */
6996 return true;
6997 case TRUNC_DIV_EXPR:
6998 case EXACT_DIV_EXPR:
6999 case FLOOR_DIV_EXPR:
7000 case CEIL_DIV_EXPR:
7001 /* For INT_MIN / -1. */
7002 return true;
7003 case NEGATE_EXPR:
7004 case ABS_EXPR:
7005 /* For -INT_MIN. */
7006 return true;
7007 default:
7008 /* These operators cannot overflow. */
7009 return false;
7013 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7014 -ftrapv doesn't generate trapping insns for CODE. */
7016 bool
7017 operation_no_trapping_overflow (tree type, enum tree_code code)
7019 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7021 /* We don't generate instructions that trap on overflow for complex or vector
7022 types. */
7023 if (!INTEGRAL_TYPE_P (type))
7024 return true;
7026 if (!TYPE_OVERFLOW_TRAPS (type))
7027 return true;
7029 switch (code)
7031 case PLUS_EXPR:
7032 case MINUS_EXPR:
7033 case MULT_EXPR:
7034 case NEGATE_EXPR:
7035 case ABS_EXPR:
7036 /* These operators can overflow, and -ftrapv generates trapping code for
7037 these. */
7038 return false;
7039 case TRUNC_DIV_EXPR:
7040 case EXACT_DIV_EXPR:
7041 case FLOOR_DIV_EXPR:
7042 case CEIL_DIV_EXPR:
7043 case LSHIFT_EXPR:
7044 /* These operators can overflow, but -ftrapv does not generate trapping
7045 code for these. */
7046 return true;
7047 default:
7048 /* These operators cannot overflow. */
7049 return true;
7053 namespace inchash
7056 /* Generate a hash value for an expression. This can be used iteratively
7057 by passing a previous result as the HSTATE argument.
7059 This function is intended to produce the same hash for expressions which
7060 would compare equal using operand_equal_p. */
7061 void
7062 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
7064 int i;
7065 enum tree_code code;
7066 enum tree_code_class tclass;
7068 if (t == NULL_TREE || t == error_mark_node)
7070 hstate.merge_hash (0);
7071 return;
7074 if (!(flags & OEP_ADDRESS_OF))
7075 STRIP_NOPS (t);
7077 code = TREE_CODE (t);
7079 switch (code)
7081 /* Alas, constants aren't shared, so we can't rely on pointer
7082 identity. */
7083 case VOID_CST:
7084 hstate.merge_hash (0);
7085 return;
7086 case INTEGER_CST:
7087 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7088 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
7089 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
7090 return;
7091 case REAL_CST:
7093 unsigned int val2;
7094 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
7095 val2 = rvc_zero;
7096 else
7097 val2 = real_hash (TREE_REAL_CST_PTR (t));
7098 hstate.merge_hash (val2);
7099 return;
7101 case FIXED_CST:
7103 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7104 hstate.merge_hash (val2);
7105 return;
7107 case STRING_CST:
7108 hstate.add ((const void *) TREE_STRING_POINTER (t),
7109 TREE_STRING_LENGTH (t));
7110 return;
7111 case COMPLEX_CST:
7112 inchash::add_expr (TREE_REALPART (t), hstate, flags);
7113 inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
7114 return;
7115 case VECTOR_CST:
7117 hstate.add_int (VECTOR_CST_NPATTERNS (t));
7118 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
7119 unsigned int count = vector_cst_encoded_nelts (t);
7120 for (unsigned int i = 0; i < count; ++i)
7121 inchash::add_expr (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
7122 return;
7124 case SSA_NAME:
7125 /* We can just compare by pointer. */
7126 hstate.add_hwi (SSA_NAME_VERSION (t));
7127 return;
7128 case PLACEHOLDER_EXPR:
7129 /* The node itself doesn't matter. */
7130 return;
7131 case BLOCK:
7132 case OMP_CLAUSE:
7133 /* Ignore. */
7134 return;
7135 case TREE_LIST:
7136 /* A list of expressions, for a CALL_EXPR or as the elements of a
7137 VECTOR_CST. */
7138 for (; t; t = TREE_CHAIN (t))
7139 inchash::add_expr (TREE_VALUE (t), hstate, flags);
7140 return;
7141 case CONSTRUCTOR:
7143 unsigned HOST_WIDE_INT idx;
7144 tree field, value;
7145 flags &= ~OEP_ADDRESS_OF;
7146 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7148 inchash::add_expr (field, hstate, flags);
7149 inchash::add_expr (value, hstate, flags);
7151 return;
7153 case STATEMENT_LIST:
7155 tree_stmt_iterator i;
7156 for (i = tsi_start (CONST_CAST_TREE (t));
7157 !tsi_end_p (i); tsi_next (&i))
7158 inchash::add_expr (tsi_stmt (i), hstate, flags);
7159 return;
7161 case TREE_VEC:
7162 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
7163 inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags);
7164 return;
7165 case FUNCTION_DECL:
7166 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7167 Otherwise nodes that compare equal according to operand_equal_p might
7168 get different hash codes. However, don't do this for machine specific
7169 or front end builtins, since the function code is overloaded in those
7170 cases. */
7171 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7172 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7174 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7175 code = TREE_CODE (t);
7177 /* FALL THROUGH */
7178 default:
7179 tclass = TREE_CODE_CLASS (code);
7181 if (tclass == tcc_declaration)
7183 /* DECLs have a unique ID. */
7184 hstate.add_hwi (DECL_UID (t));
7186 else if (tclass == tcc_comparison && !commutative_tree_code (code))
7188 /* For comparisons that can be swapped, use the lower
7189 tree code. */
7190 enum tree_code ccode = swap_tree_comparison (code);
7191 if (code < ccode)
7192 ccode = code;
7193 hstate.add_object (ccode);
7194 inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
7195 inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
7197 else if (CONVERT_EXPR_CODE_P (code))
7199 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7200 operand_equal_p. */
7201 enum tree_code ccode = NOP_EXPR;
7202 hstate.add_object (ccode);
7204 /* Don't hash the type, that can lead to having nodes which
7205 compare equal according to operand_equal_p, but which
7206 have different hash codes. Make sure to include signedness
7207 in the hash computation. */
7208 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7209 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7211 /* For OEP_ADDRESS_OF, hash MEM_REF [&decl, 0] the same as decl. */
7212 else if (code == MEM_REF
7213 && (flags & OEP_ADDRESS_OF) != 0
7214 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
7215 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
7216 && integer_zerop (TREE_OPERAND (t, 1)))
7217 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
7218 hstate, flags);
7219 /* Don't ICE on FE specific trees, or their arguments etc.
7220 during operand_equal_p hash verification. */
7221 else if (!IS_EXPR_CODE_CLASS (tclass))
7222 gcc_assert (flags & OEP_HASH_CHECK);
7223 else
7225 unsigned int sflags = flags;
7227 hstate.add_object (code);
7229 switch (code)
7231 case ADDR_EXPR:
7232 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
7233 flags |= OEP_ADDRESS_OF;
7234 sflags = flags;
7235 break;
7237 case INDIRECT_REF:
7238 case MEM_REF:
7239 case TARGET_MEM_REF:
7240 flags &= ~OEP_ADDRESS_OF;
7241 sflags = flags;
7242 break;
7244 case ARRAY_REF:
7245 case ARRAY_RANGE_REF:
7246 case COMPONENT_REF:
7247 case BIT_FIELD_REF:
7248 sflags &= ~OEP_ADDRESS_OF;
7249 break;
7251 case COND_EXPR:
7252 flags &= ~OEP_ADDRESS_OF;
7253 break;
7255 case FMA_EXPR:
7256 case WIDEN_MULT_PLUS_EXPR:
7257 case WIDEN_MULT_MINUS_EXPR:
7259 /* The multiplication operands are commutative. */
7260 inchash::hash one, two;
7261 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7262 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7263 hstate.add_commutative (one, two);
7264 inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
7265 return;
7268 case CALL_EXPR:
7269 if (CALL_EXPR_FN (t) == NULL_TREE)
7270 hstate.add_int (CALL_EXPR_IFN (t));
7271 break;
7273 case TARGET_EXPR:
7274 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
7275 Usually different TARGET_EXPRs should just use
7276 different temporaries in their slots. */
7277 inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
7278 return;
7280 default:
7281 break;
7284 /* Don't hash the type, that can lead to having nodes which
7285 compare equal according to operand_equal_p, but which
7286 have different hash codes. */
7287 if (code == NON_LVALUE_EXPR)
7289 /* Make sure to include signedness in the hash computation. */
7290 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7291 inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
7294 else if (commutative_tree_code (code))
7296 /* It's a commutative expression. We want to hash it the same
7297 however it appears. We do this by first hashing both operands
7298 and then rehashing based on the order of their independent
7299 hashes. */
7300 inchash::hash one, two;
7301 inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
7302 inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
7303 hstate.add_commutative (one, two);
7305 else
7306 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7307 inchash::add_expr (TREE_OPERAND (t, i), hstate,
7308 i == 0 ? flags : sflags);
7310 return;
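
/* Illustrative usage sketch (not part of tree.c): hash a single expression
   the same way build_nonstandard_integer_type below hashes TYPE_MAX_VALUE;
   several expressions can be fed into one hstate iteratively before
   calling end ().  */

static hashval_t
example_hash_of_expr (tree expr)
{
  inchash::hash hstate;
  inchash::add_expr (expr, hstate);
  return hstate.end ();
}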
7316 /* Constructors for pointer, array and function types.
7317 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7318 constructed by language-dependent code, not here.) */
7320 /* Construct, lay out and return the type of pointers to TO_TYPE with
7321 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7322 reference all of memory. If such a type has already been
7323 constructed, reuse it. */
7325 tree
7326 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7327 bool can_alias_all)
7329 tree t;
7330 bool could_alias = can_alias_all;
7332 if (to_type == error_mark_node)
7333 return error_mark_node;
7335 /* If the pointed-to type has the may_alias attribute set, force
7336 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7337 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7338 can_alias_all = true;
7340 /* In some cases, languages will have things that aren't a POINTER_TYPE
7341 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7342 In that case, return that type without regard to the rest of our
7343 operands.
7345 ??? This is a kludge, but consistent with the way this function has
7346 always operated and there doesn't seem to be a good way to avoid this
7347 at the moment. */
7348 if (TYPE_POINTER_TO (to_type) != 0
7349 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7350 return TYPE_POINTER_TO (to_type);
7352 /* First, if we already have a type for pointers to TO_TYPE and it's
7353 the proper mode, use it. */
7354 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7355 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7356 return t;
7358 t = make_node (POINTER_TYPE);
7360 TREE_TYPE (t) = to_type;
7361 SET_TYPE_MODE (t, mode);
7362 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7363 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7364 TYPE_POINTER_TO (to_type) = t;
7366 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7367 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7368 SET_TYPE_STRUCTURAL_EQUALITY (t);
7369 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7370 TYPE_CANONICAL (t)
7371 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7372 mode, false);
7374 /* Lay out the type. This function has many callers that are concerned
7375 with expression-construction, and this simplifies them all. */
7376 layout_type (t);
7378 return t;
7381 /* By default build pointers in ptr_mode. */
7383 tree
7384 build_pointer_type (tree to_type)
7386 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7387 : TYPE_ADDR_SPACE (to_type);
7388 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7389 return build_pointer_type_for_mode (to_type, pointer_mode, false);
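
/* Illustrative usage sketch (not part of tree.c): pointer nodes are chained
   off the pointed-to type, so a second request with the same mode and alias
   setting returns the node built by the first one.  */

static void
example_pointer_type_is_cached (void)
{
  tree p1 = build_pointer_type (integer_type_node);
  tree p2 = build_pointer_type (integer_type_node);
  gcc_checking_assert (p1 == p2);
}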
7392 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7394 tree
7395 build_reference_type_for_mode (tree to_type, machine_mode mode,
7396 bool can_alias_all)
7398 tree t;
7399 bool could_alias = can_alias_all;
7401 if (to_type == error_mark_node)
7402 return error_mark_node;
7404 /* If the pointed-to type has the may_alias attribute set, force
7405 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7406 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7407 can_alias_all = true;
7409 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7410 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7411 In that case, return that type without regard to the rest of our
7412 operands.
7414 ??? This is a kludge, but consistent with the way this function has
7415 always operated and there doesn't seem to be a good way to avoid this
7416 at the moment. */
7417 if (TYPE_REFERENCE_TO (to_type) != 0
7418 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7419 return TYPE_REFERENCE_TO (to_type);
7421 /* First, if we already have a type for references to TO_TYPE and it's
7422 the proper mode, use it. */
7423 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7424 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7425 return t;
7427 t = make_node (REFERENCE_TYPE);
7429 TREE_TYPE (t) = to_type;
7430 SET_TYPE_MODE (t, mode);
7431 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7432 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7433 TYPE_REFERENCE_TO (to_type) = t;
7435 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7436 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7437 SET_TYPE_STRUCTURAL_EQUALITY (t);
7438 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7439 TYPE_CANONICAL (t)
7440 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7441 mode, false);
7443 layout_type (t);
7445 return t;
7449 /* Build the node for the type of references-to-TO_TYPE by default
7450 in ptr_mode. */
7452 tree
7453 build_reference_type (tree to_type)
7455 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7456 : TYPE_ADDR_SPACE (to_type);
7457 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7458 return build_reference_type_for_mode (to_type, pointer_mode, false);
7461 #define MAX_INT_CACHED_PREC \
7462 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7463 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7465 /* Builds a signed or unsigned integer type of precision PRECISION.
7466 Used for C bitfields whose precision does not match that of
7467 built-in target types. */
7468 tree
7469 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7470 int unsignedp)
7472 tree itype, ret;
7474 if (unsignedp)
7475 unsignedp = MAX_INT_CACHED_PREC + 1;
7477 if (precision <= MAX_INT_CACHED_PREC)
7479 itype = nonstandard_integer_type_cache[precision + unsignedp];
7480 if (itype)
7481 return itype;
7484 itype = make_node (INTEGER_TYPE);
7485 TYPE_PRECISION (itype) = precision;
7487 if (unsignedp)
7488 fixup_unsigned_type (itype);
7489 else
7490 fixup_signed_type (itype);
7492 ret = itype;
7494 inchash::hash hstate;
7495 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7496 ret = type_hash_canon (hstate.end (), itype);
7497 if (precision <= MAX_INT_CACHED_PREC)
7498 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7500 return ret;
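
/* Illustrative usage sketch (not part of tree.c): a 24-bit unsigned type
   such as a C bit-field of that width would use; a repeated request with
   the same precision and signedness is served from
   nonstandard_integer_type_cache.  */

static tree
example_uint24_type (void)
{
  return build_nonstandard_integer_type (24, /*unsignedp=*/1);
}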
7503 #define MAX_BOOL_CACHED_PREC \
7504 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7505 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7507 /* Builds a boolean type of precision PRECISION.
7508 Used for boolean vectors to choose proper vector element size. */
7509 tree
7510 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7512 tree type;
7514 if (precision <= MAX_BOOL_CACHED_PREC)
7516 type = nonstandard_boolean_type_cache[precision];
7517 if (type)
7518 return type;
7521 type = make_node (BOOLEAN_TYPE);
7522 TYPE_PRECISION (type) = precision;
7523 fixup_signed_type (type);
7525 if (precision <= MAX_BOOL_CACHED_PREC)
7526 nonstandard_boolean_type_cache[precision] = type;
7528 return type;
7531 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7532 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7533 is true, reuse such a type that has already been constructed. */
7535 static tree
7536 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7538 tree itype = make_node (INTEGER_TYPE);
7540 TREE_TYPE (itype) = type;
7542 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7543 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7545 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7546 SET_TYPE_MODE (itype, TYPE_MODE (type));
7547 TYPE_SIZE (itype) = TYPE_SIZE (type);
7548 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7549 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7550 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7551 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7553 if (!shared)
7554 return itype;
7556 if ((TYPE_MIN_VALUE (itype)
7557 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7558 || (TYPE_MAX_VALUE (itype)
7559 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7561 /* Since we cannot reliably merge this type, we need to compare it using
7562 structural equality checks. */
7563 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7564 return itype;
7567 hashval_t hash = type_hash_canon_hash (itype);
7568 itype = type_hash_canon (hash, itype);
7570 return itype;
7573 /* Wrapper around build_range_type_1 with SHARED set to true. */
7575 tree
7576 build_range_type (tree type, tree lowval, tree highval)
7578 return build_range_type_1 (type, lowval, highval, true);
7581 /* Wrapper around build_range_type_1 with SHARED set to false. */
7583 tree
7584 build_nonshared_range_type (tree type, tree lowval, tree highval)
7586 return build_range_type_1 (type, lowval, highval, false);
7589 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7590 MAXVAL should be the maximum value in the domain
7591 (one less than the length of the array).
7593 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7594 We don't enforce this limit; that is up to the caller (e.g. a language front end).
7595 The limit exists because the result is a signed type and we don't handle
7596 sizes that use more than one HOST_WIDE_INT. */
7598 tree
7599 build_index_type (tree maxval)
7601 return build_range_type (sizetype, size_zero_node, maxval);
7604 /* Return true if the debug information for TYPE, a subtype, should be emitted
7605 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7606 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7607 debug info and doesn't reflect the source code. */
7609 bool
7610 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7612 tree base_type = TREE_TYPE (type), low, high;
7614 /* Subrange types have a base type which is an integral type. */
7615 if (!INTEGRAL_TYPE_P (base_type))
7616 return false;
7618 /* Get the real bounds of the subtype. */
7619 if (lang_hooks.types.get_subrange_bounds)
7620 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7621 else
7623 low = TYPE_MIN_VALUE (type);
7624 high = TYPE_MAX_VALUE (type);
7627 /* If the type and its base type have the same representation and the same
7628 name, then the type is not a subrange but a copy of the base type. */
7629 if ((TREE_CODE (base_type) == INTEGER_TYPE
7630 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7631 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7632 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7633 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7634 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7635 return false;
7637 if (lowval)
7638 *lowval = low;
7639 if (highval)
7640 *highval = high;
7641 return true;
7644 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7645 and number of elements specified by the range of values of INDEX_TYPE.
7646 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7647 If SHARED is true, reuse such a type that has already been constructed. */
7649 static tree
7650 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7651 bool shared)
7653 tree t;
7655 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7657 error ("arrays of functions are not meaningful");
7658 elt_type = integer_type_node;
7661 t = make_node (ARRAY_TYPE);
7662 TREE_TYPE (t) = elt_type;
7663 TYPE_DOMAIN (t) = index_type;
7664 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7665 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7666 layout_type (t);
7668 /* If the element type is incomplete at this point we get marked for
7669 structural equality. Do not record these types in the canonical
7670 type hashtable. */
7671 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7672 return t;
7674 if (shared)
7676 hashval_t hash = type_hash_canon_hash (t);
7677 t = type_hash_canon (hash, t);
7680 if (TYPE_CANONICAL (t) == t)
7682 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7683 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7684 || in_lto_p)
7685 SET_TYPE_STRUCTURAL_EQUALITY (t);
7686 else if (TYPE_CANONICAL (elt_type) != elt_type
7687 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7688 TYPE_CANONICAL (t)
7689 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7690 index_type
7691 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7692 typeless_storage, shared);
7695 return t;
7698 /* Wrapper around build_array_type_1 with SHARED set to true. */
7700 tree
7701 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7703 return build_array_type_1 (elt_type, index_type, typeless_storage, true);
7706 /* Wrapper around build_array_type_1 with SHARED set to false. */
7708 tree
7709 build_nonshared_array_type (tree elt_type, tree index_type)
7711 return build_array_type_1 (elt_type, index_type, false, false);
7714 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7715 sizetype. */
7717 tree
7718 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7720 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
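
/* Illustrative usage sketch (not part of tree.c): `char buf[10]' can be
   modelled either by spelling out the [0, 9] sizetype domain or with the
   build_array_type_nelts shorthand above; both requests should
   canonicalize to the same ARRAY_TYPE node.  */

static tree
example_char_array_10 (void)
{
  tree domain = build_index_type (size_int (9));
  tree spelled_out = build_array_type (char_type_node, domain, false);
  tree shorthand = build_array_type_nelts (char_type_node, 10);
  gcc_checking_assert (spelled_out == shorthand);
  return shorthand;
}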
7723 /* Recursively examines the array elements of TYPE, until a non-array
7724 element type is found. */
7726 tree
7727 strip_array_types (tree type)
7729 while (TREE_CODE (type) == ARRAY_TYPE)
7730 type = TREE_TYPE (type);
7732 return type;
7735 /* Computes the canonical argument types from the argument type list
7736 ARGTYPES.
7738 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7739 on entry to this function, or if any of the ARGTYPES are
7740 structural.
7742 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7743 true on entry to this function, or if any of the ARGTYPES are
7744 non-canonical.
7746 Returns a canonical argument list, which may be ARGTYPES when the
7747 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7748 true) or would not differ from ARGTYPES. */
7750 static tree
7751 maybe_canonicalize_argtypes (tree argtypes,
7752 bool *any_structural_p,
7753 bool *any_noncanonical_p)
7755 tree arg;
7756 bool any_noncanonical_argtypes_p = false;
7758 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7760 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7761 /* Fail gracefully by stating that the type is structural. */
7762 *any_structural_p = true;
7763 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7764 *any_structural_p = true;
7765 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7766 || TREE_PURPOSE (arg))
7767 /* If the argument has a default argument, we consider it
7768 non-canonical even though the type itself is canonical.
7769 That way, different variants of function and method types
7770 with default arguments will all point to the variant with
7771 no defaults as their canonical type. */
7772 any_noncanonical_argtypes_p = true;
7775 if (*any_structural_p)
7776 return argtypes;
7778 if (any_noncanonical_argtypes_p)
7780 /* Build the canonical list of argument types. */
7781 tree canon_argtypes = NULL_TREE;
7782 bool is_void = false;
7784 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7786 if (arg == void_list_node)
7787 is_void = true;
7788 else
7789 canon_argtypes = tree_cons (NULL_TREE,
7790 TYPE_CANONICAL (TREE_VALUE (arg)),
7791 canon_argtypes);
7794 canon_argtypes = nreverse (canon_argtypes);
7795 if (is_void)
7796 canon_argtypes = chainon (canon_argtypes, void_list_node);
7798 /* There is a non-canonical type. */
7799 *any_noncanonical_p = true;
7800 return canon_argtypes;
7803 /* The canonical argument types are the same as ARGTYPES. */
7804 return argtypes;
7807 /* Construct, lay out and return
7808 the type of functions returning type VALUE_TYPE
7809 given arguments of types ARG_TYPES.
7810 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7811 are data type nodes for the arguments of the function.
7812 If such a type has already been constructed, reuse it. */
7814 tree
7815 build_function_type (tree value_type, tree arg_types)
7817 tree t;
7818 inchash::hash hstate;
7819 bool any_structural_p, any_noncanonical_p;
7820 tree canon_argtypes;
7822 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7824 error ("function return type cannot be function");
7825 value_type = integer_type_node;
7828 /* Make a node of the sort we want. */
7829 t = make_node (FUNCTION_TYPE);
7830 TREE_TYPE (t) = value_type;
7831 TYPE_ARG_TYPES (t) = arg_types;
7833 /* If we already have such a type, use the old one. */
7834 hashval_t hash = type_hash_canon_hash (t);
7835 t = type_hash_canon (hash, t);
7837 /* Set up the canonical type. */
7838 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7839 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7840 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7841 &any_structural_p,
7842 &any_noncanonical_p);
7843 if (any_structural_p)
7844 SET_TYPE_STRUCTURAL_EQUALITY (t);
7845 else if (any_noncanonical_p)
7846 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7847 canon_argtypes);
7849 if (!COMPLETE_TYPE_P (t))
7850 layout_type (t);
7851 return t;
7854 /* Build a function type. The RETURN_TYPE is the type returned by the
7855 function. If VAARGS is set, no void_type_node is appended to the
7856 list. ARGP must always be terminated by a NULL_TREE. */
7858 static tree
7859 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7861 tree t, args, last;
7863 t = va_arg (argp, tree);
7864 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7865 args = tree_cons (NULL_TREE, t, args);
7867 if (vaargs)
7869 last = args;
7870 if (args != NULL_TREE)
7871 args = nreverse (args);
7872 gcc_assert (last != void_list_node);
7874 else if (args == NULL_TREE)
7875 args = void_list_node;
7876 else
7878 last = args;
7879 args = nreverse (args);
7880 TREE_CHAIN (last) = void_list_node;
7882 args = build_function_type (return_type, args);
7884 return args;
7887 /* Build a function type. The RETURN_TYPE is the type returned by the
7888 function. If additional arguments are provided, they are
7889 additional argument types. The list of argument types must always
7890 be terminated by NULL_TREE. */
7892 tree
7893 build_function_type_list (tree return_type, ...)
7895 tree args;
7896 va_list p;
7898 va_start (p, return_type);
7899 args = build_function_type_list_1 (false, return_type, p);
7900 va_end (p);
7901 return args;
7904 /* Build a variable argument function type. The RETURN_TYPE is the
7905 type returned by the function. If additional arguments are provided,
7906 they are additional argument types. The list of argument types must
7907 always be terminated by NULL_TREE. */
7909 tree
7910 build_varargs_function_type_list (tree return_type, ...)
7912 tree args;
7913 va_list p;
7915 va_start (p, return_type);
7916 args = build_function_type_list_1 (true, return_type, p);
7917 va_end (p);
7919 return args;
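
/* Illustrative usage sketch (not part of tree.c): build `int (void *, int)'
   and the variadic `int (void *, ...)'.  With the varargs builder the
   terminating void node is intentionally left off the argument list, which
   is what marks the type as variadic.  */

static void
example_build_fntypes (tree *fixed_p, tree *variadic_p)
{
  *fixed_p = build_function_type_list (integer_type_node, ptr_type_node,
				       integer_type_node, NULL_TREE);
  *variadic_p = build_varargs_function_type_list (integer_type_node,
						  ptr_type_node, NULL_TREE);
}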
7922 /* Build a function type. RETURN_TYPE is the type returned by the
7923 function; VAARGS indicates whether the function takes varargs. The
7924 function takes N named arguments, the types of which are provided in
7925 ARG_TYPES. */
7927 static tree
7928 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7929 tree *arg_types)
7931 int i;
7932 tree t = vaargs ? NULL_TREE : void_list_node;
7934 for (i = n - 1; i >= 0; i--)
7935 t = tree_cons (NULL_TREE, arg_types[i], t);
7937 return build_function_type (return_type, t);
7940 /* Build a function type. RETURN_TYPE is the type returned by the
7941 function. The function takes N named arguments, the types of which
7942 are provided in ARG_TYPES. */
7944 tree
7945 build_function_type_array (tree return_type, int n, tree *arg_types)
7947 return build_function_type_array_1 (false, return_type, n, arg_types);
7950 /* Build a variable argument function type. RETURN_TYPE is the type
7951 returned by the function. The function takes N named arguments, the
7952 types of which are provided in ARG_TYPES. */
7954 tree
7955 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7957 return build_function_type_array_1 (true, return_type, n, arg_types);
7960 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7961 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7962 for the method. An implicit additional parameter (of type
7963 pointer-to-BASETYPE) is added to the ARGTYPES. */
7965 tree
7966 build_method_type_directly (tree basetype,
7967 tree rettype,
7968 tree argtypes)
7970 tree t;
7971 tree ptype;
7972 bool any_structural_p, any_noncanonical_p;
7973 tree canon_argtypes;
7975 /* Make a node of the sort we want. */
7976 t = make_node (METHOD_TYPE);
7978 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7979 TREE_TYPE (t) = rettype;
7980 ptype = build_pointer_type (basetype);
7982 /* The actual arglist for this function includes a "hidden" argument
7983 which is "this". Put it into the list of argument types. */
7984 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7985 TYPE_ARG_TYPES (t) = argtypes;
7987 /* If we already have such a type, use the old one. */
7988 hashval_t hash = type_hash_canon_hash (t);
7989 t = type_hash_canon (hash, t);
7991 /* Set up the canonical type. */
7992 any_structural_p
7993 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7994 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7995 any_noncanonical_p
7996 = (TYPE_CANONICAL (basetype) != basetype
7997 || TYPE_CANONICAL (rettype) != rettype);
7998 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7999 &any_structural_p,
8000 &any_noncanonical_p);
8001 if (any_structural_p)
8002 SET_TYPE_STRUCTURAL_EQUALITY (t);
8003 else if (any_noncanonical_p)
8004 TYPE_CANONICAL (t)
8005 = build_method_type_directly (TYPE_CANONICAL (basetype),
8006 TYPE_CANONICAL (rettype),
8007 canon_argtypes);
8008 if (!COMPLETE_TYPE_P (t))
8009 layout_type (t);
8011 return t;
8014 /* Construct, lay out and return the type of methods belonging to class
8015 BASETYPE and whose arguments and values are described by TYPE.
8016 If that type exists already, reuse it.
8017 TYPE must be a FUNCTION_TYPE node. */
8019 tree
8020 build_method_type (tree basetype, tree type)
8022 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8024 return build_method_type_directly (basetype,
8025 TREE_TYPE (type),
8026 TYPE_ARG_TYPES (type));
8029 /* Construct, lay out and return the type of offsets to a value
8030 of type TYPE, within an object of type BASETYPE.
8031 If a suitable offset type exists already, reuse it. */
8033 tree
8034 build_offset_type (tree basetype, tree type)
8036 tree t;
8038 /* Make a node of the sort we want. */
8039 t = make_node (OFFSET_TYPE);
8041 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8042 TREE_TYPE (t) = type;
8044 /* If we already have such a type, use the old one. */
8045 hashval_t hash = type_hash_canon_hash (t);
8046 t = type_hash_canon (hash, t);
8048 if (!COMPLETE_TYPE_P (t))
8049 layout_type (t);
8051 if (TYPE_CANONICAL (t) == t)
8053 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8054 || TYPE_STRUCTURAL_EQUALITY_P (type))
8055 SET_TYPE_STRUCTURAL_EQUALITY (t);
8056 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8057 || TYPE_CANONICAL (type) != type)
8058 TYPE_CANONICAL (t)
8059 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8060 TYPE_CANONICAL (type));
8063 return t;
8066 /* Create a complex type whose components are COMPONENT_TYPE.
8068 If NAMED is true, the type is given a TYPE_NAME. We do not always
8069 do so because this creates a DECL node and thus makes the DECL_UIDs
8070 dependent on the type canonicalization hashtable, which is GC-ed,
8071 so the DECL_UIDs would not be stable wrt garbage collection. */
8073 tree
8074 build_complex_type (tree component_type, bool named)
8076 gcc_assert (INTEGRAL_TYPE_P (component_type)
8077 || SCALAR_FLOAT_TYPE_P (component_type)
8078 || FIXED_POINT_TYPE_P (component_type));
8080 /* Make a node of the sort we want. */
8081 tree probe = make_node (COMPLEX_TYPE);
8083 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
8085 /* If we already have such a type, use the old one. */
8086 hashval_t hash = type_hash_canon_hash (probe);
8087 tree t = type_hash_canon (hash, probe);
8089 if (t == probe)
8091 /* We created a new type. The hash insertion will have laid
8092 out the type. We need to check the canonicalization and
8093 maybe set the name. */
8094 gcc_checking_assert (COMPLETE_TYPE_P (t)
8095 && !TYPE_NAME (t)
8096 && TYPE_CANONICAL (t) == t);
8098 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
8099 SET_TYPE_STRUCTURAL_EQUALITY (t);
8100 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
8101 TYPE_CANONICAL (t)
8102 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
8104 /* We need to create a name, since complex is a fundamental type. */
8105 if (named)
8107 const char *name = NULL;
8109 if (TREE_TYPE (t) == char_type_node)
8110 name = "complex char";
8111 else if (TREE_TYPE (t) == signed_char_type_node)
8112 name = "complex signed char";
8113 else if (TREE_TYPE (t) == unsigned_char_type_node)
8114 name = "complex unsigned char";
8115 else if (TREE_TYPE (t) == short_integer_type_node)
8116 name = "complex short int";
8117 else if (TREE_TYPE (t) == short_unsigned_type_node)
8118 name = "complex short unsigned int";
8119 else if (TREE_TYPE (t) == integer_type_node)
8120 name = "complex int";
8121 else if (TREE_TYPE (t) == unsigned_type_node)
8122 name = "complex unsigned int";
8123 else if (TREE_TYPE (t) == long_integer_type_node)
8124 name = "complex long int";
8125 else if (TREE_TYPE (t) == long_unsigned_type_node)
8126 name = "complex long unsigned int";
8127 else if (TREE_TYPE (t) == long_long_integer_type_node)
8128 name = "complex long long int";
8129 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
8130 name = "complex long long unsigned int";
8132 if (name != NULL)
8133 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8134 get_identifier (name), t);
8138 return build_qualified_type (t, TYPE_QUALS (component_type));
8141 /* If TYPE is a real or complex floating-point type and the target
8142 does not directly support arithmetic on TYPE then return the wider
8143 type to be used for arithmetic on TYPE. Otherwise, return
8144 NULL_TREE. */
8146 tree
8147 excess_precision_type (tree type)
8149 /* The target can give two different responses to the question of
8150 which excess precision mode it would like depending on whether we
8151 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8153 enum excess_precision_type requested_type
8154 = (flag_excess_precision == EXCESS_PRECISION_FAST
8155 ? EXCESS_PRECISION_TYPE_FAST
8156 : EXCESS_PRECISION_TYPE_STANDARD);
8158 enum flt_eval_method target_flt_eval_method
8159 = targetm.c.excess_precision (requested_type);
8161 /* The target should not ask for unpredictable float evaluation (though
8162 it might implicitly advertise that the evaluation is unpredictable,
8163 but we don't care about that here, it will have been reported
8164 elsewhere). If it does ask for unpredictable evaluation, we have
8165 nothing to do here. */
8166 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8168 /* Nothing to do. The target has asked for all types we know about
8169 to be computed with their native precision and range. */
8170 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8171 return NULL_TREE;
8173 /* The target will promote this type in a target-dependent way, so excess
8174 precision ought to leave it alone. */
8175 if (targetm.promoted_type (type) != NULL_TREE)
8176 return NULL_TREE;
8178 machine_mode float16_type_mode = (float16_type_node
8179 ? TYPE_MODE (float16_type_node)
8180 : VOIDmode);
8181 machine_mode float_type_mode = TYPE_MODE (float_type_node);
8182 machine_mode double_type_mode = TYPE_MODE (double_type_node);
8184 switch (TREE_CODE (type))
8186 case REAL_TYPE:
8188 machine_mode type_mode = TYPE_MODE (type);
8189 switch (target_flt_eval_method)
8191 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8192 if (type_mode == float16_type_mode)
8193 return float_type_node;
8194 break;
8195 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8196 if (type_mode == float16_type_mode
8197 || type_mode == float_type_mode)
8198 return double_type_node;
8199 break;
8200 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8201 if (type_mode == float16_type_mode
8202 || type_mode == float_type_mode
8203 || type_mode == double_type_mode)
8204 return long_double_type_node;
8205 break;
8206 default:
8207 gcc_unreachable ();
8209 break;
8211 case COMPLEX_TYPE:
8213 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8214 return NULL_TREE;
8215 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8216 switch (target_flt_eval_method)
8218 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8219 if (type_mode == float16_type_mode)
8220 return complex_float_type_node;
8221 break;
8222 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8223 if (type_mode == float16_type_mode
8224 || type_mode == float_type_mode)
8225 return complex_double_type_node;
8226 break;
8227 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8228 if (type_mode == float16_type_mode
8229 || type_mode == float_type_mode
8230 || type_mode == double_type_mode)
8231 return complex_long_double_type_node;
8232 break;
8233 default:
8234 gcc_unreachable ();
8236 break;
8238 default:
8239 break;
8242 return NULL_TREE;
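/* Illustrative sketch, not part of the original file: picking the type a
   front end would actually compute in, based on the function above.  The
   helper name is hypothetical.  */

static tree
sketch_eval_type_for (tree type)
{
  tree wider = excess_precision_type (type);
  /* NULL_TREE means "evaluate in TYPE's own precision and range".  */
  return wider ? wider : type;
}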
8245 /* Return OP, stripped of any conversions to wider types as much as is safe.
8246 Converting the value back to OP's type makes a value equivalent to OP.
8248 If FOR_TYPE is nonzero, we return a value which, if converted to
8249 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8251 OP must have integer, real or enumeral type. Pointers are not allowed!
8253 There are some cases where the obvious value we could return
8254 would regenerate to OP if converted to OP's type,
8255 but would not extend like OP to wider types.
8256 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8257 For example, if OP is (unsigned short)(signed char)-1,
8258 we avoid returning (signed char)-1 if FOR_TYPE is int,
8259 even though extending that to an unsigned short would regenerate OP,
8260 since the result of extending (signed char)-1 to (int)
8261 is different from (int) OP. */
8263 tree
8264 get_unwidened (tree op, tree for_type)
8266 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8267 tree type = TREE_TYPE (op);
8268 unsigned final_prec
8269 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8270 int uns
8271 = (for_type != 0 && for_type != type
8272 && final_prec > TYPE_PRECISION (type)
8273 && TYPE_UNSIGNED (type));
8274 tree win = op;
8276 while (CONVERT_EXPR_P (op))
8278 int bitschange;
8280 /* TYPE_PRECISION on vector types has different meaning
8281 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8282 so avoid them here. */
8283 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8284 break;
8286 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8287 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8289 /* Truncations are many-one so cannot be removed.
8290 Unless we are later going to truncate down even farther. */
8291 if (bitschange < 0
8292 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8293 break;
8295 /* See what's inside this conversion. If we decide to strip it,
8296 we will set WIN. */
8297 op = TREE_OPERAND (op, 0);
8299 /* If we have not stripped any zero-extensions (uns is 0),
8300 we can strip any kind of extension.
8301 If we have previously stripped a zero-extension,
8302 only zero-extensions can safely be stripped.
8303 Any extension can be stripped if the bits it would produce
8304 are all going to be discarded later by truncating to FOR_TYPE. */
8306 if (bitschange > 0)
8308 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8309 win = op;
8310 /* TYPE_UNSIGNED says whether this is a zero-extension.
8311 Let's avoid computing it if it does not affect WIN
8312 and if UNS will not be needed again. */
8313 if ((uns
8314 || CONVERT_EXPR_P (op))
8315 && TYPE_UNSIGNED (TREE_TYPE (op)))
8317 uns = 1;
8318 win = op;
8323 /* If we finally reach a constant, see if it fits in something smaller
8324 and in that case convert it. */
8325 if (TREE_CODE (win) == INTEGER_CST)
8327 tree wtype = TREE_TYPE (win);
8328 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
8329 if (for_type)
8330 prec = MAX (prec, final_prec);
8331 if (prec < TYPE_PRECISION (wtype))
8333 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
8334 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
8335 win = fold_convert (t, win);
8339 return win;
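/* Illustrative sketch, not part of the original file: stripping widening
   conversions from both operands of a comparison before looking for a
   narrower common type.  The helper name is hypothetical; both operands
   are assumed to have integer, real or enumeral type as required above.  */

static void
sketch_strip_widening (tree *op0, tree *op1)
{
  /* A FOR_TYPE of NULL_TREE only asks for values equivalent to the
     operands in their own types.  */
  *op0 = get_unwidened (*op0, NULL_TREE);
  *op1 = get_unwidened (*op1, NULL_TREE);
}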
8342 /* Return OP or a simpler expression for a narrower value
8343 which can be sign-extended or zero-extended to give back OP.
8344 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8345 or 0 if the value should be sign-extended. */
8347 tree
8348 get_narrower (tree op, int *unsignedp_ptr)
8350 int uns = 0;
8351 int first = 1;
8352 tree win = op;
8353 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8355 while (TREE_CODE (op) == NOP_EXPR)
8357 int bitschange
8358 = (TYPE_PRECISION (TREE_TYPE (op))
8359 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8361 /* Truncations are many-one so cannot be removed. */
8362 if (bitschange < 0)
8363 break;
8365 /* See what's inside this conversion. If we decide to strip it,
8366 we will set WIN. */
8368 if (bitschange > 0)
8370 op = TREE_OPERAND (op, 0);
8371 /* An extension: the outermost one can be stripped,
8372 but remember whether it is zero or sign extension. */
8373 if (first)
8374 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8375 /* Otherwise, if a sign extension has been stripped,
8376 only sign extensions can now be stripped;
8377 if a zero extension has been stripped, only zero-extensions. */
8378 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8379 break;
8380 first = 0;
8382 else /* bitschange == 0 */
8384 /* A change in nominal type can always be stripped, but we must
8385 preserve the unsignedness. */
8386 if (first)
8387 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8388 first = 0;
8389 op = TREE_OPERAND (op, 0);
8390 /* Keep trying to narrow, but don't assign op to win if it
8391 would turn an integral type into something else. */
8392 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8393 continue;
8396 win = op;
8399 if (TREE_CODE (op) == COMPONENT_REF
8400 /* Since type_for_size always gives an integer type. */
8401 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8402 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8403 /* Ensure field is laid out already. */
8404 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8405 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8407 unsigned HOST_WIDE_INT innerprec
8408 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8409 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8410 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8411 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8413 /* We can get this structure field in a narrower type that fits it,
8414 but the resulting extension to its nominal type (a fullword type)
8415 must satisfy the same conditions as for other extensions.
8417 Do this only for fields that are aligned (not bit-fields),
8418 because when bit-field insns would be used there is no
8419 advantage in doing this. */
8421 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8422 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8423 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8424 && type != 0)
8426 if (first)
8427 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8428 win = fold_convert (type, op);
8432 *unsignedp_ptr = uns;
8433 return win;
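/* Illustrative sketch, not part of the original file: the usual calling
   pattern, capturing both the narrowed operand and whether it would be
   zero-extended back to the original width.  Hypothetical helper name.  */

static tree
sketch_narrow_operand (tree op, bool *zero_extend_p)
{
  int uns = 0;
  tree narrow = get_narrower (op, &uns);
  *zero_extend_p = (uns != 0);
  return narrow;
}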
8436 /* Return true if integer constant C has a value that is permissible
8437 for TYPE, an integral type. */
8439 bool
8440 int_fits_type_p (const_tree c, const_tree type)
8442 tree type_low_bound, type_high_bound;
8443 bool ok_for_low_bound, ok_for_high_bound;
8444 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8446 /* Non-standard boolean types can have arbitrary precision but various
8447 transformations assume that they can only take values 0 and +/-1. */
8448 if (TREE_CODE (type) == BOOLEAN_TYPE)
8449 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8451 retry:
8452 type_low_bound = TYPE_MIN_VALUE (type);
8453 type_high_bound = TYPE_MAX_VALUE (type);
8455 /* If at least one bound of the type is a constant integer, we can check
8456 ourselves and maybe make a decision. If no such decision is possible, but
8457 this type is a subtype, try checking against that. Otherwise, use
8458 fits_to_tree_p, which checks against the precision.
8460 Compute the status for each possibly constant bound, and return if we see
8461 one does not match. Use ok_for_xxx_bound for this purpose: it is true
8462 when the corresponding bound is a constant integer that C is known to
8463 satisfy, and false when that bound is not constant. */
8465 /* Check if c >= type_low_bound. */
8466 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8468 if (tree_int_cst_lt (c, type_low_bound))
8469 return false;
8470 ok_for_low_bound = true;
8472 else
8473 ok_for_low_bound = false;
8475 /* Check if c <= type_high_bound. */
8476 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8478 if (tree_int_cst_lt (type_high_bound, c))
8479 return false;
8480 ok_for_high_bound = true;
8482 else
8483 ok_for_high_bound = false;
8485 /* If the constant fits both bounds, the result is known. */
8486 if (ok_for_low_bound && ok_for_high_bound)
8487 return true;
8489 /* Perform some generic filtering which may allow making a decision
8490 even if the bounds are not constant. First, negative integers
8491 never fit in unsigned types. */
8492 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8493 return false;
8495 /* Second, narrower types always fit in wider ones. */
8496 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8497 return true;
8499 /* Third, unsigned integers with top bit set never fit signed types. */
8500 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8502 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8503 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8505 /* When a tree_cst is converted to a wide-int, the precision
8506 is taken from the type. However, if the precision of the
8507 mode underneath the type is smaller than that, it is
8508 possible that the value will not fit. The test below
8509 fails if any bit is set between the sign bit of the
8510 underlying mode and the top bit of the type. */
8511 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8512 return false;
8514 else if (wi::neg_p (wi::to_wide (c)))
8515 return false;
8518 /* If we haven't been able to decide at this point, there is nothing more we
8519 can check ourselves here. Look at the base type if we have one and it
8520 has the same precision. */
8521 if (TREE_CODE (type) == INTEGER_TYPE
8522 && TREE_TYPE (type) != 0
8523 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8525 type = TREE_TYPE (type);
8526 goto retry;
8529 /* Or to fits_to_tree_p, if nothing else. */
8530 return wi::fits_to_tree_p (wi::to_wide (c), type);
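/* Illustrative sketch, not part of the original file: folding a narrowing
   conversion of an integer constant only when the value is representable
   in the destination type.  Hypothetical helper name.  */

static tree
sketch_fold_narrowing_cst (tree type, tree cst)
{
  if (TREE_CODE (cst) == INTEGER_CST && int_fits_type_p (cst, type))
    return fold_convert (type, cst);
  return NULL_TREE;
}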
8533 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8534 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8535 represented (assuming two's-complement arithmetic) within the bit
8536 precision of the type are returned instead. */
8538 void
8539 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8541 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8542 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8543 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8544 else
8546 if (TYPE_UNSIGNED (type))
8547 mpz_set_ui (min, 0);
8548 else
8550 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8551 wi::to_mpz (mn, min, SIGNED);
8555 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8556 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8557 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8558 else
8560 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8561 wi::to_mpz (mn, max, TYPE_SIGN (type));
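/* Illustrative sketch, not part of the original file: the caller owns the
   mpz_t objects and must initialize and clear them around the call.
   Hypothetical helper name.  */

static bool
sketch_value_in_type_range (const_tree type, const mpz_t val)
{
  mpz_t lo, hi;
  mpz_init (lo);
  mpz_init (hi);
  get_type_static_bounds (type, lo, hi);
  bool in_range = mpz_cmp (lo, val) <= 0 && mpz_cmp (val, hi) <= 0;
  mpz_clear (lo);
  mpz_clear (hi);
  return in_range;
}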
8565 /* Return true if VAR is an automatic variable defined in function FN. */
8567 bool
8568 auto_var_in_fn_p (const_tree var, const_tree fn)
8570 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8571 && ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8572 || TREE_CODE (var) == PARM_DECL)
8573 && ! TREE_STATIC (var))
8574 || TREE_CODE (var) == LABEL_DECL
8575 || TREE_CODE (var) == RESULT_DECL));
8578 /* Subprogram of the following function. Called by walk_tree.
8580 Return *TP if it is an automatic variable or parameter of the
8581 function passed in as DATA. */
8583 static tree
8584 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8586 tree fn = (tree) data;
8588 if (TYPE_P (*tp))
8589 *walk_subtrees = 0;
8591 else if (DECL_P (*tp)
8592 && auto_var_in_fn_p (*tp, fn))
8593 return *tp;
8595 return NULL_TREE;
8598 /* Returns true if T is, contains, or refers to a type with variable
8599 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8600 arguments, but not the return type. If FN is nonzero, only return
8601 true if a modifier of the type or position of FN is a variable or
8602 parameter inside FN.
8604 This concept is more general than that of C99 'variably modified types':
8605 in C99, a struct type is never variably modified because a VLA may not
8606 appear as a structure member. In GNU C, however, code like:
8608 struct S { int i[f()]; };
8610 is valid, and other languages may define similar constructs. */
8612 bool
8613 variably_modified_type_p (tree type, tree fn)
8615 tree t;
8617 /* Test if T is either variable (if FN is zero) or an expression containing
8618 a variable in FN. If TYPE isn't gimplified, return true also if
8619 gimplify_one_sizepos would gimplify the expression into a local
8620 variable. */
8621 #define RETURN_TRUE_IF_VAR(T) \
8622 do { tree _t = (T); \
8623 if (_t != NULL_TREE \
8624 && _t != error_mark_node \
8625 && TREE_CODE (_t) != INTEGER_CST \
8626 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8627 && (!fn \
8628 || (!TYPE_SIZES_GIMPLIFIED (type) \
8629 && !is_gimple_sizepos (_t)) \
8630 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8631 return true; } while (0)
8633 if (type == error_mark_node)
8634 return false;
8636 /* If TYPE itself has variable size, it is variably modified. */
8637 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8638 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8640 switch (TREE_CODE (type))
8642 case POINTER_TYPE:
8643 case REFERENCE_TYPE:
8644 case VECTOR_TYPE:
8645 /* Ada can have pointer types referring to themselves indirectly. */
8646 if (TREE_VISITED (type))
8647 return false;
8648 TREE_VISITED (type) = true;
8649 if (variably_modified_type_p (TREE_TYPE (type), fn))
8651 TREE_VISITED (type) = false;
8652 return true;
8654 TREE_VISITED (type) = false;
8655 break;
8657 case FUNCTION_TYPE:
8658 case METHOD_TYPE:
8659 /* If TYPE is a function type, it is variably modified if the
8660 return type is variably modified. */
8661 if (variably_modified_type_p (TREE_TYPE (type), fn))
8662 return true;
8663 break;
8665 case INTEGER_TYPE:
8666 case REAL_TYPE:
8667 case FIXED_POINT_TYPE:
8668 case ENUMERAL_TYPE:
8669 case BOOLEAN_TYPE:
8670 /* Scalar types are variably modified if their end points
8671 aren't constant. */
8672 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8673 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8674 break;
8676 case RECORD_TYPE:
8677 case UNION_TYPE:
8678 case QUAL_UNION_TYPE:
8679 /* We can't see if any of the fields are variably-modified by the
8680 definition we normally use, since that would produce infinite
8681 recursion via pointers. */
8682 /* This is variably modified if some field's type is. */
8683 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8684 if (TREE_CODE (t) == FIELD_DECL)
8686 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8687 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8688 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8690 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8691 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8693 break;
8695 case ARRAY_TYPE:
8696 /* Do not call ourselves to avoid infinite recursion. This is
8697 variably modified if the element type is. */
8698 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8699 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8700 break;
8702 default:
8703 break;
8706 /* The current language may have other cases to check, but in general,
8707 all other types are not variably modified. */
8708 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8710 #undef RETURN_TRUE_IF_VAR
8713 /* Given a DECL or TYPE, return the scope in which it was declared, or
8714 NULL_TREE if there is no containing scope. */
8716 tree
8717 get_containing_scope (const_tree t)
8719 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8722 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8724 const_tree
8725 get_ultimate_context (const_tree decl)
8727 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8729 if (TREE_CODE (decl) == BLOCK)
8730 decl = BLOCK_SUPERCONTEXT (decl);
8731 else
8732 decl = get_containing_scope (decl);
8734 return decl;
8737 /* Return the innermost context enclosing DECL that is
8738 a FUNCTION_DECL, or zero if none. */
8740 tree
8741 decl_function_context (const_tree decl)
8743 tree context;
8745 if (TREE_CODE (decl) == ERROR_MARK)
8746 return 0;
8748 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8749 where we look up the function at runtime. Such functions always take
8750 a first argument of type 'pointer to real context'.
8752 C++ should really be fixed to use DECL_CONTEXT for the real context,
8753 and use something else for the "virtual context". */
8754 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8755 context
8756 = TYPE_MAIN_VARIANT
8757 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8758 else
8759 context = DECL_CONTEXT (decl);
8761 while (context && TREE_CODE (context) != FUNCTION_DECL)
8763 if (TREE_CODE (context) == BLOCK)
8764 context = BLOCK_SUPERCONTEXT (context);
8765 else
8766 context = get_containing_scope (context);
8769 return context;
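/* Illustrative sketch, not part of the original file: testing whether a
   declaration lives (possibly inside nested BLOCKs) in the function that
   is currently being compiled.  Hypothetical helper name.  */

static bool
sketch_local_to_current_function_p (tree decl)
{
  return decl_function_context (decl) == current_function_decl;
}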
8772 /* Return the innermost context enclosing DECL that is
8773 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8774 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8776 tree
8777 decl_type_context (const_tree decl)
8779 tree context = DECL_CONTEXT (decl);
8781 while (context)
8782 switch (TREE_CODE (context))
8784 case NAMESPACE_DECL:
8785 case TRANSLATION_UNIT_DECL:
8786 return NULL_TREE;
8788 case RECORD_TYPE:
8789 case UNION_TYPE:
8790 case QUAL_UNION_TYPE:
8791 return context;
8793 case TYPE_DECL:
8794 case FUNCTION_DECL:
8795 context = DECL_CONTEXT (context);
8796 break;
8798 case BLOCK:
8799 context = BLOCK_SUPERCONTEXT (context);
8800 break;
8802 default:
8803 gcc_unreachable ();
8806 return NULL_TREE;
8809 /* CALL is a CALL_EXPR. Return the declaration for the function
8810 called, or NULL_TREE if the called function cannot be
8811 determined. */
8813 tree
8814 get_callee_fndecl (const_tree call)
8816 tree addr;
8818 if (call == error_mark_node)
8819 return error_mark_node;
8821 /* It's invalid to call this function with anything but a
8822 CALL_EXPR. */
8823 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8825 /* The first operand to the CALL is the address of the function
8826 called. */
8827 addr = CALL_EXPR_FN (call);
8829 /* If there is no function, return early. */
8830 if (addr == NULL_TREE)
8831 return NULL_TREE;
8833 STRIP_NOPS (addr);
8835 /* If this is a readonly function pointer, extract its initial value. */
8836 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8837 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8838 && DECL_INITIAL (addr))
8839 addr = DECL_INITIAL (addr);
8841 /* If the address is just `&f' for some function `f', then we know
8842 that `f' is being called. */
8843 if (TREE_CODE (addr) == ADDR_EXPR
8844 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8845 return TREE_OPERAND (addr, 0);
8847 /* We couldn't figure out what was being called. */
8848 return NULL_TREE;
8851 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8852 return the associated function code, otherwise return CFN_LAST. */
8854 combined_fn
8855 get_call_combined_fn (const_tree call)
8857 /* It's invalid to call this function with anything but a CALL_EXPR. */
8858 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8860 if (!CALL_EXPR_FN (call))
8861 return as_combined_fn (CALL_EXPR_IFN (call));
8863 tree fndecl = get_callee_fndecl (call);
8864 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8865 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8867 return CFN_LAST;
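/* Illustrative sketch, not part of the original file: CFN_LAST doubles as
   the "not a recognized built-in or internal function" answer, so a
   recognizer can be as simple as this.  Hypothetical helper name.  */

static bool
sketch_known_fn_call_p (const_tree call)
{
  return get_call_combined_fn (call) != CFN_LAST;
}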
8870 #define TREE_MEM_USAGE_SPACES 40
8872 /* Print debugging information about tree nodes generated during the compile,
8873 and any language-specific information. */
8875 void
8876 dump_tree_statistics (void)
8878 if (GATHER_STATISTICS)
8880 int i;
8881 int total_nodes, total_bytes;
8882 fprintf (stderr, "\nKind Nodes Bytes\n");
8883 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8884 total_nodes = total_bytes = 0;
8885 for (i = 0; i < (int) all_kinds; i++)
8887 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8888 tree_node_counts[i], tree_node_sizes[i]);
8889 total_nodes += tree_node_counts[i];
8890 total_bytes += tree_node_sizes[i];
8892 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8893 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8894 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8895 fprintf (stderr, "Code Nodes\n");
8896 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8897 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8898 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
8899 tree_code_counts[i]);
8900 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8901 fprintf (stderr, "\n");
8902 ssanames_print_statistics ();
8903 fprintf (stderr, "\n");
8904 phinodes_print_statistics ();
8905 fprintf (stderr, "\n");
8907 else
8908 fprintf (stderr, "(No per-node statistics)\n");
8910 print_type_hash_statistics ();
8911 print_debug_expr_statistics ();
8912 print_value_expr_statistics ();
8913 lang_hooks.print_statistics ();
8916 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8918 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8920 unsigned
8921 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8923 /* This relies on the raw feedback's top 4 bits being zero. */
8924 #define FEEDBACK(X) ((X) * 0x04c11db7)
8925 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8926 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8927 static const unsigned syndromes[16] =
8929 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8930 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8931 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8932 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8934 #undef FEEDBACK
8935 #undef SYNDROME
8937 value <<= (32 - bytes * 8);
8938 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8940 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8942 chksum = (chksum << 4) ^ feedback;
8945 return chksum;
8948 /* Generate a crc32 of a string. */
8950 unsigned
8951 crc32_string (unsigned chksum, const char *string)
8954 chksum = crc32_byte (chksum, *string);
8955 while (*string++);
8956 return chksum;
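/* Illustrative sketch, not part of the original file: a plain bit-at-a-time
   formulation of the same unreflected CRC-32 (polynomial 0x04c11db7, no
   final XOR) that the nibble-table code above computes; note that
   crc32_string feeds the terminating NUL through crc32_byte as well.
   Hypothetical helper name.  */

static unsigned
sketch_crc32_byte_bitwise (unsigned chksum, unsigned char byte)
{
  chksum ^= (unsigned) byte << 24;
  for (int bit = 0; bit < 8; bit++)
    chksum = (chksum & 0x80000000u
              ? (chksum << 1) ^ 0x04c11db7u
              : chksum << 1);
  return chksum;
}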
8959 /* P is a string that will be used in a symbol. Mask out any characters
8960 that are not valid in that context. */
8962 void
8963 clean_symbol_name (char *p)
8965 for (; *p; p++)
8966 if (! (ISALNUM (*p)
8967 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8968 || *p == '$'
8969 #endif
8970 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8971 || *p == '.'
8972 #endif
8974 *p = '_';
8977 /* For anonymous aggregate types, we need some sort of name to
8978 hold on to. In practice, this should not appear, but it should
8979 not be harmful if it does. */
8980 bool
8981 anon_aggrname_p (const_tree id_node)
8983 #ifndef NO_DOT_IN_LABEL
8984 return (IDENTIFIER_POINTER (id_node)[0] == '.'
8985 && IDENTIFIER_POINTER (id_node)[1] == '_');
8986 #else /* NO_DOT_IN_LABEL */
8987 #ifndef NO_DOLLAR_IN_LABEL
8988 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
8989 && IDENTIFIER_POINTER (id_node)[1] == '_');
8990 #else /* NO_DOLLAR_IN_LABEL */
8991 #define ANON_AGGRNAME_PREFIX "__anon_"
8992 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
8993 sizeof (ANON_AGGRNAME_PREFIX) - 1));
8994 #endif /* NO_DOLLAR_IN_LABEL */
8995 #endif /* NO_DOT_IN_LABEL */
8998 /* Return a format for an anonymous aggregate name. */
8999 const char *
9000 anon_aggrname_format ()
9002 #ifndef NO_DOT_IN_LABEL
9003 return "._%d";
9004 #else /* NO_DOT_IN_LABEL */
9005 #ifndef NO_DOLLAR_IN_LABEL
9006 return "$_%d";
9007 #else /* NO_DOLLAR_IN_LABEL */
9008 return "__anon_%d";
9009 #endif /* NO_DOLLAR_IN_LABEL */
9010 #endif /* NO_DOT_IN_LABEL */
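/* Illustrative sketch, not part of the original file: how a front end might
   mint an anonymous-aggregate identifier that anon_aggrname_p above will
   recognize.  The helper name and the static counter are hypothetical.  */

static tree
sketch_make_anon_name (void)
{
  static int sketch_anon_count;
  char buf[32];
  snprintf (buf, sizeof (buf), anon_aggrname_format (), sketch_anon_count++);
  return get_identifier (buf);
}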
9013 /* Generate a name for a special-purpose function.
9014 The generated name may need to be unique across the whole link.
9015 Changes to this function may also require corresponding changes to
9016 xstrdup_mask_random.
9017 TYPE is some string to identify the purpose of this function to the
9018 linker or collect2; it must start with an uppercase letter,
9019 one of:
9020 I - for constructors
9021 D - for destructors
9022 N - for C++ anonymous namespaces
9023 F - for DWARF unwind frame information. */
9025 tree
9026 get_file_function_name (const char *type)
9028 char *buf;
9029 const char *p;
9030 char *q;
9032 /* If we already have a name we know to be unique, just use that. */
9033 if (first_global_object_name)
9034 p = q = ASTRDUP (first_global_object_name);
9035 /* If the target is handling the constructors/destructors, they
9036 will be local to this file and the name is only necessary for
9037 debugging purposes.
9038 We also assign sub_I and sub_D suffixes to constructors called from
9039 the global static constructors. These are always local. */
9040 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9041 || (strncmp (type, "sub_", 4) == 0
9042 && (type[4] == 'I' || type[4] == 'D')))
9044 const char *file = main_input_filename;
9045 if (! file)
9046 file = LOCATION_FILE (input_location);
9047 /* Just use the file's basename, because the full pathname
9048 might be quite long. */
9049 p = q = ASTRDUP (lbasename (file));
9051 else
9053 /* Otherwise, the name must be unique across the entire link.
9054 We don't have anything that we know to be unique to this translation
9055 unit, so use what we do have and throw in some randomness. */
9056 unsigned len;
9057 const char *name = weak_global_object_name;
9058 const char *file = main_input_filename;
9060 if (! name)
9061 name = "";
9062 if (! file)
9063 file = LOCATION_FILE (input_location);
9065 len = strlen (file);
9066 q = (char *) alloca (9 + 19 + len + 1);
9067 memcpy (q, file, len + 1);
9069 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9070 crc32_string (0, name), get_random_seed (false));
9072 p = q;
9075 clean_symbol_name (q);
9076 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9077 + strlen (type));
9079 /* Set up the name of the file-level functions we may need.
9080 Use a global object (which is already required to be unique over
9081 the program) rather than the file name (which imposes extra
9082 constraints). */
9083 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9085 return get_identifier (buf);
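/* Illustrative sketch, not part of the original file: with the format above,
   a request like this yields an identifier of the shape "_GLOBAL__I_<name>"
   (or "_GLOBAL__sub_I_<name>" for the sub_I flavour), where <name> is the
   cleaned-up string chosen above.  Hypothetical helper name.  */

static tree
sketch_static_ctor_name (void)
{
  return get_file_function_name ("I");
}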
9088 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9090 /* Complain that the tree code of NODE does not match the expected 0
9091 terminated list of trailing codes. The trailing code list can be
9092 empty, for a more vague error message. FILE, LINE, and FUNCTION
9093 are of the caller. */
9095 void
9096 tree_check_failed (const_tree node, const char *file,
9097 int line, const char *function, ...)
9099 va_list args;
9100 const char *buffer;
9101 unsigned length = 0;
9102 enum tree_code code;
9104 va_start (args, function);
9105 while ((code = (enum tree_code) va_arg (args, int)))
9106 length += 4 + strlen (get_tree_code_name (code));
9107 va_end (args);
9108 if (length)
9110 char *tmp;
9111 va_start (args, function);
9112 length += strlen ("expected ");
9113 buffer = tmp = (char *) alloca (length);
9114 length = 0;
9115 while ((code = (enum tree_code) va_arg (args, int)))
9117 const char *prefix = length ? " or " : "expected ";
9119 strcpy (tmp + length, prefix);
9120 length += strlen (prefix);
9121 strcpy (tmp + length, get_tree_code_name (code));
9122 length += strlen (get_tree_code_name (code));
9124 va_end (args);
9126 else
9127 buffer = "unexpected node";
9129 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9130 buffer, get_tree_code_name (TREE_CODE (node)),
9131 function, trim_filename (file), line);
9134 /* Complain that the tree code of NODE matches one of the codes in the
9135 0-terminated list of trailing codes it must not have. FILE, LINE, and
9136 FUNCTION are of the caller. */
9138 void
9139 tree_not_check_failed (const_tree node, const char *file,
9140 int line, const char *function, ...)
9142 va_list args;
9143 char *buffer;
9144 unsigned length = 0;
9145 enum tree_code code;
9147 va_start (args, function);
9148 while ((code = (enum tree_code) va_arg (args, int)))
9149 length += 4 + strlen (get_tree_code_name (code));
9150 va_end (args);
9151 va_start (args, function);
9152 buffer = (char *) alloca (length);
9153 length = 0;
9154 while ((code = (enum tree_code) va_arg (args, int)))
9156 if (length)
9158 strcpy (buffer + length, " or ");
9159 length += 4;
9161 strcpy (buffer + length, get_tree_code_name (code));
9162 length += strlen (get_tree_code_name (code));
9164 va_end (args);
9166 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9167 buffer, get_tree_code_name (TREE_CODE (node)),
9168 function, trim_filename (file), line);
9171 /* Similar to tree_check_failed, except that we check for a class of tree
9172 code, given in CL. */
9174 void
9175 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9176 const char *file, int line, const char *function)
9178 internal_error
9179 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9180 TREE_CODE_CLASS_STRING (cl),
9181 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9182 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9185 /* Similar to tree_check_failed, except that instead of specifying a
9186 dozen codes, use the knowledge that they're all sequential. */
9188 void
9189 tree_range_check_failed (const_tree node, const char *file, int line,
9190 const char *function, enum tree_code c1,
9191 enum tree_code c2)
9193 char *buffer;
9194 unsigned length = 0;
9195 unsigned int c;
9197 for (c = c1; c <= c2; ++c)
9198 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9200 length += strlen ("expected ");
9201 buffer = (char *) alloca (length);
9202 length = 0;
9204 for (c = c1; c <= c2; ++c)
9206 const char *prefix = length ? " or " : "expected ";
9208 strcpy (buffer + length, prefix);
9209 length += strlen (prefix);
9210 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9211 length += strlen (get_tree_code_name ((enum tree_code) c));
9214 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9215 buffer, get_tree_code_name (TREE_CODE (node)),
9216 function, trim_filename (file), line);
9220 /* Similar to tree_check_failed, except that we check that a tree does
9221 not belong to the specified class of tree code, given in CL. */
9223 void
9224 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9225 const char *file, int line, const char *function)
9227 internal_error
9228 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9229 TREE_CODE_CLASS_STRING (cl),
9230 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9231 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9235 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9237 void
9238 omp_clause_check_failed (const_tree node, const char *file, int line,
9239 const char *function, enum omp_clause_code code)
9241 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9242 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9243 function, trim_filename (file), line);
9247 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9249 void
9250 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9251 const char *function, enum omp_clause_code c1,
9252 enum omp_clause_code c2)
9254 char *buffer;
9255 unsigned length = 0;
9256 unsigned int c;
9258 for (c = c1; c <= c2; ++c)
9259 length += 4 + strlen (omp_clause_code_name[c]);
9261 length += strlen ("expected ");
9262 buffer = (char *) alloca (length);
9263 length = 0;
9265 for (c = c1; c <= c2; ++c)
9267 const char *prefix = length ? " or " : "expected ";
9269 strcpy (buffer + length, prefix);
9270 length += strlen (prefix);
9271 strcpy (buffer + length, omp_clause_code_name[c]);
9272 length += strlen (omp_clause_code_name[c]);
9275 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9276 buffer, omp_clause_code_name[TREE_CODE (node)],
9277 function, trim_filename (file), line);
9281 #undef DEFTREESTRUCT
9282 #define DEFTREESTRUCT(VAL, NAME) NAME,
9284 static const char *ts_enum_names[] = {
9285 #include "treestruct.def"
9287 #undef DEFTREESTRUCT
9289 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9291 /* Similar to tree_class_check_failed, except that we check for
9292 whether CODE contains the tree structure identified by EN. */
9294 void
9295 tree_contains_struct_check_failed (const_tree node,
9296 const enum tree_node_structure_enum en,
9297 const char *file, int line,
9298 const char *function)
9300 internal_error
9301 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9302 TS_ENUM_NAME (en),
9303 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9307 /* Similar to above, except that the check is for the bounds of a
9308 tree_int_cst's (dynamically sized) element vector. */
9310 void
9311 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9312 const char *function)
9314 internal_error
9315 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9316 idx + 1, len, function, trim_filename (file), line);
9319 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9320 (dynamically sized) vector. */
9322 void
9323 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9324 const char *function)
9326 internal_error
9327 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9328 idx + 1, len, function, trim_filename (file), line);
9331 /* Similar to above, except that the check is for the bounds of the operand
9332 vector of an expression node EXP. */
9334 void
9335 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9336 int line, const char *function)
9338 enum tree_code code = TREE_CODE (exp);
9339 internal_error
9340 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9341 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9342 function, trim_filename (file), line);
9345 /* Similar to above, except that the check is for the number of
9346 operands of an OMP_CLAUSE node. */
9348 void
9349 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9350 int line, const char *function)
9352 internal_error
9353 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9354 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9355 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9356 trim_filename (file), line);
9358 #endif /* ENABLE_TREE_CHECKING */
9360 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9361 and mapped to the machine mode MODE. Initialize its fields and build
9362 the information necessary for debugging output. */
9364 static tree
9365 make_vector_type (tree innertype, int nunits, machine_mode mode)
9367 tree t;
9368 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9370 t = make_node (VECTOR_TYPE);
9371 TREE_TYPE (t) = mv_innertype;
9372 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9373 SET_TYPE_MODE (t, mode);
9375 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9376 SET_TYPE_STRUCTURAL_EQUALITY (t);
9377 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9378 || mode != VOIDmode)
9379 && !VECTOR_BOOLEAN_TYPE_P (t))
9380 TYPE_CANONICAL (t)
9381 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9383 layout_type (t);
9385 hashval_t hash = type_hash_canon_hash (t);
9386 t = type_hash_canon (hash, t);
9388 /* We have built a main variant, based on the main variant of the
9389 inner type. Use it to build the variant we return. */
9390 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9391 && TREE_TYPE (t) != innertype)
9392 return build_type_attribute_qual_variant (t,
9393 TYPE_ATTRIBUTES (innertype),
9394 TYPE_QUALS (innertype));
9396 return t;
9399 static tree
9400 make_or_reuse_type (unsigned size, int unsignedp)
9402 int i;
9404 if (size == INT_TYPE_SIZE)
9405 return unsignedp ? unsigned_type_node : integer_type_node;
9406 if (size == CHAR_TYPE_SIZE)
9407 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9408 if (size == SHORT_TYPE_SIZE)
9409 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9410 if (size == LONG_TYPE_SIZE)
9411 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9412 if (size == LONG_LONG_TYPE_SIZE)
9413 return (unsignedp ? long_long_unsigned_type_node
9414 : long_long_integer_type_node);
9416 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9417 if (size == int_n_data[i].bitsize
9418 && int_n_enabled_p[i])
9419 return (unsignedp ? int_n_trees[i].unsigned_type
9420 : int_n_trees[i].signed_type);
9422 if (unsignedp)
9423 return make_unsigned_type (size);
9424 else
9425 return make_signed_type (size);
9428 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9430 static tree
9431 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9433 if (satp)
9435 if (size == SHORT_FRACT_TYPE_SIZE)
9436 return unsignedp ? sat_unsigned_short_fract_type_node
9437 : sat_short_fract_type_node;
9438 if (size == FRACT_TYPE_SIZE)
9439 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9440 if (size == LONG_FRACT_TYPE_SIZE)
9441 return unsignedp ? sat_unsigned_long_fract_type_node
9442 : sat_long_fract_type_node;
9443 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9444 return unsignedp ? sat_unsigned_long_long_fract_type_node
9445 : sat_long_long_fract_type_node;
9447 else
9449 if (size == SHORT_FRACT_TYPE_SIZE)
9450 return unsignedp ? unsigned_short_fract_type_node
9451 : short_fract_type_node;
9452 if (size == FRACT_TYPE_SIZE)
9453 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9454 if (size == LONG_FRACT_TYPE_SIZE)
9455 return unsignedp ? unsigned_long_fract_type_node
9456 : long_fract_type_node;
9457 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9458 return unsignedp ? unsigned_long_long_fract_type_node
9459 : long_long_fract_type_node;
9462 return make_fract_type (size, unsignedp, satp);
9465 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9467 static tree
9468 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9470 if (satp)
9472 if (size == SHORT_ACCUM_TYPE_SIZE)
9473 return unsignedp ? sat_unsigned_short_accum_type_node
9474 : sat_short_accum_type_node;
9475 if (size == ACCUM_TYPE_SIZE)
9476 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9477 if (size == LONG_ACCUM_TYPE_SIZE)
9478 return unsignedp ? sat_unsigned_long_accum_type_node
9479 : sat_long_accum_type_node;
9480 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9481 return unsignedp ? sat_unsigned_long_long_accum_type_node
9482 : sat_long_long_accum_type_node;
9484 else
9486 if (size == SHORT_ACCUM_TYPE_SIZE)
9487 return unsignedp ? unsigned_short_accum_type_node
9488 : short_accum_type_node;
9489 if (size == ACCUM_TYPE_SIZE)
9490 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9491 if (size == LONG_ACCUM_TYPE_SIZE)
9492 return unsignedp ? unsigned_long_accum_type_node
9493 : long_accum_type_node;
9494 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9495 return unsignedp ? unsigned_long_long_accum_type_node
9496 : long_long_accum_type_node;
9499 return make_accum_type (size, unsignedp, satp);
9503 /* Create an atomic variant node for TYPE. This routine is called
9504 during initialization of data types to create the 5 basic atomic
9505 types. The generic build_variant_type function requires these to
9506 already be set up in order to function properly, so cannot be
9507 called from there. If ALIGN is non-zero, then ensure alignment is
9508 overridden to this value. */
9510 static tree
9511 build_atomic_base (tree type, unsigned int align)
9513 tree t;
9515 /* Make sure it's not already registered. */
9516 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9517 return t;
9519 t = build_variant_type_copy (type);
9520 set_type_quals (t, TYPE_QUAL_ATOMIC);
9522 if (align)
9523 SET_TYPE_ALIGN (t, align);
9525 return t;
9528 /* Information about the _FloatN and _FloatNx types. This must be in
9529 the same order as the corresponding TI_* enum values. */
9530 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9532 { 16, false },
9533 { 32, false },
9534 { 64, false },
9535 { 128, false },
9536 { 32, true },
9537 { 64, true },
9538 { 128, true },
9542 /* Create nodes for all integer types (and error_mark_node) using the sizes
9543 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9545 void
9546 build_common_tree_nodes (bool signed_char)
9548 int i;
9550 error_mark_node = make_node (ERROR_MARK);
9551 TREE_TYPE (error_mark_node) = error_mark_node;
9553 initialize_sizetypes ();
9555 /* Define both `signed char' and `unsigned char'. */
9556 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9557 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9558 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9559 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9561 /* Define `char', which is like either `signed char' or `unsigned char'
9562 but not the same as either. */
9563 char_type_node
9564 = (signed_char
9565 ? make_signed_type (CHAR_TYPE_SIZE)
9566 : make_unsigned_type (CHAR_TYPE_SIZE));
9567 TYPE_STRING_FLAG (char_type_node) = 1;
9569 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9570 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9571 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9572 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9573 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9574 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9575 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9576 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9578 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9580 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9581 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9582 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9583 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9585 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9586 && int_n_enabled_p[i])
9588 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9589 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9593 /* Define a boolean type. This type only represents boolean values but
9594 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9595 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9596 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9597 TYPE_PRECISION (boolean_type_node) = 1;
9598 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9600 /* Define what type to use for size_t. */
9601 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9602 size_type_node = unsigned_type_node;
9603 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9604 size_type_node = long_unsigned_type_node;
9605 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9606 size_type_node = long_long_unsigned_type_node;
9607 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9608 size_type_node = short_unsigned_type_node;
9609 else
9611 int i;
9613 size_type_node = NULL_TREE;
9614 for (i = 0; i < NUM_INT_N_ENTS; i++)
9615 if (int_n_enabled_p[i])
9617 char name[50];
9618 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9620 if (strcmp (name, SIZE_TYPE) == 0)
9622 size_type_node = int_n_trees[i].unsigned_type;
9625 if (size_type_node == NULL_TREE)
9626 gcc_unreachable ();
9629 /* Define what type to use for ptrdiff_t. */
9630 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9631 ptrdiff_type_node = integer_type_node;
9632 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9633 ptrdiff_type_node = long_integer_type_node;
9634 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9635 ptrdiff_type_node = long_long_integer_type_node;
9636 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9637 ptrdiff_type_node = short_integer_type_node;
9638 else
9640 ptrdiff_type_node = NULL_TREE;
9641 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9642 if (int_n_enabled_p[i])
9644 char name[50];
9645 sprintf (name, "__int%d", int_n_data[i].bitsize);
9646 if (strcmp (name, PTRDIFF_TYPE) == 0)
9647 ptrdiff_type_node = int_n_trees[i].signed_type;
9649 if (ptrdiff_type_node == NULL_TREE)
9650 gcc_unreachable ();
9653 /* Fill in the rest of the sized types. Reuse existing type nodes
9654 when possible. */
9655 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9656 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9657 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9658 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9659 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9661 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9662 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9663 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9664 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9665 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9667 /* Don't call build_qualified_type for atomics. That routine does
9668 special processing for atomics, and until they are initialized
9669 it's better not to make that call.
9671 Check to see if there is a target override for atomic types. */
9673 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9674 targetm.atomic_align_for_mode (QImode));
9675 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9676 targetm.atomic_align_for_mode (HImode));
9677 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9678 targetm.atomic_align_for_mode (SImode));
9679 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9680 targetm.atomic_align_for_mode (DImode));
9681 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9682 targetm.atomic_align_for_mode (TImode));
9684 access_public_node = get_identifier ("public");
9685 access_protected_node = get_identifier ("protected");
9686 access_private_node = get_identifier ("private");
9688 /* Define these next since types below may use them. */
9689 integer_zero_node = build_int_cst (integer_type_node, 0);
9690 integer_one_node = build_int_cst (integer_type_node, 1);
9691 integer_three_node = build_int_cst (integer_type_node, 3);
9692 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9694 size_zero_node = size_int (0);
9695 size_one_node = size_int (1);
9696 bitsize_zero_node = bitsize_int (0);
9697 bitsize_one_node = bitsize_int (1);
9698 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9700 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9701 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9703 void_type_node = make_node (VOID_TYPE);
9704 layout_type (void_type_node);
9706 pointer_bounds_type_node = targetm.chkp_bound_type ();
9708 /* We are not going to have real types in C with less than byte alignment,
9709 so we might as well not have any types that claim to have it. */
9710 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9711 TYPE_USER_ALIGN (void_type_node) = 0;
9713 void_node = make_node (VOID_CST);
9714 TREE_TYPE (void_node) = void_type_node;
9716 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9717 layout_type (TREE_TYPE (null_pointer_node));
9719 ptr_type_node = build_pointer_type (void_type_node);
9720 const_ptr_type_node
9721 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9722 for (unsigned i = 0;
9723 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
9724 ++i)
9725 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9727 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9729 float_type_node = make_node (REAL_TYPE);
9730 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9731 layout_type (float_type_node);
9733 double_type_node = make_node (REAL_TYPE);
9734 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9735 layout_type (double_type_node);
9737 long_double_type_node = make_node (REAL_TYPE);
9738 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9739 layout_type (long_double_type_node);
9741 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9743 int n = floatn_nx_types[i].n;
9744 bool extended = floatn_nx_types[i].extended;
9745 scalar_float_mode mode;
9746 if (!targetm.floatn_mode (n, extended).exists (&mode))
9747 continue;
9748 int precision = GET_MODE_PRECISION (mode);
9749 /* Work around the rs6000 KFmode having precision 113 not
9750 128. */
9751 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9752 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9753 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9754 if (!extended)
9755 gcc_assert (min_precision == n);
9756 if (precision < min_precision)
9757 precision = min_precision;
9758 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9759 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9760 layout_type (FLOATN_NX_TYPE_NODE (i));
9761 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9764 float_ptr_type_node = build_pointer_type (float_type_node);
9765 double_ptr_type_node = build_pointer_type (double_type_node);
9766 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9767 integer_ptr_type_node = build_pointer_type (integer_type_node);
9769 /* Fixed size integer types. */
9770 uint16_type_node = make_or_reuse_type (16, 1);
9771 uint32_type_node = make_or_reuse_type (32, 1);
9772 uint64_type_node = make_or_reuse_type (64, 1);
9774 /* Decimal float types. */
9775 dfloat32_type_node = make_node (REAL_TYPE);
9776 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9777 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9778 layout_type (dfloat32_type_node);
9779 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9781 dfloat64_type_node = make_node (REAL_TYPE);
9782 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9783 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9784 layout_type (dfloat64_type_node);
9785 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9787 dfloat128_type_node = make_node (REAL_TYPE);
9788 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9789 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9790 layout_type (dfloat128_type_node);
9791 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9793 complex_integer_type_node = build_complex_type (integer_type_node, true);
9794 complex_float_type_node = build_complex_type (float_type_node, true);
9795 complex_double_type_node = build_complex_type (double_type_node, true);
9796 complex_long_double_type_node = build_complex_type (long_double_type_node,
9797 true);
9799 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9801 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9802 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9803 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9806 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9807 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9808 sat_ ## KIND ## _type_node = \
9809 make_sat_signed_ ## KIND ## _type (SIZE); \
9810 sat_unsigned_ ## KIND ## _type_node = \
9811 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9812 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9813 unsigned_ ## KIND ## _type_node = \
9814 make_unsigned_ ## KIND ## _type (SIZE);
9816 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9817 sat_ ## WIDTH ## KIND ## _type_node = \
9818 make_sat_signed_ ## KIND ## _type (SIZE); \
9819 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9820 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9821 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9822 unsigned_ ## WIDTH ## KIND ## _type_node = \
9823 make_unsigned_ ## KIND ## _type (SIZE);
9825 /* Make fixed-point type nodes based on four different widths. */
9826 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9827 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9828 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9829 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9830 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9832 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9833 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9834 NAME ## _type_node = \
9835 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9836 u ## NAME ## _type_node = \
9837 make_or_reuse_unsigned_ ## KIND ## _type \
9838 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9839 sat_ ## NAME ## _type_node = \
9840 make_or_reuse_sat_signed_ ## KIND ## _type \
9841 (GET_MODE_BITSIZE (MODE ## mode)); \
9842 sat_u ## NAME ## _type_node = \
9843 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9844 (GET_MODE_BITSIZE (U ## MODE ## mode));
9846 /* Fixed-point type and mode nodes. */
9847 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9848 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9849 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9850 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9851 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9852 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9853 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9854 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9855 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9856 MAKE_FIXED_MODE_NODE (accum, da, DA)
9857 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9860 tree t = targetm.build_builtin_va_list ();
9862 /* Many back-ends define record types without setting TYPE_NAME.
9863 If we copied the record type here, we'd keep the original
9864 record type without a name. This breaks name mangling. So,
9865 don't copy record types and let c_common_nodes_and_builtins()
9866 declare the type to be __builtin_va_list. */
9867 if (TREE_CODE (t) != RECORD_TYPE)
9868 t = build_variant_type_copy (t);
9870 va_list_type_node = t;
9874 /* Modify DECL for given flags.
9875 TM_PURE attribute is set only on types, so the function will modify
9876 DECL's type when ECF_TM_PURE is used. */
9878 void
9879 set_call_expr_flags (tree decl, int flags)
9881 if (flags & ECF_NOTHROW)
9882 TREE_NOTHROW (decl) = 1;
9883 if (flags & ECF_CONST)
9884 TREE_READONLY (decl) = 1;
9885 if (flags & ECF_PURE)
9886 DECL_PURE_P (decl) = 1;
9887 if (flags & ECF_LOOPING_CONST_OR_PURE)
9888 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9889 if (flags & ECF_NOVOPS)
9890 DECL_IS_NOVOPS (decl) = 1;
9891 if (flags & ECF_NORETURN)
9892 TREE_THIS_VOLATILE (decl) = 1;
9893 if (flags & ECF_MALLOC)
9894 DECL_IS_MALLOC (decl) = 1;
9895 if (flags & ECF_RETURNS_TWICE)
9896 DECL_IS_RETURNS_TWICE (decl) = 1;
9897 if (flags & ECF_LEAF)
9898 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9899 NULL, DECL_ATTRIBUTES (decl));
9900 if (flags & ECF_COLD)
9901 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9902 NULL, DECL_ATTRIBUTES (decl));
9903 if (flags & ECF_RET1)
9904 DECL_ATTRIBUTES (decl)
9905 = tree_cons (get_identifier ("fn spec"),
9906 build_tree_list (NULL_TREE, build_string (1, "1")),
9907 DECL_ATTRIBUTES (decl));
9908 if ((flags & ECF_TM_PURE) && flag_tm)
9909 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9910 /* Looping const or pure is implied by noreturn.
9911 There is currently no way to declare looping const or looping pure alone. */
9912 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9913 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
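/* Example (hypothetical, for illustration only): a front end that creates
   its own runtime helper FNDECL could mark it as neither throwing nor
   touching anything visible to its caller.  */

static void ATTRIBUTE_UNUSED
example_mark_helper_nothrow_leaf (tree fndecl)
{
  /* ECF_NOTHROW sets TREE_NOTHROW; ECF_LEAF adds the "leaf" attribute.  */
  set_call_expr_flags (fndecl, ECF_NOTHROW | ECF_LEAF);
}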
9917 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9919 static void
9920 local_define_builtin (const char *name, tree type, enum built_in_function code,
9921 const char *library_name, int ecf_flags)
9923 tree decl;
9925 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9926 library_name, NULL_TREE);
9927 set_call_expr_flags (decl, ecf_flags);
9929 set_builtin_decl (code, decl, true);
9932 /* Call this function after instantiating all builtins that the language
9933 front end cares about. This will build the rest of the builtins
9934 and internal functions that are relied upon by the tree optimizers and
9935 the middle-end. */
9937 void
9938 build_common_builtin_nodes (void)
9940 tree tmp, ftype;
9941 int ecf_flags;
9943 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9944 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9946 ftype = build_function_type (void_type_node, void_list_node);
9947 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9948 local_define_builtin ("__builtin_unreachable", ftype,
9949 BUILT_IN_UNREACHABLE,
9950 "__builtin_unreachable",
9951 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9952 | ECF_CONST | ECF_COLD);
9953 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9954 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9955 "abort",
9956 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9959 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9960 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9962 ftype = build_function_type_list (ptr_type_node,
9963 ptr_type_node, const_ptr_type_node,
9964 size_type_node, NULL_TREE);
9966 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9967 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9968 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
9969 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9970 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9971 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
9974 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9976 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9977 const_ptr_type_node, size_type_node,
9978 NULL_TREE);
9979 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9980 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9983 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9985 ftype = build_function_type_list (ptr_type_node,
9986 ptr_type_node, integer_type_node,
9987 size_type_node, NULL_TREE);
9988 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9989 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
9992 /* If we're checking the stack, `alloca' can throw. */
9993 const int alloca_flags
9994 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9996 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9998 ftype = build_function_type_list (ptr_type_node,
9999 size_type_node, NULL_TREE);
10000 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10001 "alloca", alloca_flags);
10004 ftype = build_function_type_list (ptr_type_node, size_type_node,
10005 size_type_node, NULL_TREE);
10006 local_define_builtin ("__builtin_alloca_with_align", ftype,
10007 BUILT_IN_ALLOCA_WITH_ALIGN,
10008 "__builtin_alloca_with_align",
10009 alloca_flags);
10011 ftype = build_function_type_list (ptr_type_node, size_type_node,
10012 size_type_node, size_type_node, NULL_TREE);
10013 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
10014 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
10015 "__builtin_alloca_with_align_and_max",
10016 alloca_flags);
10018 ftype = build_function_type_list (void_type_node,
10019 ptr_type_node, ptr_type_node,
10020 ptr_type_node, NULL_TREE);
10021 local_define_builtin ("__builtin_init_trampoline", ftype,
10022 BUILT_IN_INIT_TRAMPOLINE,
10023 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10024 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10025 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10026 "__builtin_init_heap_trampoline",
10027 ECF_NOTHROW | ECF_LEAF);
10028 local_define_builtin ("__builtin_init_descriptor", ftype,
10029 BUILT_IN_INIT_DESCRIPTOR,
10030 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
10032 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10033 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10034 BUILT_IN_ADJUST_TRAMPOLINE,
10035 "__builtin_adjust_trampoline",
10036 ECF_CONST | ECF_NOTHROW);
10037 local_define_builtin ("__builtin_adjust_descriptor", ftype,
10038 BUILT_IN_ADJUST_DESCRIPTOR,
10039 "__builtin_adjust_descriptor",
10040 ECF_CONST | ECF_NOTHROW);
10042 ftype = build_function_type_list (void_type_node,
10043 ptr_type_node, ptr_type_node, NULL_TREE);
10044 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10045 BUILT_IN_NONLOCAL_GOTO,
10046 "__builtin_nonlocal_goto",
10047 ECF_NORETURN | ECF_NOTHROW);
10049 ftype = build_function_type_list (void_type_node,
10050 ptr_type_node, ptr_type_node, NULL_TREE);
10051 local_define_builtin ("__builtin_setjmp_setup", ftype,
10052 BUILT_IN_SETJMP_SETUP,
10053 "__builtin_setjmp_setup", ECF_NOTHROW);
10055 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10056 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10057 BUILT_IN_SETJMP_RECEIVER,
10058 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10060 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10061 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10062 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10064 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10065 local_define_builtin ("__builtin_stack_restore", ftype,
10066 BUILT_IN_STACK_RESTORE,
10067 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10069 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10070 const_ptr_type_node, size_type_node,
10071 NULL_TREE);
10072 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10073 "__builtin_memcmp_eq",
10074 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10076 /* If there's a possibility that we might use the ARM EABI, build the
10077 alternate __cxa_end_cleanup node used to resume from C++. */
10078 if (targetm.arm_eabi_unwinder)
10080 ftype = build_function_type_list (void_type_node, NULL_TREE);
10081 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10082 BUILT_IN_CXA_END_CLEANUP,
10083 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10086 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10087 local_define_builtin ("__builtin_unwind_resume", ftype,
10088 BUILT_IN_UNWIND_RESUME,
10089 ((targetm_common.except_unwind_info (&global_options)
10090 == UI_SJLJ)
10091 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10092 ECF_NORETURN);
10094 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10096 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10097 NULL_TREE);
10098 local_define_builtin ("__builtin_return_address", ftype,
10099 BUILT_IN_RETURN_ADDRESS,
10100 "__builtin_return_address",
10101 ECF_NOTHROW);
10104 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10105 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10107 ftype = build_function_type_list (void_type_node, ptr_type_node,
10108 ptr_type_node, NULL_TREE);
10109 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10110 local_define_builtin ("__cyg_profile_func_enter", ftype,
10111 BUILT_IN_PROFILE_FUNC_ENTER,
10112 "__cyg_profile_func_enter", 0);
10113 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10114 local_define_builtin ("__cyg_profile_func_exit", ftype,
10115 BUILT_IN_PROFILE_FUNC_EXIT,
10116 "__cyg_profile_func_exit", 0);
10119 /* The exception object and filter values from the runtime. The argument
10120 must be zero before exception lowering, i.e. from the front end. After
10121 exception lowering, it will be the region number for the exception
10122 landing pad. These functions are PURE instead of CONST to prevent
10123 them from being hoisted past the exception edge that will initialize
10124 its value in the landing pad. */
10125 ftype = build_function_type_list (ptr_type_node,
10126 integer_type_node, NULL_TREE);
10127 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10128 /* Only use TM_PURE if we have TM language support. */
10129 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10130 ecf_flags |= ECF_TM_PURE;
10131 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10132 "__builtin_eh_pointer", ecf_flags);
10134 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10135 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10136 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10137 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10139 ftype = build_function_type_list (void_type_node,
10140 integer_type_node, integer_type_node,
10141 NULL_TREE);
10142 local_define_builtin ("__builtin_eh_copy_values", ftype,
10143 BUILT_IN_EH_COPY_VALUES,
10144 "__builtin_eh_copy_values", ECF_NOTHROW);
10146 /* Complex multiplication and division. These are handled as builtins
10147 rather than optabs because emit_library_call_value doesn't support
10148 complex. Further, we can do slightly better with folding these
10149 beasties if the real and imaginary parts of the arguments are separate. */
10151 int mode;
10153 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10155 char mode_name_buf[4], *q;
10156 const char *p;
10157 enum built_in_function mcode, dcode;
10158 tree type, inner_type;
10159 const char *prefix = "__";
10161 if (targetm.libfunc_gnu_prefix)
10162 prefix = "__gnu_";
10164 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10165 if (type == NULL)
10166 continue;
10167 inner_type = TREE_TYPE (type);
10169 ftype = build_function_type_list (type, inner_type, inner_type,
10170 inner_type, inner_type, NULL_TREE);
10172 mcode = ((enum built_in_function)
10173 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10174 dcode = ((enum built_in_function)
10175 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10177 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10178 *q = TOLOWER (*p);
10179 *q = '\0';
10181 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10182 NULL);
10183 local_define_builtin (built_in_names[mcode], ftype, mcode,
10184 built_in_names[mcode],
10185 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10187 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10188 NULL);
10189 local_define_builtin (built_in_names[dcode], ftype, dcode,
10190 built_in_names[dcode],
10191 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10195 init_internal_fns ();
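/* Example (hypothetical, for illustration only): once
   build_common_builtin_nodes has run, later passes can simply look up the
   decls registered above.  */

static tree ATTRIBUTE_UNUSED
example_get_stack_save_decl (void)
{
  /* Returns the __builtin_stack_save decl defined above.  */
  return builtin_decl_explicit (BUILT_IN_STACK_SAVE);
}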
10198 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10199 better way.
10201 If we requested a pointer to a vector, build up the pointers that
10202 we stripped off while looking for the inner type. Similarly for
10203 return values from functions.
10205 The argument TYPE is the top of the chain, and BOTTOM is the
10206 new type which we will point to. */
10208 tree
10209 reconstruct_complex_type (tree type, tree bottom)
10211 tree inner, outer;
10213 if (TREE_CODE (type) == POINTER_TYPE)
10215 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10216 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10217 TYPE_REF_CAN_ALIAS_ALL (type));
10219 else if (TREE_CODE (type) == REFERENCE_TYPE)
10221 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10222 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10223 TYPE_REF_CAN_ALIAS_ALL (type));
10225 else if (TREE_CODE (type) == ARRAY_TYPE)
10227 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10228 outer = build_array_type (inner, TYPE_DOMAIN (type));
10230 else if (TREE_CODE (type) == FUNCTION_TYPE)
10232 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10233 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10235 else if (TREE_CODE (type) == METHOD_TYPE)
10237 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10238 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10239 so we must compensate by getting rid of it. */
10240 outer
10241 = build_method_type_directly
10242 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10243 inner,
10244 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10246 else if (TREE_CODE (type) == OFFSET_TYPE)
10248 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10249 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10251 else
10252 return bottom;
10254 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10255 TYPE_QUALS (type));
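/* Example (hypothetical, for illustration only): given PTR_TYPE, a pointer
   to some scalar type, rebuild the same pointer shape around a four-element
   vector of that scalar.  */

static tree ATTRIBUTE_UNUSED
example_pointer_to_vector_of (tree ptr_type)
{
  tree vec_type = build_vector_type (TREE_TYPE (ptr_type), 4);
  return reconstruct_complex_type (ptr_type, vec_type);
}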
10258 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10259 the inner type. */
10260 tree
10261 build_vector_type_for_mode (tree innertype, machine_mode mode)
10263 int nunits;
10264 unsigned int bitsize;
10266 switch (GET_MODE_CLASS (mode))
10268 case MODE_VECTOR_INT:
10269 case MODE_VECTOR_FLOAT:
10270 case MODE_VECTOR_FRACT:
10271 case MODE_VECTOR_UFRACT:
10272 case MODE_VECTOR_ACCUM:
10273 case MODE_VECTOR_UACCUM:
10274 nunits = GET_MODE_NUNITS (mode);
10275 break;
10277 case MODE_INT:
10278 /* Check that there are no leftover bits. */
10279 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10280 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10281 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10282 break;
10284 default:
10285 gcc_unreachable ();
10288 return make_vector_type (innertype, nunits, mode);
10291 /* Similarly, but takes the inner type and number of units, which must be
10292 a power of two. */
10294 tree
10295 build_vector_type (tree innertype, int nunits)
10297 return make_vector_type (innertype, nunits, VOIDmode);
10300 /* Build a truth (boolean) vector type with NUNITS units, sized to match a vector of VECTOR_SIZE bytes. */
10302 tree
10303 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10305 machine_mode mask_mode
10306 = targetm.vectorize.get_mask_mode (nunits, vector_size).else_blk ();
10308 unsigned HOST_WIDE_INT vsize;
10309 if (mask_mode == BLKmode)
10310 vsize = vector_size * BITS_PER_UNIT;
10311 else
10312 vsize = GET_MODE_BITSIZE (mask_mode);
10314 unsigned HOST_WIDE_INT esize = vsize / nunits;
10315 gcc_assert (esize * nunits == vsize);
10317 tree bool_type = build_nonstandard_boolean_type (esize);
10319 return make_vector_type (bool_type, nunits, mask_mode);
10322 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10324 tree
10325 build_same_sized_truth_vector_type (tree vectype)
10327 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10328 return vectype;
10330 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10332 if (!size)
10333 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10335 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
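/* Example (hypothetical, for illustration only): the truth vector type
   used for the result of comparing two four-element integer vectors.  */

static tree ATTRIBUTE_UNUSED
example_truth_type_for_v4si (void)
{
  tree v4si = build_vector_type (integer_type_node, 4);
  return build_same_sized_truth_vector_type (v4si);
}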
10338 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10340 tree
10341 build_opaque_vector_type (tree innertype, int nunits)
10343 tree t = make_vector_type (innertype, nunits, VOIDmode);
10344 tree cand;
10345 /* We always build the non-opaque variant before the opaque one,
10346 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10347 cand = TYPE_NEXT_VARIANT (t);
10348 if (cand
10349 && TYPE_VECTOR_OPAQUE (cand)
10350 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10351 return cand;
10352 /* Otherwise build a variant type and make sure to queue it after
10353 the non-opaque type. */
10354 cand = build_distinct_type_copy (t);
10355 TYPE_VECTOR_OPAQUE (cand) = true;
10356 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10357 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10358 TYPE_NEXT_VARIANT (t) = cand;
10359 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10360 return cand;
10363 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10365 wide_int
10366 vector_cst_int_elt (const_tree t, unsigned int i)
10368 /* First handle elements that are directly encoded. */
10369 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10370 if (i < encoded_nelts)
10371 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, i));
10373 /* Identify the pattern that contains element I and work out the index of
10374 the last encoded element for that pattern. */
10375 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10376 unsigned int pattern = i % npatterns;
10377 unsigned int count = i / npatterns;
10378 unsigned int final_i = encoded_nelts - npatterns + pattern;
10380 /* If there are no steps, the final encoded value is the right one. */
10381 if (!VECTOR_CST_STEPPED_P (t))
10382 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10384 /* Otherwise work out the value from the last two encoded elements. */
10385 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10386 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10387 wide_int diff = wi::to_wide (v2) - wi::to_wide (v1);
10388 return wi::to_wide (v2) + (count - 2) * diff;
10391 /* Return the value of element I of VECTOR_CST T. */
10393 tree
10394 vector_cst_elt (const_tree t, unsigned int i)
10396 /* First handle elements that are directly encoded. */
10397 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10398 if (i < encoded_nelts)
10399 return VECTOR_CST_ENCODED_ELT (t, i);
10401 /* If there are no steps, the final encoded value is the right one. */
10402 if (!VECTOR_CST_STEPPED_P (t))
10404 /* Identify the pattern that contains element I and work out the index of
10405 the last encoded element for that pattern. */
10406 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10407 unsigned int pattern = i % npatterns;
10408 unsigned int final_i = encoded_nelts - npatterns + pattern;
10409 return VECTOR_CST_ENCODED_ELT (t, final_i);
10412 /* Otherwise work out the value from the last two encoded elements. */
10413 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10414 vector_cst_int_elt (t, i));
10417 /* Given an initializer INIT, return TRUE if INIT is zero or some
10418 aggregate of zeros. Otherwise return FALSE. */
10419 bool
10420 initializer_zerop (const_tree init)
10422 tree elt;
10424 STRIP_NOPS (init);
10426 switch (TREE_CODE (init))
10428 case INTEGER_CST:
10429 return integer_zerop (init);
10431 case REAL_CST:
10432 /* ??? Note that this is not correct for C4X float formats. There,
10433 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10434 negative exponent. */
10435 return real_zerop (init)
10436 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10438 case FIXED_CST:
10439 return fixed_zerop (init);
10441 case COMPLEX_CST:
10442 return integer_zerop (init)
10443 || (real_zerop (init)
10444 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10445 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10447 case VECTOR_CST:
10448 return (VECTOR_CST_NPATTERNS (init) == 1
10449 && VECTOR_CST_DUPLICATE_P (init)
10450 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)));
10452 case CONSTRUCTOR:
10454 unsigned HOST_WIDE_INT idx;
10456 if (TREE_CLOBBER_P (init))
10457 return false;
10458 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10459 if (!initializer_zerop (elt))
10460 return false;
10461 return true;
10464 case STRING_CST:
10466 int i;
10468 /* We need to loop through all elements to handle cases like
10469 "\0" and "\0foobar". */
10470 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10471 if (TREE_STRING_POINTER (init)[i] != '\0')
10472 return false;
10474 return true;
10477 default:
10478 return false;
10482 /* Check whether vector VEC consists entirely of equal elements and
10483 whether the number of elements corresponds to the type of VEC.
10484 The function returns the first element of the vector
10485 or NULL_TREE if the vector is not uniform. */
10486 tree
10487 uniform_vector_p (const_tree vec)
10489 tree first, t;
10490 unsigned i;
10492 if (vec == NULL_TREE)
10493 return NULL_TREE;
10495 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10497 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10498 return TREE_OPERAND (vec, 0);
10500 else if (TREE_CODE (vec) == VECTOR_CST)
10502 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10503 return VECTOR_CST_ENCODED_ELT (vec, 0);
10504 return NULL_TREE;
10507 else if (TREE_CODE (vec) == CONSTRUCTOR)
10509 first = error_mark_node;
10511 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10513 if (i == 0)
10515 first = t;
10516 continue;
10518 if (!operand_equal_p (first, t, 0))
10519 return NULL_TREE;
10521 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10522 return NULL_TREE;
10524 return first;
10527 return NULL_TREE;
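/* Example (hypothetical, for illustration only): a VECTOR_CST built by
   replicating a single constant is uniform, and uniform_vector_p hands back
   the replicated element.  */

static tree ATTRIBUTE_UNUSED
example_uniform_splat_element (void)
{
  tree v4si = build_vector_type (integer_type_node, 4);
  tree elt = build_int_cst (integer_type_node, 7);
  tree splat = build_vector_from_val (v4si, elt);
  /* Returns the replicated element.  */
  return uniform_vector_p (splat);
}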
10530 /* Build an empty statement at location LOC. */
10532 tree
10533 build_empty_stmt (location_t loc)
10535 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10536 SET_EXPR_LOCATION (t, loc);
10537 return t;
10541 /* Build an OpenMP clause with code CODE. LOC is the location of the
10542 clause. */
10544 tree
10545 build_omp_clause (location_t loc, enum omp_clause_code code)
10547 tree t;
10548 int size, length;
10550 length = omp_clause_num_ops[code];
10551 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10553 record_node_allocation_statistics (OMP_CLAUSE, size);
10555 t = (tree) ggc_internal_alloc (size);
10556 memset (t, 0, size);
10557 TREE_SET_CODE (t, OMP_CLAUSE);
10558 OMP_CLAUSE_SET_CODE (t, code);
10559 OMP_CLAUSE_LOCATION (t) = loc;
10561 return t;
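/* Example (hypothetical, for illustration only): OMP_CLAUSE_NOWAIT takes no
   operands, so building it only records the clause code and location.  */

static tree ATTRIBUTE_UNUSED
example_build_nowait_clause (void)
{
  return build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_NOWAIT);
}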
10564 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10565 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10566 Except for the CODE and operand count field, other storage for the
10567 object is initialized to zeros. */
10569 tree
10570 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10572 tree t;
10573 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10575 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10576 gcc_assert (len >= 1);
10578 record_node_allocation_statistics (code, length);
10580 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10582 TREE_SET_CODE (t, code);
10584 /* Can't use TREE_OPERAND to store the length because if checking is
10585 enabled, it will try to check the length before we store it. :-P */
10586 t->exp.operands[0] = build_int_cst (sizetype, len);
10588 return t;
10591 /* Helper function for build_call_* functions; build a CALL_EXPR with
10592 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10593 the argument slots. */
10595 static tree
10596 build_call_1 (tree return_type, tree fn, int nargs)
10598 tree t;
10600 t = build_vl_exp (CALL_EXPR, nargs + 3);
10601 TREE_TYPE (t) = return_type;
10602 CALL_EXPR_FN (t) = fn;
10603 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10605 return t;
10608 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10609 FN and a null static chain slot. NARGS is the number of call arguments
10610 which are specified as "..." arguments. */
10612 tree
10613 build_call_nary (tree return_type, tree fn, int nargs, ...)
10615 tree ret;
10616 va_list args;
10617 va_start (args, nargs);
10618 ret = build_call_valist (return_type, fn, nargs, args);
10619 va_end (args);
10620 return ret;
10623 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10624 FN and a null static chain slot. NARGS is the number of call arguments
10625 which are specified as a va_list ARGS. */
10627 tree
10628 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10630 tree t;
10631 int i;
10633 t = build_call_1 (return_type, fn, nargs);
10634 for (i = 0; i < nargs; i++)
10635 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10636 process_call_operands (t);
10637 return t;
10640 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10641 FN and a null static chain slot. NARGS is the number of call arguments
10642 which are specified as a tree array ARGS. */
10644 tree
10645 build_call_array_loc (location_t loc, tree return_type, tree fn,
10646 int nargs, const tree *args)
10648 tree t;
10649 int i;
10651 t = build_call_1 (return_type, fn, nargs);
10652 for (i = 0; i < nargs; i++)
10653 CALL_EXPR_ARG (t, i) = args[i];
10654 process_call_operands (t);
10655 SET_EXPR_LOCATION (t, loc);
10656 return t;
10659 /* Like build_call_array, but takes a vec. */
10661 tree
10662 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10664 tree ret, t;
10665 unsigned int ix;
10667 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10668 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10669 CALL_EXPR_ARG (ret, ix) = t;
10670 process_call_operands (ret);
10671 return ret;
10674 /* Conveniently construct a function call expression. FNDECL names the
10675 function to be called and N arguments are passed in the array
10676 ARGARRAY. */
10678 tree
10679 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10681 tree fntype = TREE_TYPE (fndecl);
10682 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10684 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10687 /* Conveniently construct a function call expression. FNDECL names the
10688 function to be called and the arguments are passed in the vector
10689 VEC. */
10691 tree
10692 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10694 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10695 vec_safe_address (vec));
10699 /* Conveniently construct a function call expression. FNDECL names the
10700 function to be called, N is the number of arguments, and the "..."
10701 parameters are the argument expressions. */
10703 tree
10704 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10706 va_list ap;
10707 tree *argarray = XALLOCAVEC (tree, n);
10708 int i;
10710 va_start (ap, n);
10711 for (i = 0; i < n; i++)
10712 argarray[i] = va_arg (ap, tree);
10713 va_end (ap);
10714 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10717 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10718 varargs macros aren't supported by all bootstrap compilers. */
10720 tree
10721 build_call_expr (tree fndecl, int n, ...)
10723 va_list ap;
10724 tree *argarray = XALLOCAVEC (tree, n);
10725 int i;
10727 va_start (ap, n);
10728 for (i = 0; i < n; i++)
10729 argarray[i] = va_arg (ap, tree);
10730 va_end (ap);
10731 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
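/* Example (hypothetical, for illustration only): build a call to the abort
   builtin; the "..." form takes the argument trees directly, here none.  */

static tree ATTRIBUTE_UNUSED
example_build_abort_call (void)
{
  tree fn = builtin_decl_explicit (BUILT_IN_ABORT);
  return fn ? build_call_expr (fn, 0) : NULL_TREE;
}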
10734 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10735 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10736 It will get gimplified later into an ordinary internal function. */
10738 tree
10739 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10740 tree type, int n, const tree *args)
10742 tree t = build_call_1 (type, NULL_TREE, n);
10743 for (int i = 0; i < n; ++i)
10744 CALL_EXPR_ARG (t, i) = args[i];
10745 SET_EXPR_LOCATION (t, loc);
10746 CALL_EXPR_IFN (t) = ifn;
10747 return t;
10750 /* Build an internal call expression. This is just like CALL_EXPR, except
10751 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10752 internal function. */
10754 tree
10755 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10756 tree type, int n, ...)
10758 va_list ap;
10759 tree *argarray = XALLOCAVEC (tree, n);
10760 int i;
10762 va_start (ap, n);
10763 for (i = 0; i < n; i++)
10764 argarray[i] = va_arg (ap, tree);
10765 va_end (ap);
10766 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10769 /* Return a function call to FN, if the target is guaranteed to support it,
10770 or null otherwise.
10772 N is the number of arguments, passed in the "...", and TYPE is the
10773 type of the return value. */
10775 tree
10776 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10777 int n, ...)
10779 va_list ap;
10780 tree *argarray = XALLOCAVEC (tree, n);
10781 int i;
10783 va_start (ap, n);
10784 for (i = 0; i < n; i++)
10785 argarray[i] = va_arg (ap, tree);
10786 va_end (ap);
10787 if (internal_fn_p (fn))
10789 internal_fn ifn = as_internal_fn (fn);
10790 if (direct_internal_fn_p (ifn))
10792 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10793 if (!direct_internal_fn_supported_p (ifn, types,
10794 OPTIMIZE_FOR_BOTH))
10795 return NULL_TREE;
10797 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10799 else
10801 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10802 if (!fndecl)
10803 return NULL_TREE;
10804 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10808 /* Return a function call to the appropriate builtin alloca variant.
10810 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10811 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10812 bound for SIZE in case it is not a fixed value. */
10814 tree
10815 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10817 if (max_size >= 0)
10819 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10820 return
10821 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10823 else if (align > 0)
10825 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10826 return build_call_expr (t, 2, size, size_int (align));
10828 else
10830 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10831 return build_call_expr (t, 1, size);
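/* Example (hypothetical, for illustration only): request a dynamic
   allocation of SIZE bytes with alignment ALIGN and no known upper bound;
   passing a negative MAX_SIZE selects the plain with-align variant.  */

static tree ATTRIBUTE_UNUSED
example_aligned_alloca (tree size, unsigned int align)
{
  return build_alloca_call_expr (size, align, -1);
}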
10835 /* Create a new constant string literal and return a char* pointer to it.
10836 The STRING_CST value is the LEN characters at STR. */
10837 tree
10838 build_string_literal (int len, const char *str)
10840 tree t, elem, index, type;
10842 t = build_string (len, str);
10843 elem = build_type_variant (char_type_node, 1, 0);
10844 index = build_index_type (size_int (len - 1));
10845 type = build_array_type (elem, index);
10846 TREE_TYPE (t) = type;
10847 TREE_CONSTANT (t) = 1;
10848 TREE_READONLY (t) = 1;
10849 TREE_STATIC (t) = 1;
10851 type = build_pointer_type (elem);
10852 t = build1 (ADDR_EXPR, type,
10853 build4 (ARRAY_REF, elem,
10854 t, integer_zero_node, NULL_TREE, NULL_TREE));
10855 return t;
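/* Example (hypothetical, for illustration only): combine
   build_string_literal with build_call_expr to emit a call to the puts
   builtin, if the front end provides one.  */

static tree ATTRIBUTE_UNUSED
example_build_puts_call (void)
{
  tree fn = builtin_decl_implicit (BUILT_IN_PUTS);
  if (!fn)
    return NULL_TREE;
  /* The length passed to build_string_literal includes the trailing NUL.  */
  tree arg = build_string_literal (sizeof "hello", "hello");
  return build_call_expr (fn, 1, arg);
}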
10860 /* Return true if T (assumed to be a DECL) must be assigned a memory
10861 location. */
10863 bool
10864 needs_to_live_in_memory (const_tree t)
10866 return (TREE_ADDRESSABLE (t)
10867 || is_global_var (t)
10868 || (TREE_CODE (t) == RESULT_DECL
10869 && !DECL_BY_REFERENCE (t)
10870 && aggregate_value_p (t, current_function_decl)));
10873 /* Return the value of the constant X, sign-extended. */
10875 HOST_WIDE_INT
10876 int_cst_value (const_tree x)
10878 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10879 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10881 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10882 gcc_assert (cst_and_fits_in_hwi (x));
10884 if (bits < HOST_BITS_PER_WIDE_INT)
10886 bool negative = ((val >> (bits - 1)) & 1) != 0;
10887 if (negative)
10888 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
10889 else
10890 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
10893 return val;
10896 /* If TYPE is an integral or pointer type, return an integer type with
10897 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10898 if TYPE is already an integer type of signedness UNSIGNEDP. */
10900 tree
10901 signed_or_unsigned_type_for (int unsignedp, tree type)
10903 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10904 return type;
10906 if (TREE_CODE (type) == VECTOR_TYPE)
10908 tree inner = TREE_TYPE (type);
10909 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10910 if (!inner2)
10911 return NULL_TREE;
10912 if (inner == inner2)
10913 return type;
10914 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10917 if (!INTEGRAL_TYPE_P (type)
10918 && !POINTER_TYPE_P (type)
10919 && TREE_CODE (type) != OFFSET_TYPE)
10920 return NULL_TREE;
10922 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10925 /* If TYPE is an integral or pointer type, return an integer type with
10926 the same precision which is unsigned, or itself if TYPE is already an
10927 unsigned integer type. */
10929 tree
10930 unsigned_type_for (tree type)
10932 return signed_or_unsigned_type_for (1, type);
10935 /* If TYPE is an integral or pointer type, return an integer type with
10936 the same precision which is signed, or itself if TYPE is already a
10937 signed integer type. */
10939 tree
10940 signed_type_for (tree type)
10942 return signed_or_unsigned_type_for (0, type);
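/* Example (hypothetical, for illustration only): flipping the signedness of
   a standard integer type keeps its precision.  */

static tree ATTRIBUTE_UNUSED
example_unsigned_int_type (void)
{
  /* Yields an unsigned integer type with the precision of "int".  */
  return unsigned_type_for (integer_type_node);
}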
10945 /* If TYPE is a vector type, return a signed integer vector type with the
10946 same width and number of subparts. Otherwise return boolean_type_node. */
10948 tree
10949 truth_type_for (tree type)
10951 if (TREE_CODE (type) == VECTOR_TYPE)
10953 if (VECTOR_BOOLEAN_TYPE_P (type))
10954 return type;
10955 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
10956 GET_MODE_SIZE (TYPE_MODE (type)));
10958 else
10959 return boolean_type_node;
10962 /* Returns the largest value obtainable by casting something in INNER type to
10963 OUTER type. */
10965 tree
10966 upper_bound_in_type (tree outer, tree inner)
10968 unsigned int det = 0;
10969 unsigned oprec = TYPE_PRECISION (outer);
10970 unsigned iprec = TYPE_PRECISION (inner);
10971 unsigned prec;
10973 /* Compute a unique number for every combination. */
10974 det |= (oprec > iprec) ? 4 : 0;
10975 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10976 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10978 /* Determine the exponent to use. */
10979 switch (det)
10981 case 0:
10982 case 1:
10983 /* oprec <= iprec, outer: signed, inner: don't care. */
10984 prec = oprec - 1;
10985 break;
10986 case 2:
10987 case 3:
10988 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10989 prec = oprec;
10990 break;
10991 case 4:
10992 /* oprec > iprec, outer: signed, inner: signed. */
10993 prec = iprec - 1;
10994 break;
10995 case 5:
10996 /* oprec > iprec, outer: signed, inner: unsigned. */
10997 prec = iprec;
10998 break;
10999 case 6:
11000 /* oprec > iprec, outer: unsigned, inner: signed. */
11001 prec = oprec;
11002 break;
11003 case 7:
11004 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11005 prec = iprec;
11006 break;
11007 default:
11008 gcc_unreachable ();
11011 return wide_int_to_tree (outer,
11012 wi::mask (prec, false, TYPE_PRECISION (outer)));
11015 /* Returns the smallest value obtainable by casting something in INNER type to
11016 OUTER type. */
11018 tree
11019 lower_bound_in_type (tree outer, tree inner)
11021 unsigned oprec = TYPE_PRECISION (outer);
11022 unsigned iprec = TYPE_PRECISION (inner);
11024 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11025 and obtain 0. */
11026 if (TYPE_UNSIGNED (outer)
11027 /* If we are widening something of an unsigned type, OUTER type
11028 contains all values of INNER type. In particular, both INNER
11029 and OUTER types have zero in common. */
11030 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11031 return build_int_cst (outer, 0);
11032 else
11034 /* If we are widening a signed type to another signed type, we
11035 want to obtain -2^(iprec-1). If we are keeping the
11036 precision or narrowing to a signed type, we want to obtain
11037 -2^(oprec-1). */
11038 unsigned prec = oprec > iprec ? iprec : oprec;
11039 return wide_int_to_tree (outer,
11040 wi::mask (prec - 1, true,
11041 TYPE_PRECISION (outer)));
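/* Example (hypothetical, for illustration only): the range of values
   obtainable by casting an unsigned char value to int is [0, 255].  */

static void ATTRIBUTE_UNUSED
example_uchar_to_int_bounds (void)
{
  tree hi = upper_bound_in_type (integer_type_node, unsigned_char_type_node);
  tree lo = lower_bound_in_type (integer_type_node, unsigned_char_type_node);
  gcc_checking_assert (tree_to_shwi (hi) == 255 && integer_zerop (lo));
}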
11045 /* Return nonzero if two operands that are suitable for PHI nodes are
11046 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11047 SSA_NAME or invariant. Note that this is strictly an optimization.
11048 That is, callers of this function can directly call operand_equal_p
11049 and get the same result, only slower. */
11051 int
11052 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11054 if (arg0 == arg1)
11055 return 1;
11056 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11057 return 0;
11058 return operand_equal_p (arg0, arg1, 0);
11061 /* Returns the number of zero bits at the end of the binary representation of X. */
11063 tree
11064 num_ending_zeros (const_tree x)
11066 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
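/* Example (hypothetical, for illustration only): 40 is 101000 in binary,
   so it has three trailing zero bits.  */

static void ATTRIBUTE_UNUSED
example_num_ending_zeros (void)
{
  tree n = build_int_cst (integer_type_node, 40);
  gcc_checking_assert (tree_to_uhwi (num_ending_zeros (n)) == 3);
}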
11070 #define WALK_SUBTREE(NODE) \
11071 do \
11073 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11074 if (result) \
11075 return result; \
11077 while (0)
11079 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11080 to be walked whenever a type is seen in the tree. The rest of the operands
11081 and the return value are as for walk_tree. */
11083 static tree
11084 walk_type_fields (tree type, walk_tree_fn func, void *data,
11085 hash_set<tree> *pset, walk_tree_lh lh)
11087 tree result = NULL_TREE;
11089 switch (TREE_CODE (type))
11091 case POINTER_TYPE:
11092 case REFERENCE_TYPE:
11093 case VECTOR_TYPE:
11094 /* We have to worry about mutually recursive pointers. These can't
11095 be written in C. They can in Ada. It's pathological, but
11096 there's an ACATS test (c38102a) that checks it. Deal with this
11097 by checking if we're pointing to another pointer, that one
11098 points to another pointer, that one does too, and we have no htab.
11099 If so, get a hash table. We check three levels deep to avoid
11100 the cost of the hash table if we don't need one. */
11101 if (POINTER_TYPE_P (TREE_TYPE (type))
11102 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11103 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11104 && !pset)
11106 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11107 func, data);
11108 if (result)
11109 return result;
11111 break;
11114 /* fall through */
11116 case COMPLEX_TYPE:
11117 WALK_SUBTREE (TREE_TYPE (type));
11118 break;
11120 case METHOD_TYPE:
11121 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11123 /* Fall through. */
11125 case FUNCTION_TYPE:
11126 WALK_SUBTREE (TREE_TYPE (type));
11128 tree arg;
11130 /* We never want to walk into default arguments. */
11131 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11132 WALK_SUBTREE (TREE_VALUE (arg));
11134 break;
11136 case ARRAY_TYPE:
11137 /* Don't follow this node's type if it is a pointer, for fear that
11138 we'll have infinite recursion. If we have a PSET, then we
11139 need not fear. */
11140 if (pset
11141 || (!POINTER_TYPE_P (TREE_TYPE (type))
11142 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11143 WALK_SUBTREE (TREE_TYPE (type));
11144 WALK_SUBTREE (TYPE_DOMAIN (type));
11145 break;
11147 case OFFSET_TYPE:
11148 WALK_SUBTREE (TREE_TYPE (type));
11149 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11150 break;
11152 default:
11153 break;
11156 return NULL_TREE;
11159 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11160 called with the DATA and the address of each sub-tree. If FUNC returns a
11161 non-NULL value, the traversal is stopped, and the value returned by FUNC
11162 is returned. If PSET is non-NULL it is used to record the nodes visited,
11163 and to avoid visiting a node more than once. */
11165 tree
11166 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11167 hash_set<tree> *pset, walk_tree_lh lh)
11169 enum tree_code code;
11170 int walk_subtrees;
11171 tree result;
11173 #define WALK_SUBTREE_TAIL(NODE) \
11174 do \
11176 tp = & (NODE); \
11177 goto tail_recurse; \
11179 while (0)
11181 tail_recurse:
11182 /* Skip empty subtrees. */
11183 if (!*tp)
11184 return NULL_TREE;
11186 /* Don't walk the same tree twice, if the user has requested
11187 that we avoid doing so. */
11188 if (pset && pset->add (*tp))
11189 return NULL_TREE;
11191 /* Call the function. */
11192 walk_subtrees = 1;
11193 result = (*func) (tp, &walk_subtrees, data);
11195 /* If we found something, return it. */
11196 if (result)
11197 return result;
11199 code = TREE_CODE (*tp);
11201 /* Even if we didn't, FUNC may have decided that there was nothing
11202 interesting below this point in the tree. */
11203 if (!walk_subtrees)
11205 /* But we still need to check our siblings. */
11206 if (code == TREE_LIST)
11207 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11208 else if (code == OMP_CLAUSE)
11209 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11210 else
11211 return NULL_TREE;
11214 if (lh)
11216 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11217 if (result || !walk_subtrees)
11218 return result;
11221 switch (code)
11223 case ERROR_MARK:
11224 case IDENTIFIER_NODE:
11225 case INTEGER_CST:
11226 case REAL_CST:
11227 case FIXED_CST:
11228 case VECTOR_CST:
11229 case STRING_CST:
11230 case BLOCK:
11231 case PLACEHOLDER_EXPR:
11232 case SSA_NAME:
11233 case FIELD_DECL:
11234 case RESULT_DECL:
11235 /* None of these have subtrees other than those already walked
11236 above. */
11237 break;
11239 case TREE_LIST:
11240 WALK_SUBTREE (TREE_VALUE (*tp));
11241 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11242 break;
11244 case TREE_VEC:
11246 int len = TREE_VEC_LENGTH (*tp);
11248 if (len == 0)
11249 break;
11251 /* Walk all elements but the first. */
11252 while (--len)
11253 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11255 /* Now walk the first one as a tail call. */
11256 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11259 case COMPLEX_CST:
11260 WALK_SUBTREE (TREE_REALPART (*tp));
11261 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11263 case CONSTRUCTOR:
11265 unsigned HOST_WIDE_INT idx;
11266 constructor_elt *ce;
11268 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11269 idx++)
11270 WALK_SUBTREE (ce->value);
11272 break;
11274 case SAVE_EXPR:
11275 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11277 case BIND_EXPR:
11279 tree decl;
11280 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11282 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11283 into declarations that are just mentioned, rather than
11284 declared; they don't really belong to this part of the tree.
11285 And, we can see cycles: the initializer for a declaration
11286 can refer to the declaration itself. */
11287 WALK_SUBTREE (DECL_INITIAL (decl));
11288 WALK_SUBTREE (DECL_SIZE (decl));
11289 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11291 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11294 case STATEMENT_LIST:
11296 tree_stmt_iterator i;
11297 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11298 WALK_SUBTREE (*tsi_stmt_ptr (i));
11300 break;
11302 case OMP_CLAUSE:
11303 switch (OMP_CLAUSE_CODE (*tp))
11305 case OMP_CLAUSE_GANG:
11306 case OMP_CLAUSE__GRIDDIM_:
11307 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11308 /* FALLTHRU */
11310 case OMP_CLAUSE_ASYNC:
11311 case OMP_CLAUSE_WAIT:
11312 case OMP_CLAUSE_WORKER:
11313 case OMP_CLAUSE_VECTOR:
11314 case OMP_CLAUSE_NUM_GANGS:
11315 case OMP_CLAUSE_NUM_WORKERS:
11316 case OMP_CLAUSE_VECTOR_LENGTH:
11317 case OMP_CLAUSE_PRIVATE:
11318 case OMP_CLAUSE_SHARED:
11319 case OMP_CLAUSE_FIRSTPRIVATE:
11320 case OMP_CLAUSE_COPYIN:
11321 case OMP_CLAUSE_COPYPRIVATE:
11322 case OMP_CLAUSE_FINAL:
11323 case OMP_CLAUSE_IF:
11324 case OMP_CLAUSE_NUM_THREADS:
11325 case OMP_CLAUSE_SCHEDULE:
11326 case OMP_CLAUSE_UNIFORM:
11327 case OMP_CLAUSE_DEPEND:
11328 case OMP_CLAUSE_NUM_TEAMS:
11329 case OMP_CLAUSE_THREAD_LIMIT:
11330 case OMP_CLAUSE_DEVICE:
11331 case OMP_CLAUSE_DIST_SCHEDULE:
11332 case OMP_CLAUSE_SAFELEN:
11333 case OMP_CLAUSE_SIMDLEN:
11334 case OMP_CLAUSE_ORDERED:
11335 case OMP_CLAUSE_PRIORITY:
11336 case OMP_CLAUSE_GRAINSIZE:
11337 case OMP_CLAUSE_NUM_TASKS:
11338 case OMP_CLAUSE_HINT:
11339 case OMP_CLAUSE_TO_DECLARE:
11340 case OMP_CLAUSE_LINK:
11341 case OMP_CLAUSE_USE_DEVICE_PTR:
11342 case OMP_CLAUSE_IS_DEVICE_PTR:
11343 case OMP_CLAUSE__LOOPTEMP_:
11344 case OMP_CLAUSE__SIMDUID_:
11345 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11346 /* FALLTHRU */
11348 case OMP_CLAUSE_INDEPENDENT:
11349 case OMP_CLAUSE_NOWAIT:
11350 case OMP_CLAUSE_DEFAULT:
11351 case OMP_CLAUSE_UNTIED:
11352 case OMP_CLAUSE_MERGEABLE:
11353 case OMP_CLAUSE_PROC_BIND:
11354 case OMP_CLAUSE_INBRANCH:
11355 case OMP_CLAUSE_NOTINBRANCH:
11356 case OMP_CLAUSE_FOR:
11357 case OMP_CLAUSE_PARALLEL:
11358 case OMP_CLAUSE_SECTIONS:
11359 case OMP_CLAUSE_TASKGROUP:
11360 case OMP_CLAUSE_NOGROUP:
11361 case OMP_CLAUSE_THREADS:
11362 case OMP_CLAUSE_SIMD:
11363 case OMP_CLAUSE_DEFAULTMAP:
11364 case OMP_CLAUSE_AUTO:
11365 case OMP_CLAUSE_SEQ:
11366 case OMP_CLAUSE_TILE:
11367 case OMP_CLAUSE__SIMT_:
11368 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11370 case OMP_CLAUSE_LASTPRIVATE:
11371 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11372 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11373 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11375 case OMP_CLAUSE_COLLAPSE:
11377 int i;
11378 for (i = 0; i < 3; i++)
11379 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11380 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11383 case OMP_CLAUSE_LINEAR:
11384 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11385 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11386 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11387 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11389 case OMP_CLAUSE_ALIGNED:
11390 case OMP_CLAUSE_FROM:
11391 case OMP_CLAUSE_TO:
11392 case OMP_CLAUSE_MAP:
11393 case OMP_CLAUSE__CACHE_:
11394 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11395 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11396 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11398 case OMP_CLAUSE_REDUCTION:
11400 int i;
11401 for (i = 0; i < 5; i++)
11402 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11403 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11406 default:
11407 gcc_unreachable ();
11409 break;
11411 case TARGET_EXPR:
11413 int i, len;
11415 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11416 But, we only want to walk once. */
11417 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11418 for (i = 0; i < len; ++i)
11419 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11420 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11423 case DECL_EXPR:
11424 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11425 defining. We only want to walk into these fields of a type in this
11426 case and not in the general case of a mere reference to the type.
11428 The criterion is as follows: if the field can be an expression, it
11429 must be walked only here. This should be in keeping with the fields
11430 that are directly gimplified in gimplify_type_sizes in order for the
11431 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11432 variable-sized types.
11434 Note that DECLs get walked as part of processing the BIND_EXPR. */
11435 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11437 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11438 if (TREE_CODE (*type_p) == ERROR_MARK)
11439 return NULL_TREE;
11441 /* Call the function for the type. See if it returns anything or
11442 doesn't want us to continue. If we are to continue, walk both
11443 the normal fields and those for the declaration case. */
11444 result = (*func) (type_p, &walk_subtrees, data);
11445 if (result || !walk_subtrees)
11446 return result;
11448 /* But do not walk a pointed-to type since it may itself need to
11449 be walked in the declaration case if it isn't anonymous. */
11450 if (!POINTER_TYPE_P (*type_p))
11452 result = walk_type_fields (*type_p, func, data, pset, lh);
11453 if (result)
11454 return result;
11457 /* If this is a record type, also walk the fields. */
11458 if (RECORD_OR_UNION_TYPE_P (*type_p))
11460 tree field;
11462 for (field = TYPE_FIELDS (*type_p); field;
11463 field = DECL_CHAIN (field))
11465 /* We'd like to look at the type of the field, but we can
11466 easily get infinite recursion. So assume it's pointed
11467 to elsewhere in the tree. Also, ignore things that
11468 aren't fields. */
11469 if (TREE_CODE (field) != FIELD_DECL)
11470 continue;
11472 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11473 WALK_SUBTREE (DECL_SIZE (field));
11474 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11475 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11476 WALK_SUBTREE (DECL_QUALIFIER (field));
11480 /* Same for scalar types. */
11481 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11482 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11483 || TREE_CODE (*type_p) == INTEGER_TYPE
11484 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11485 || TREE_CODE (*type_p) == REAL_TYPE)
11487 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11488 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11491 WALK_SUBTREE (TYPE_SIZE (*type_p));
11492 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11494 /* FALLTHRU */
11496 default:
11497 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11499 int i, len;
11501 /* Walk over all the sub-trees of this operand. */
11502 len = TREE_OPERAND_LENGTH (*tp);
11504 /* Go through the subtrees. We need to do this in forward order so
11505 that the scope of a FOR_EXPR is handled properly. */
11506 if (len)
11508 for (i = 0; i < len - 1; ++i)
11509 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11510 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11513 /* If this is a type, walk the needed fields in the type. */
11514 else if (TYPE_P (*tp))
11515 return walk_type_fields (*tp, func, data, pset, lh);
11516 break;
11519 /* We didn't find what we were looking for. */
11520 return NULL_TREE;
11522 #undef WALK_SUBTREE_TAIL
11524 #undef WALK_SUBTREE
11526 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11528 tree
11529 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11530 walk_tree_lh lh)
11532 tree result;
11534 hash_set<tree> pset;
11535 result = walk_tree_1 (tp, func, data, &pset, lh);
11536 return result;
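/* Example (hypothetical, for illustration only): a walk_tree_fn callback
   that stops the walk at the first SSA_NAME it finds.  A caller would use
   it as walk_tree_without_duplicates (&expr, example_find_ssa_name, NULL).  */

static tree ATTRIBUTE_UNUSED
example_find_ssa_name (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
		       void *data ATTRIBUTE_UNUSED)
{
  if (TREE_CODE (*tp) == SSA_NAME)
    return *tp;
  return NULL_TREE;
}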
11540 tree
11541 tree_block (tree t)
11543 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11545 if (IS_EXPR_CODE_CLASS (c))
11546 return LOCATION_BLOCK (t->exp.locus);
11547 gcc_unreachable ();
11548 return NULL;
11551 void
11552 tree_set_block (tree t, tree b)
11554 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11556 if (IS_EXPR_CODE_CLASS (c))
11558 t->exp.locus = set_block (t->exp.locus, b);
11560 else
11561 gcc_unreachable ();
11564 /* Create a nameless artificial label and put it in the current
11565 function context. The label has a location of LOC. Returns the
11566 newly created label. */
11568 tree
11569 create_artificial_label (location_t loc)
11571 tree lab = build_decl (loc,
11572 LABEL_DECL, NULL_TREE, void_type_node);
11574 DECL_ARTIFICIAL (lab) = 1;
11575 DECL_IGNORED_P (lab) = 1;
11576 DECL_CONTEXT (lab) = current_function_decl;
11577 return lab;
11580 /* Given a tree, try to return a useful variable name that we can use
11581 to prefix a temporary that is being assigned the value of the tree.
11582 E.g. given <temp> = &A, return A. */
11584 const char *
11585 get_name (tree t)
11587 tree stripped_decl;
11589 stripped_decl = t;
11590 STRIP_NOPS (stripped_decl);
11591 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11592 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11593 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11595 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11596 if (!name)
11597 return NULL;
11598 return IDENTIFIER_POINTER (name);
11600 else
11602 switch (TREE_CODE (stripped_decl))
11604 case ADDR_EXPR:
11605 return get_name (TREE_OPERAND (stripped_decl, 0));
11606 default:
11607 return NULL;
11612 /* Return true if the function type FNTYPE has a variable argument list. */
11614 bool
11615 stdarg_p (const_tree fntype)
11617 function_args_iterator args_iter;
11618 tree n = NULL_TREE, t;
11620 if (!fntype)
11621 return false;
11623 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11625 n = t;
11628 return n != NULL_TREE && n != void_type_node;
11631 /* Return true if the function type FNTYPE has a prototype. */
11633 bool
11634 prototype_p (const_tree fntype)
11636 tree t;
11638 gcc_assert (fntype != NULL_TREE);
11640 t = TYPE_ARG_TYPES (fntype);
11641 return (t != NULL_TREE);
11644 /* If BLOCK is inlined from an __attribute__((__artificial__))
11645 routine, return a pointer to the location from which it has been
11646 called. */
11647 location_t *
11648 block_nonartificial_location (tree block)
11650 location_t *ret = NULL;
11652 while (block && TREE_CODE (block) == BLOCK
11653 && BLOCK_ABSTRACT_ORIGIN (block))
11655 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11657 while (TREE_CODE (ao) == BLOCK
11658 && BLOCK_ABSTRACT_ORIGIN (ao)
11659 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11660 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11662 if (TREE_CODE (ao) == FUNCTION_DECL)
11664 /* If AO is an artificial inline, point RET to the
11665 call site locus at which it has been inlined and continue
11666 the loop, in case AO's caller is also an artificial
11667 inline. */
11668 if (DECL_DECLARED_INLINE_P (ao)
11669 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11670 ret = &BLOCK_SOURCE_LOCATION (block);
11671 else
11672 break;
11674 else if (TREE_CODE (ao) != BLOCK)
11675 break;
11677 block = BLOCK_SUPERCONTEXT (block);
11679 return ret;
11683 /* If EXP is inlined from an __attribute__((__artificial__))
11684 function, return the location of the original call expression. */
11686 location_t
11687 tree_nonartificial_location (tree exp)
11689 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11691 if (loc)
11692 return *loc;
11693 else
11694 return EXPR_LOCATION (exp);
11698 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11699 nodes. */
11701 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11703 hashval_t
11704 cl_option_hasher::hash (tree x)
11706 const_tree const t = x;
11707 const char *p;
11708 size_t i;
11709 size_t len = 0;
11710 hashval_t hash = 0;
11712 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11714 p = (const char *)TREE_OPTIMIZATION (t);
11715 len = sizeof (struct cl_optimization);
11718 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11719 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11721 else
11722 gcc_unreachable ();
11724 /* Assume most opt flags are just 0/1; some are 2-3, and a few might be
11725 something else. */
11726 for (i = 0; i < len; i++)
11727 if (p[i])
11728 hash = (hash << 4) ^ ((i << 2) | p[i]);
11730 return hash;
11733 /* Return nonzero if the value represented by X (an OPTIMIZATION_NODE or
11734 TARGET_OPTION_NODE) is the same as that represented
11735 by Y. */
11737 bool
11738 cl_option_hasher::equal (tree x, tree y)
11740 const_tree const xt = x;
11741 const_tree const yt = y;
11742 const char *xp;
11743 const char *yp;
11744 size_t len;
11746 if (TREE_CODE (xt) != TREE_CODE (yt))
11747 return 0;
11749 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11751 xp = (const char *)TREE_OPTIMIZATION (xt);
11752 yp = (const char *)TREE_OPTIMIZATION (yt);
11753 len = sizeof (struct cl_optimization);
11756 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11758 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11759 TREE_TARGET_OPTION (yt));
11762 else
11763 gcc_unreachable ();
11765 return (memcmp (xp, yp, len) == 0);
11768 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11770 tree
11771 build_optimization_node (struct gcc_options *opts)
11773 tree t;
11775 /* Use the cache of optimization nodes. */
11777 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11778 opts);
11780 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11781 t = *slot;
11782 if (!t)
11784 /* Insert this one into the hash table. */
11785 t = cl_optimization_node;
11786 *slot = t;
11788 /* Make a new node for next time round. */
11789 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11792 return t;
11795 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11797 tree
11798 build_target_option_node (struct gcc_options *opts)
11800 tree t;
11802 /* Use the cache of target option nodes. */
11804 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11805 opts);
11807 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11808 t = *slot;
11809 if (!t)
11811 /* Insert this one into the hash table. */
11812 t = cl_target_option_node;
11813 *slot = t;
11815 /* Make a new node for next time round. */
11816 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11819 return t;
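/* Illustrative sketch (editorial addition): both constructors above
   hash-cons their results, so building a node twice from unchanged global
   options yields pointer-identical trees.  The helper name is
   hypothetical.  */

static void ATTRIBUTE_UNUSED
example_option_node_sharing (void)
{
  tree opt1 = build_optimization_node (&global_options);
  tree opt2 = build_optimization_node (&global_options);
  tree tgt1 = build_target_option_node (&global_options);
  tree tgt2 = build_target_option_node (&global_options);

  gcc_assert (opt1 == opt2 && tgt1 == tgt2);
}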
11822 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11823 so that they aren't saved during PCH writing. */
11825 void
11826 prepare_target_option_nodes_for_pch (void)
11828 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11829 for (; iter != cl_option_hash_table->end (); ++iter)
11830 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11831 TREE_TARGET_GLOBALS (*iter) = NULL;
11834 /* Determine the "ultimate origin" of a block. The block may be an inlined
11835 instance of an inlined instance of a block which is local to an inline
11836 function, so we have to trace all of the way back through the origin chain
11837 to find out what sort of node actually served as the original seed for the
11838 given block. */
11840 tree
11841 block_ultimate_origin (const_tree block)
11843 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11845 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11846 we're trying to output the abstract instance of this function. */
11847 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11848 return NULL_TREE;
11850 if (immediate_origin == NULL_TREE)
11851 return NULL_TREE;
11852 else
11854 tree ret_val;
11855 tree lookahead = immediate_origin;
11859 ret_val = lookahead;
11860 lookahead = (TREE_CODE (ret_val) == BLOCK
11861 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11863 while (lookahead != NULL && lookahead != ret_val);
11865 /* The block's abstract origin chain may not be the *ultimate* origin of
11866 the block. It could lead to a DECL that has an abstract origin set.
11867 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11868 will give us if it has one). Note that DECL's abstract origins are
11869 supposed to be the most distant ancestor (or so decl_ultimate_origin
11870 claims), so we don't need to loop following the DECL origins. */
11871 if (DECL_P (ret_val))
11872 return DECL_ORIGIN (ret_val);
11874 return ret_val;
11878 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11879 no instruction. */
11881 bool
11882 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11884 /* Do not strip casts into or out of differing address spaces. */
11885 if (POINTER_TYPE_P (outer_type)
11886 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11888 if (!POINTER_TYPE_P (inner_type)
11889 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11890 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11891 return false;
11893 else if (POINTER_TYPE_P (inner_type)
11894 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11896 /* We already know that outer_type is not a pointer with
11897 a non-generic address space. */
11898 return false;
11901 /* Use precision rather than machine mode when we can, which gives
11902 the correct answer even for submode (bit-field) types. */
11903 if ((INTEGRAL_TYPE_P (outer_type)
11904 || POINTER_TYPE_P (outer_type)
11905 || TREE_CODE (outer_type) == OFFSET_TYPE)
11906 && (INTEGRAL_TYPE_P (inner_type)
11907 || POINTER_TYPE_P (inner_type)
11908 || TREE_CODE (inner_type) == OFFSET_TYPE))
11909 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11911 /* Otherwise fall back on comparing machine modes (e.g. for
11912 aggregate types, floats). */
11913 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
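/* Illustrative sketch (editorial addition): precision, not signedness,
   decides whether a conversion is a nop.  The helper name is
   hypothetical.  */

static void ATTRIBUTE_UNUSED
example_nop_conversion (void)
{
  /* int <-> unsigned int have the same precision, so converting between
     them generates no instruction...  */
  gcc_assert (tree_nop_conversion_p (unsigned_type_node, integer_type_node));
  /* ...whereas narrowing int to short is a real truncation.  */
  gcc_assert (!tree_nop_conversion_p (short_integer_type_node,
                                      integer_type_node));
}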
11916 /* Return true iff conversion in EXP generates no instruction. Mark
11917 it inline so that we fully inline into the stripping functions even
11918 though we have two uses of this function. */
11920 static inline bool
11921 tree_nop_conversion (const_tree exp)
11923 tree outer_type, inner_type;
11925 if (!CONVERT_EXPR_P (exp)
11926 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11927 return false;
11928 if (TREE_OPERAND (exp, 0) == error_mark_node)
11929 return false;
11931 outer_type = TREE_TYPE (exp);
11932 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11934 if (!inner_type)
11935 return false;
11937 return tree_nop_conversion_p (outer_type, inner_type);
11940 /* Return true iff conversion in EXP generates no instruction. Don't
11941 consider conversions changing the signedness. */
11943 static bool
11944 tree_sign_nop_conversion (const_tree exp)
11946 tree outer_type, inner_type;
11948 if (!tree_nop_conversion (exp))
11949 return false;
11951 outer_type = TREE_TYPE (exp);
11952 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11954 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11955 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11958 /* Strip conversions from EXP according to tree_nop_conversion and
11959 return the resulting expression. */
11961 tree
11962 tree_strip_nop_conversions (tree exp)
11964 while (tree_nop_conversion (exp))
11965 exp = TREE_OPERAND (exp, 0);
11966 return exp;
11969 /* Strip conversions from EXP according to tree_sign_nop_conversion
11970 and return the resulting expression. */
11972 tree
11973 tree_strip_sign_nop_conversions (tree exp)
11975 while (tree_sign_nop_conversion (exp))
11976 exp = TREE_OPERAND (exp, 0);
11977 return exp;
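/* Illustrative sketch (editorial addition): the difference between the two
   strippers above on a sign-changing cast.  INT_EXPR is assumed to be an
   expression of type int; the helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_strip_nops (tree int_expr)
{
  tree as_unsigned = build1 (NOP_EXPR, unsigned_type_node, int_expr);

  /* The plain stripper looks through the same-precision cast...  */
  gcc_assert (tree_strip_nop_conversions (as_unsigned) == int_expr);
  /* ...while the sign-preserving variant keeps it.  */
  gcc_assert (tree_strip_sign_nop_conversions (as_unsigned) == as_unsigned);
}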
11980 /* Avoid any floating point extensions from EXP. */
11981 tree
11982 strip_float_extensions (tree exp)
11984 tree sub, expt, subt;
11986 /* For floating point constant look up the narrowest type that can hold
11987 it properly and handle it like (type)(narrowest_type)constant.
11988 This way we can optimize for instance a=a*2.0 where "a" is float
11989 but 2.0 is double constant. */
11990 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11992 REAL_VALUE_TYPE orig;
11993 tree type = NULL;
11995 orig = TREE_REAL_CST (exp);
11996 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11997 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11998 type = float_type_node;
11999 else if (TYPE_PRECISION (TREE_TYPE (exp))
12000 > TYPE_PRECISION (double_type_node)
12001 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12002 type = double_type_node;
12003 if (type)
12004 return build_real_truncate (type, orig);
12007 if (!CONVERT_EXPR_P (exp))
12008 return exp;
12010 sub = TREE_OPERAND (exp, 0);
12011 subt = TREE_TYPE (sub);
12012 expt = TREE_TYPE (exp);
12014 if (!FLOAT_TYPE_P (subt))
12015 return exp;
12017 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12018 return exp;
12020 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12021 return exp;
12023 return strip_float_extensions (sub);
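/* Illustrative sketch (editorial addition, assuming a target whose float is
   a 32-bit IEEE type): the constant 2.0 of type double is representable
   exactly in float, so strip_float_extensions rewrites it to a float
   REAL_CST, which is what lets e.g. `a = a * 2.0' stay in single precision
   when `a' is float.  The helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_strip_float_extensions (void)
{
  tree two = build_real (double_type_node, dconst2);
  tree narrowed = strip_float_extensions (two);

  gcc_assert (TREE_TYPE (narrowed) == float_type_node);
}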
12026 /* Strip out all handled components that produce invariant
12027 offsets. */
12029 const_tree
12030 strip_invariant_refs (const_tree op)
12032 while (handled_component_p (op))
12034 switch (TREE_CODE (op))
12036 case ARRAY_REF:
12037 case ARRAY_RANGE_REF:
12038 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12039 || TREE_OPERAND (op, 2) != NULL_TREE
12040 || TREE_OPERAND (op, 3) != NULL_TREE)
12041 return NULL;
12042 break;
12044 case COMPONENT_REF:
12045 if (TREE_OPERAND (op, 2) != NULL_TREE)
12046 return NULL;
12047 break;
12049 default:;
12051 op = TREE_OPERAND (op, 0);
12054 return op;
12057 static GTY(()) tree gcc_eh_personality_decl;
12059 /* Return the GCC personality function decl. */
12061 tree
12062 lhd_gcc_personality (void)
12064 if (!gcc_eh_personality_decl)
12065 gcc_eh_personality_decl = build_personality_function ("gcc");
12066 return gcc_eh_personality_decl;
12069 /* TARGET is a call target of GIMPLE call statement
12070 (obtained by gimple_call_fn). Return true if it is
12071 OBJ_TYPE_REF representing a virtual call to a C++ method.
12072 (As opposed to OBJ_TYPE_REF representing objc calls
12073 through a cast where middle-end devirtualization machinery
12074 can't apply.) */
12076 bool
12077 virtual_method_call_p (const_tree target)
12079 if (TREE_CODE (target) != OBJ_TYPE_REF)
12080 return false;
12081 tree t = TREE_TYPE (target);
12082 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12083 t = TREE_TYPE (t);
12084 if (TREE_CODE (t) == FUNCTION_TYPE)
12085 return false;
12086 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12087 /* If we do not have BINFO associated, it means that type was built
12088 without devirtualization enabled. Do not consider this a virtual
12089 call. */
12090 if (!TYPE_BINFO (obj_type_ref_class (target)))
12091 return false;
12092 return true;
12095 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12097 tree
12098 obj_type_ref_class (const_tree ref)
12100 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12101 ref = TREE_TYPE (ref);
12102 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12103 ref = TREE_TYPE (ref);
12104 /* We look for the type THIS points to. ObjC also builds
12105 OBJ_TYPE_REF with non-method calls; their first parameter
12106 ID however also corresponds to the class type. */
12107 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12108 || TREE_CODE (ref) == FUNCTION_TYPE);
12109 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12110 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12111 return TREE_TYPE (ref);
12114 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12116 static tree
12117 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12119 unsigned int i;
12120 tree base_binfo, b;
12122 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12123 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12124 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12125 return base_binfo;
12126 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12127 return b;
12128 return NULL;
12131 /* Try to find a base info of BINFO that would have its field decl at offset
12132 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12133 found, return it, otherwise return NULL_TREE. */
12135 tree
12136 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12138 tree type = BINFO_TYPE (binfo);
12140 while (true)
12142 HOST_WIDE_INT pos, size;
12143 tree fld;
12144 int i;
12146 if (types_same_for_odr (type, expected_type))
12147 return binfo;
12148 if (offset < 0)
12149 return NULL_TREE;
12151 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12153 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12154 continue;
12156 pos = int_bit_position (fld);
12157 size = tree_to_uhwi (DECL_SIZE (fld));
12158 if (pos <= offset && (pos + size) > offset)
12159 break;
12161 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12162 return NULL_TREE;
12164 /* Offset 0 indicates the primary base, whose vtable contents are
12165 represented in the binfo for the derived class. */
12166 else if (offset != 0)
12168 tree found_binfo = NULL, base_binfo;
12169 /* Offsets in BINFO are in bytes relative to the whole structure
12170 while POS is in bits relative to the containing field. */
12171 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12172 / BITS_PER_UNIT);
12174 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12175 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12176 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12178 found_binfo = base_binfo;
12179 break;
12181 if (found_binfo)
12182 binfo = found_binfo;
12183 else
12184 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12185 binfo_offset);
12188 type = TREE_TYPE (fld);
12189 offset -= pos;
12193 /* Returns true if X is a typedef decl. */
12195 bool
12196 is_typedef_decl (const_tree x)
12198 return (x && TREE_CODE (x) == TYPE_DECL
12199 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12202 /* Returns true iff TYPE is a type variant created for a typedef. */
12204 bool
12205 typedef_variant_p (const_tree type)
12207 return is_typedef_decl (TYPE_NAME (type));
12210 /* Warn about a use of an identifier which was marked deprecated. */
12211 void
12212 warn_deprecated_use (tree node, tree attr)
12214 const char *msg;
12216 if (node == 0 || !warn_deprecated_decl)
12217 return;
12219 if (!attr)
12221 if (DECL_P (node))
12222 attr = DECL_ATTRIBUTES (node);
12223 else if (TYPE_P (node))
12225 tree decl = TYPE_STUB_DECL (node);
12226 if (decl)
12227 attr = lookup_attribute ("deprecated",
12228 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12232 if (attr)
12233 attr = lookup_attribute ("deprecated", attr);
12235 if (attr)
12236 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12237 else
12238 msg = NULL;
12240 bool w;
12241 if (DECL_P (node))
12243 if (msg)
12244 w = warning (OPT_Wdeprecated_declarations,
12245 "%qD is deprecated: %s", node, msg);
12246 else
12247 w = warning (OPT_Wdeprecated_declarations,
12248 "%qD is deprecated", node);
12249 if (w)
12250 inform (DECL_SOURCE_LOCATION (node), "declared here");
12252 else if (TYPE_P (node))
12254 tree what = NULL_TREE;
12255 tree decl = TYPE_STUB_DECL (node);
12257 if (TYPE_NAME (node))
12259 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12260 what = TYPE_NAME (node);
12261 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12262 && DECL_NAME (TYPE_NAME (node)))
12263 what = DECL_NAME (TYPE_NAME (node));
12266 if (decl)
12268 if (what)
12270 if (msg)
12271 w = warning (OPT_Wdeprecated_declarations,
12272 "%qE is deprecated: %s", what, msg);
12273 else
12274 w = warning (OPT_Wdeprecated_declarations,
12275 "%qE is deprecated", what);
12277 else
12279 if (msg)
12280 w = warning (OPT_Wdeprecated_declarations,
12281 "type is deprecated: %s", msg);
12282 else
12283 w = warning (OPT_Wdeprecated_declarations,
12284 "type is deprecated");
12286 if (w)
12287 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12289 else
12291 if (what)
12293 if (msg)
12294 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12295 what, msg);
12296 else
12297 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12299 else
12301 if (msg)
12302 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12303 msg);
12304 else
12305 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12311 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12312 somewhere in it. */
12314 bool
12315 contains_bitfld_component_ref_p (const_tree ref)
12317 while (handled_component_p (ref))
12319 if (TREE_CODE (ref) == COMPONENT_REF
12320 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12321 return true;
12322 ref = TREE_OPERAND (ref, 0);
12325 return false;
12328 /* Try to determine whether a TRY_CATCH expression can fall through.
12329 This is a subroutine of block_may_fallthru. */
12331 static bool
12332 try_catch_may_fallthru (const_tree stmt)
12334 tree_stmt_iterator i;
12336 /* If the TRY block can fall through, the whole TRY_CATCH can
12337 fall through. */
12338 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12339 return true;
12341 i = tsi_start (TREE_OPERAND (stmt, 1));
12342 switch (TREE_CODE (tsi_stmt (i)))
12344 case CATCH_EXPR:
12345 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12346 catch expression and a body. The whole TRY_CATCH may fall
12347 through iff any of the catch bodies falls through. */
12348 for (; !tsi_end_p (i); tsi_next (&i))
12350 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12351 return true;
12353 return false;
12355 case EH_FILTER_EXPR:
12356 /* The exception filter expression only matters if there is an
12357 exception. If the exception does not match EH_FILTER_TYPES,
12358 we will execute EH_FILTER_FAILURE, and we will fall through
12359 if that falls through. If the exception does match
12360 EH_FILTER_TYPES, the stack unwinder will continue up the
12361 stack, so we will not fall through. We don't know whether we
12362 will throw an exception which matches EH_FILTER_TYPES or not,
12363 so we just ignore EH_FILTER_TYPES and assume that we might
12364 throw an exception which doesn't match. */
12365 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12367 default:
12368 /* This case represents statements to be executed when an
12369 exception occurs. Those statements are implicitly followed
12370 by a RESX statement to resume execution after the exception.
12371 So in this case the TRY_CATCH never falls through. */
12372 return false;
12376 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12377 need not be 100% accurate; simply be conservative and return true if we
12378 don't know. This is used only to avoid stupidly generating extra code.
12379 If we're wrong, we'll just delete the extra code later. */
12381 bool
12382 block_may_fallthru (const_tree block)
12384 /* This CONST_CAST is okay because expr_last returns its argument
12385 unmodified and we assign it to a const_tree. */
12386 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12388 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12390 case GOTO_EXPR:
12391 case RETURN_EXPR:
12392 /* Easy cases. If the last statement of the block implies
12393 control transfer, then we can't fall through. */
12394 return false;
12396 case SWITCH_EXPR:
12397 /* If there is a default: label or case labels cover all possible
12398 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12399 to some case label in all cases and all we care is whether the
12400 SWITCH_BODY falls through. */
12401 if (SWITCH_ALL_CASES_P (stmt))
12402 return block_may_fallthru (SWITCH_BODY (stmt));
12403 return true;
12405 case COND_EXPR:
12406 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12407 return true;
12408 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12410 case BIND_EXPR:
12411 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12413 case TRY_CATCH_EXPR:
12414 return try_catch_may_fallthru (stmt);
12416 case TRY_FINALLY_EXPR:
12417 /* The finally clause is always executed after the try clause,
12418 so if it does not fall through, then the try-finally will not
12419 fall through. Otherwise, if the try clause does not fall
12420 through, then when the finally clause falls through it will
12421 resume execution wherever the try clause was going. So the
12422 whole try-finally will only fall through if both the try
12423 clause and the finally clause fall through. */
12424 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12425 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12427 case MODIFY_EXPR:
12428 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12429 stmt = TREE_OPERAND (stmt, 1);
12430 else
12431 return true;
12432 /* FALLTHRU */
12434 case CALL_EXPR:
12435 /* Functions that do not return do not fall through. */
12436 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12438 case CLEANUP_POINT_EXPR:
12439 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12441 case TARGET_EXPR:
12442 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12444 case ERROR_MARK:
12445 return true;
12447 default:
12448 return lang_hooks.block_may_fallthru (stmt);
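/* Illustrative sketch (editorial addition): a block whose last statement is
   a bare `return;' cannot fall off its end, so callers need not emit a
   fallthru path for it.  The helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_block_may_fallthru (void)
{
  tree ret = build1 (RETURN_EXPR, void_type_node, NULL_TREE);

  gcc_assert (!block_may_fallthru (ret));
  /* Anything block_may_fallthru does not recognize is conservatively
     assumed to fall through (the default language hook returns true).  */
}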
12452 /* True if we are using EH to handle cleanups. */
12453 static bool using_eh_for_cleanups_flag = false;
12455 /* This routine is called from front ends to indicate eh should be used for
12456 cleanups. */
12457 void
12458 using_eh_for_cleanups (void)
12460 using_eh_for_cleanups_flag = true;
12463 /* Query whether EH is used for cleanups. */
12464 bool
12465 using_eh_for_cleanups_p (void)
12467 return using_eh_for_cleanups_flag;
12470 /* Wrapper for tree_code_name to ensure that tree code is valid */
12471 const char *
12472 get_tree_code_name (enum tree_code code)
12474 const char *invalid = "<invalid tree code>";
12476 if (code >= MAX_TREE_CODES)
12477 return invalid;
12479 return tree_code_name[code];
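/* Illustrative sketch (editorial addition): the wrapper above is what
   diagnostics and dumpers should use instead of indexing tree_code_name
   directly, because an out-of-range code degrades to a marker string
   rather than reading past the table.  The helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_tree_code_name (void)
{
  gcc_assert (!strcmp (get_tree_code_name (INTEGER_CST), "integer_cst"));
}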
12482 /* Drops the TREE_OVERFLOW flag from T. */
12484 tree
12485 drop_tree_overflow (tree t)
12487 gcc_checking_assert (TREE_OVERFLOW (t));
12489 /* For tree codes with a sharing machinery re-build the result. */
12490 if (TREE_CODE (t) == INTEGER_CST)
12491 return wide_int_to_tree (TREE_TYPE (t), wi::to_wide (t));
12493 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12494 and canonicalize the result. */
12495 if (TREE_CODE (t) == VECTOR_CST)
12497 tree_vector_builder builder;
12498 builder.new_unary_operation (TREE_TYPE (t), t, true);
12499 unsigned int count = builder.encoded_nelts ();
12500 for (unsigned int i = 0; i < count; ++i)
12502 tree elt = VECTOR_CST_ELT (t, i);
12503 if (TREE_OVERFLOW (elt))
12504 elt = drop_tree_overflow (elt);
12505 builder.quick_push (elt);
12507 return builder.build ();
12510 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12511 and drop the flag. */
12512 t = copy_node (t);
12513 TREE_OVERFLOW (t) = 0;
12515 /* For constants that contain nested constants, drop the flag
12516 from those as well. */
12517 if (TREE_CODE (t) == COMPLEX_CST)
12519 if (TREE_OVERFLOW (TREE_REALPART (t)))
12520 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12521 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12522 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12525 return t;
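/* Illustrative sketch (editorial addition): the usual calling pattern for
   drop_tree_overflow - clear the sticky overflow bit from a folded
   constant before reusing its value.  The helper name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_drop_overflow (tree cst)
{
  if (CONSTANT_CLASS_P (cst) && TREE_OVERFLOW (cst))
    cst = drop_tree_overflow (cst);
  return cst;
}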
12528 /* Given a memory reference expression T, return its base address.
12529 The base address of a memory reference expression is the main
12530 object being referenced. For instance, the base address for
12531 'array[i].fld[j]' is 'array'. You can think of this as stripping
12532 away the offset part from a memory address.
12534 This function calls handled_component_p to strip away all the inner
12535 parts of the memory reference until it reaches the base object. */
12537 tree
12538 get_base_address (tree t)
12540 while (handled_component_p (t))
12541 t = TREE_OPERAND (t, 0);
12543 if ((TREE_CODE (t) == MEM_REF
12544 || TREE_CODE (t) == TARGET_MEM_REF)
12545 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12546 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12548 /* ??? Either the alias oracle or all callers need to properly deal
12549 with WITH_SIZE_EXPRs before we can look through those. */
12550 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12551 return NULL_TREE;
12553 return t;
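/* Illustrative sketch (editorial addition): a typical use of
   get_base_address - peel a reference such as `array[i].fld[j]' down to
   the declaration `array' and inspect its flags.  The helper name is
   hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_ref_based_on_readonly_decl_p (tree ref)
{
  tree base = get_base_address (ref);
  return base && DECL_P (base) && TREE_READONLY (base);
}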
12556 /* Return a tree of sizetype representing the size, in bytes, of the element
12557 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12559 tree
12560 array_ref_element_size (tree exp)
12562 tree aligned_size = TREE_OPERAND (exp, 3);
12563 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12564 location_t loc = EXPR_LOCATION (exp);
12566 /* If a size was specified in the ARRAY_REF, it's the size measured
12567 in alignment units of the element type. So multiply by that value. */
12568 if (aligned_size)
12570 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12571 sizetype from another type of the same width and signedness. */
12572 if (TREE_TYPE (aligned_size) != sizetype)
12573 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12574 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12575 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12578 /* Otherwise, take the size from that of the element type. Substitute
12579 any PLACEHOLDER_EXPR that we have. */
12580 else
12581 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12584 /* Return a tree representing the lower bound of the array mentioned in
12585 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12587 tree
12588 array_ref_low_bound (tree exp)
12590 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12592 /* If a lower bound is specified in EXP, use it. */
12593 if (TREE_OPERAND (exp, 2))
12594 return TREE_OPERAND (exp, 2);
12596 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12597 substituting for a PLACEHOLDER_EXPR as needed. */
12598 if (domain_type && TYPE_MIN_VALUE (domain_type))
12599 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12601 /* Otherwise, return a zero of the appropriate type. */
12602 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12605 /* Return a tree representing the upper bound of the array mentioned in
12606 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12608 tree
12609 array_ref_up_bound (tree exp)
12611 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12613 /* If there is a domain type and it has an upper bound, use it, substituting
12614 for a PLACEHOLDER_EXPR as needed. */
12615 if (domain_type && TYPE_MAX_VALUE (domain_type))
12616 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12618 /* Otherwise fail. */
12619 return NULL_TREE;
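/* Illustrative sketch (editorial addition): for an ARRAY_REF such as `a[i]'
   with `int a[5]' the bounds above are 0 and 4; for an incomplete array
   like `extern int b[]' there is no upper bound and array_ref_up_bound
   returns NULL_TREE.  The helper name is hypothetical and EXP is assumed
   to be an ARRAY_REF or ARRAY_RANGE_REF.  */

static bool ATTRIBUTE_UNUSED
example_known_array_extent_p (tree exp)
{
  return array_ref_up_bound (exp) != NULL_TREE;
}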
12622 /* Returns true if REF is an array reference or a component reference
12623 to an array at the end of a structure.
12624 If this is the case, the array may be allocated larger
12625 than its upper bound implies. */
12627 bool
12628 array_at_struct_end_p (tree ref)
12630 tree atype;
12632 if (TREE_CODE (ref) == ARRAY_REF
12633 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12635 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12636 ref = TREE_OPERAND (ref, 0);
12638 else if (TREE_CODE (ref) == COMPONENT_REF
12639 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12640 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12641 else
12642 return false;
12644 if (TREE_CODE (ref) == STRING_CST)
12645 return false;
12647 tree ref_to_array = ref;
12648 while (handled_component_p (ref))
12650 /* If the reference chain contains a component reference to a
12651 non-union type and there follows another field the reference
12652 is not at the end of a structure. */
12653 if (TREE_CODE (ref) == COMPONENT_REF)
12655 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12657 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12658 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12659 nextf = DECL_CHAIN (nextf);
12660 if (nextf)
12661 return false;
12664 /* If we have a multi-dimensional array we do not consider
12665 a non-innermost dimension as flex array if the whole
12666 multi-dimensional array is at struct end.
12667 Same for an array of aggregates with a trailing array
12668 member. */
12669 else if (TREE_CODE (ref) == ARRAY_REF)
12670 return false;
12671 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12673 /* If we view an underlying object as something else, then what we
12674 gathered up to now is what we have to rely on. */
12675 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12676 break;
12677 else
12678 gcc_unreachable ();
12680 ref = TREE_OPERAND (ref, 0);
12683 /* The array now is at struct end. Treat flexible arrays as
12684 always subject to extend, even into just padding constrained by
12685 an underlying decl. */
12686 if (! TYPE_SIZE (atype)
12687 || ! TYPE_DOMAIN (atype)
12688 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12689 return true;
12691 if (TREE_CODE (ref) == MEM_REF
12692 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
12693 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
12695 /* If the reference is based on a declared entity, the size of the array
12696 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
12697 if (DECL_P (ref)
12698 && !(flag_unconstrained_commons
12699 && VAR_P (ref) && DECL_COMMON (ref))
12700 && DECL_SIZE_UNIT (ref)
12701 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12703 /* Check whether the array domain covers all of the available
12704 padding. */
12705 HOST_WIDE_INT offset;
12706 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12707 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12708 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12709 return true;
12710 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12711 return true;
12713 /* If at least one extra element fits it is a flexarray. */
12714 if (wi::les_p ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12715 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
12716 + 2)
12717 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
12718 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
12719 return true;
12721 return false;
12724 return true;
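/* Illustrative sketch (editorial addition): given

     struct S { int n; int data[1]; } *p;

   an access like p->data[i] is an ARRAY_REF for which the predicate above
   typically returns true, so bounds warnings must not assume the nominal
   domain [0, 0] is exact.  The helper name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_may_be_flexarray_ref_p (tree ref)
{
  return ((TREE_CODE (ref) == ARRAY_REF || TREE_CODE (ref) == ARRAY_RANGE_REF)
          && array_at_struct_end_p (ref));
}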
12727 /* Return a tree representing the offset, in bytes, of the field referenced
12728 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12730 tree
12731 component_ref_field_offset (tree exp)
12733 tree aligned_offset = TREE_OPERAND (exp, 2);
12734 tree field = TREE_OPERAND (exp, 1);
12735 location_t loc = EXPR_LOCATION (exp);
12737 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12738 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12739 value. */
12740 if (aligned_offset)
12742 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12743 sizetype from another type of the same width and signedness. */
12744 if (TREE_TYPE (aligned_offset) != sizetype)
12745 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12746 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12747 size_int (DECL_OFFSET_ALIGN (field)
12748 / BITS_PER_UNIT));
12751 /* Otherwise, take the offset from that of the field. Substitute
12752 any PLACEHOLDER_EXPR that we have. */
12753 else
12754 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12757 /* Return the machine mode of T. For vectors, returns the mode of the
12758 inner type. The main use case is to feed the result to HONOR_NANS,
12759 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12761 machine_mode
12762 element_mode (const_tree t)
12764 if (!TYPE_P (t))
12765 t = TREE_TYPE (t);
12766 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12767 t = TREE_TYPE (t);
12768 return TYPE_MODE (t);
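/* Illustrative sketch (editorial addition): the intended use of
   element_mode - query floating-point semantics of a possibly vector or
   complex type without tripping over BLKmode.  The helper name is
   hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_type_honors_nans_p (const_tree type)
{
  return HONOR_NANS (element_mode (type));
}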
12771 /* Vector types need to re-check the target flags each time we report
12772 the machine mode. We need to do this because attribute target can
12773 change the result of vector_mode_supported_p and have_regs_of_mode
12774 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12775 change on a per-function basis. */
12776 /* ??? Possibly a better solution is to run through all the types
12777 referenced by a function and re-compute the TYPE_MODE once, rather
12778 than make the TYPE_MODE macro call a function. */
12780 machine_mode
12781 vector_type_mode (const_tree t)
12783 machine_mode mode;
12785 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
12787 mode = t->type_common.mode;
12788 if (VECTOR_MODE_P (mode)
12789 && (!targetm.vector_mode_supported_p (mode)
12790 || !have_regs_of_mode[mode]))
12792 scalar_int_mode innermode;
12794 /* For integers, try mapping it to a same-sized scalar mode. */
12795 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
12797 unsigned int size = (TYPE_VECTOR_SUBPARTS (t)
12798 * GET_MODE_BITSIZE (innermode));
12799 scalar_int_mode mode;
12800 if (int_mode_for_size (size, 0).exists (&mode)
12801 && have_regs_of_mode[mode])
12802 return mode;
12805 return BLKmode;
12808 return mode;
12811 /* Verify that basic properties of T match TV and thus T can be a variant of
12812 TV. TV should be the more specified variant (i.e. the main variant). */
12814 static bool
12815 verify_type_variant (const_tree t, tree tv)
12817 /* Type variant can differ by:
12819 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12820 ENCODE_QUAL_ADDR_SPACE.
12821 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
12822 in this case some values may not be set in the variant types
12823 (see TYPE_COMPLETE_P checks).
12824 - it is possible to have TYPE_ARTIFICIAL variant of a non-artificial type
12825 - by TYPE_NAME and attributes (i.e. when the variant originates via a typedef)
12826 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12827 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12828 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12829 this is necessary to make it possible to merge types from different TUs
12830 - arrays, pointers and references may have TREE_TYPE that is a variant
12831 of TREE_TYPE of their main variants.
12832 - aggregates may have new TYPE_FIELDS list that list variants of
12833 the main variant TYPE_FIELDS.
12834 - vector types may differ by TYPE_VECTOR_OPAQUE
12837 /* Convenience macro for matching individual fields. */
12838 #define verify_variant_match(flag) \
12839 do { \
12840 if (flag (tv) != flag (t)) \
12842 error ("type variant differs by " #flag "."); \
12843 debug_tree (tv); \
12844 return false; \
12846 } while (false)
12848 /* tree_base checks. */
12850 verify_variant_match (TREE_CODE);
12851 /* FIXME: Ada builds non-artificial variants of artificial types. */
12852 if (TYPE_ARTIFICIAL (tv) && 0)
12853 verify_variant_match (TYPE_ARTIFICIAL);
12854 if (POINTER_TYPE_P (tv))
12855 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
12856 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
12857 verify_variant_match (TYPE_UNSIGNED);
12858 verify_variant_match (TYPE_PACKED);
12859 if (TREE_CODE (t) == REFERENCE_TYPE)
12860 verify_variant_match (TYPE_REF_IS_RVALUE);
12861 if (AGGREGATE_TYPE_P (t))
12862 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
12863 else
12864 verify_variant_match (TYPE_SATURATING);
12865 /* FIXME: This check trigger during libstdc++ build. */
12866 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
12867 verify_variant_match (TYPE_FINAL_P);
12869 /* tree_type_common checks. */
12871 if (COMPLETE_TYPE_P (t))
12873 verify_variant_match (TYPE_MODE);
12874 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
12875 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
12876 verify_variant_match (TYPE_SIZE);
12877 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
12878 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
12879 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
12881 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
12882 TYPE_SIZE_UNIT (tv), 0));
12883 error ("type variant has different TYPE_SIZE_UNIT");
12884 debug_tree (tv);
12885 error ("type variant's TYPE_SIZE_UNIT");
12886 debug_tree (TYPE_SIZE_UNIT (tv));
12887 error ("type's TYPE_SIZE_UNIT");
12888 debug_tree (TYPE_SIZE_UNIT (t));
12889 return false;
12892 verify_variant_match (TYPE_PRECISION);
12893 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
12894 if (RECORD_OR_UNION_TYPE_P (t))
12895 verify_variant_match (TYPE_TRANSPARENT_AGGR);
12896 else if (TREE_CODE (t) == ARRAY_TYPE)
12897 verify_variant_match (TYPE_NONALIASED_COMPONENT);
12898 /* During LTO we merge variant lists from different translation units
12899 that may differ by TYPE_CONTEXT, which in turn may point
12900 to TRANSLATION_UNIT_DECL.
12901 Ada also builds variants of types with different TYPE_CONTEXT. */
12902 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
12903 verify_variant_match (TYPE_CONTEXT);
12904 verify_variant_match (TYPE_STRING_FLAG);
12905 if (TYPE_ALIAS_SET_KNOWN_P (t))
12907 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
12908 debug_tree (tv);
12909 return false;
12912 /* tree_type_non_common checks. */
12914 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12915 and dangle the pointer from time to time. */
12916 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
12917 && (in_lto_p || !TYPE_VFIELD (tv)
12918 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
12920 error ("type variant has different TYPE_VFIELD");
12921 debug_tree (tv);
12922 return false;
12924 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
12925 || TREE_CODE (t) == INTEGER_TYPE
12926 || TREE_CODE (t) == BOOLEAN_TYPE
12927 || TREE_CODE (t) == REAL_TYPE
12928 || TREE_CODE (t) == FIXED_POINT_TYPE)
12930 verify_variant_match (TYPE_MAX_VALUE);
12931 verify_variant_match (TYPE_MIN_VALUE);
12933 if (TREE_CODE (t) == METHOD_TYPE)
12934 verify_variant_match (TYPE_METHOD_BASETYPE);
12935 if (TREE_CODE (t) == OFFSET_TYPE)
12936 verify_variant_match (TYPE_OFFSET_BASETYPE);
12937 if (TREE_CODE (t) == ARRAY_TYPE)
12938 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
12939 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
12940 or even type's main variant. This is needed to make bootstrap pass
12941 and the bug seems new in GCC 5.
12942 C++ FE should be updated to make this consistent and we should check
12943 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
12944 is a match with main variant.
12946 Also disable the check for Java for now because of a parser hack that
12947 first builds a dummy BINFO and then sometimes replaces it by the real
12948 BINFO in some of the copies. */
12949 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
12950 && TYPE_BINFO (t) != TYPE_BINFO (tv)
12951 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
12952 Since there is no cheap way to tell a C++ type from a Java type w/o LTO, do checking
12953 at LTO time only. */
12954 && (in_lto_p && odr_type_p (t)))
12956 error ("type variant has different TYPE_BINFO");
12957 debug_tree (tv);
12958 error ("type variant's TYPE_BINFO");
12959 debug_tree (TYPE_BINFO (tv));
12960 error ("type's TYPE_BINFO");
12961 debug_tree (TYPE_BINFO (t));
12962 return false;
12965 /* Check various uses of TYPE_VALUES_RAW. */
12966 if (TREE_CODE (t) == ENUMERAL_TYPE)
12967 verify_variant_match (TYPE_VALUES);
12968 else if (TREE_CODE (t) == ARRAY_TYPE)
12969 verify_variant_match (TYPE_DOMAIN);
12970 /* Permit incomplete variants of complete type. While FEs may complete
12971 all variants, this does not happen for C++ templates in all cases. */
12972 else if (RECORD_OR_UNION_TYPE_P (t)
12973 && COMPLETE_TYPE_P (t)
12974 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
12976 tree f1, f2;
12978 /* Fortran builds qualified variants as new records with items of
12979 qualified type. Verify that they look the same. */
12980 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
12981 f1 && f2;
12982 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
12983 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
12984 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
12985 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
12986 /* FIXME: gfc_nonrestricted_type builds all types as variants
12987 with exception of pointer types. It deeply copies the type
12988 which means that we may end up with a variant type
12989 referring non-variant pointer. We may change it to
12990 produce types as variants, too, like
12991 objc_get_protocol_qualified_type does. */
12992 && !POINTER_TYPE_P (TREE_TYPE (f1)))
12993 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
12994 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
12995 break;
12996 if (f1 || f2)
12998 error ("type variant has different TYPE_FIELDS");
12999 debug_tree (tv);
13000 error ("first mismatch is field");
13001 debug_tree (f1);
13002 error ("and field");
13003 debug_tree (f2);
13004 return false;
13007 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13008 verify_variant_match (TYPE_ARG_TYPES);
13009 /* For C++ the qualified variant of array type is really an array type
13010 of qualified TREE_TYPE.
13011 objc builds variants of pointer where pointer to type is a variant, too
13012 in objc_get_protocol_qualified_type. */
13013 if (TREE_TYPE (t) != TREE_TYPE (tv)
13014 && ((TREE_CODE (t) != ARRAY_TYPE
13015 && !POINTER_TYPE_P (t))
13016 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13017 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13019 error ("type variant has different TREE_TYPE");
13020 debug_tree (tv);
13021 error ("type variant's TREE_TYPE");
13022 debug_tree (TREE_TYPE (tv));
13023 error ("type's TREE_TYPE");
13024 debug_tree (TREE_TYPE (t));
13025 return false;
13027 if (type_with_alias_set_p (t)
13028 && !gimple_canonical_types_compatible_p (t, tv, false))
13030 error ("type is not compatible with its variant");
13031 debug_tree (tv);
13032 error ("type variant's TREE_TYPE");
13033 debug_tree (TREE_TYPE (tv));
13034 error ("type's TREE_TYPE");
13035 debug_tree (TREE_TYPE (t));
13036 return false;
13038 return true;
13039 #undef verify_variant_match
13043 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13044 the middle-end types_compatible_p function. It needs to avoid
13045 claiming types are different for types that should be treated
13046 the same with respect to TBAA. Canonical types are also used
13047 for IL consistency checks via the useless_type_conversion_p
13048 predicate which does not handle all type kinds itself but falls
13049 back to pointer-comparison of TYPE_CANONICAL for aggregates
13050 for example. */
13052 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13053 type calculation because we need to allow inter-operability between signed
13054 and unsigned variants. */
13056 bool
13057 type_with_interoperable_signedness (const_tree type)
13059 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13060 signed char and unsigned char. Similarly the Fortran FE builds
13061 C_SIZE_T as a signed type, while C defines it unsigned. */
13063 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13064 == INTEGER_TYPE
13065 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13066 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
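/* Illustrative sketch (editorial addition): char-precision integer types
   are among those whose signedness is ignored for canonical type merging,
   which is what keeps Fortran's C_SIGNED_CHAR interoperable with both
   char flavours.  The helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_interoperable_signedness (void)
{
  gcc_assert (type_with_interoperable_signedness (signed_char_type_node));
}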
13069 /* Return true iff T1 and T2 are structurally identical for what
13070 TBAA is concerned.
13071 This function is used both by lto.c canonical type merging and by the
13072 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13073 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13074 only for LTO because only in these cases TYPE_CANONICAL equivalence
13075 corresponds to the one defined by gimple_canonical_types_compatible_p. */
13077 bool
13078 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13079 bool trust_type_canonical)
13081 /* Type variants should be same as the main variant. When not doing sanity
13082 checking to verify this fact, go to main variants and save some work. */
13083 if (trust_type_canonical)
13085 t1 = TYPE_MAIN_VARIANT (t1);
13086 t2 = TYPE_MAIN_VARIANT (t2);
13089 /* Check first for the obvious case of pointer identity. */
13090 if (t1 == t2)
13091 return true;
13093 /* Check that we have two types to compare. */
13094 if (t1 == NULL_TREE || t2 == NULL_TREE)
13095 return false;
13097 /* We consider complete types always compatible with incomplete types.
13098 This does not make sense for canonical type calculation and thus we
13099 need to ensure that we are never called on it.
13101 FIXME: For more correctness the function probably should have three modes
13102 1) mode assuming that types are complete, matching their structure
13103 2) mode allowing incomplete types but producing equivalence classes
13104 and thus ignoring all info from complete types
13105 3) mode allowing incomplete types to match complete but checking
13106 compatibility between complete types.
13108 1 and 2 can be used for canonical type calculation. 3 is the real
13109 definition of type compatibility that can be used e.g. for warnings during
13110 declaration merging. */
13112 gcc_assert (!trust_type_canonical
13113 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13114 /* If the types have been previously registered and found equal
13115 they still are. */
13117 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13118 && trust_type_canonical)
13120 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13121 they are always NULL, but they are set to non-NULL for types
13122 constructed by build_pointer_type and variants. In this case the
13123 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13124 all pointers are considered equal). Be sure not to return false
13125 negatives. */
13126 gcc_checking_assert (canonical_type_used_p (t1)
13127 && canonical_type_used_p (t2));
13128 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13131 /* Can't be the same type if the types don't have the same code. */
13132 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13133 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13134 return false;
13136 /* Qualifiers do not matter for canonical type comparison purposes. */
13138 /* Void types and nullptr types are always the same. */
13139 if (TREE_CODE (t1) == VOID_TYPE
13140 || TREE_CODE (t1) == NULLPTR_TYPE)
13141 return true;
13143 /* Can't be the same type if they have different mode. */
13144 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13145 return false;
13147 /* Non-aggregate types can be handled cheaply. */
13148 if (INTEGRAL_TYPE_P (t1)
13149 || SCALAR_FLOAT_TYPE_P (t1)
13150 || FIXED_POINT_TYPE_P (t1)
13151 || TREE_CODE (t1) == VECTOR_TYPE
13152 || TREE_CODE (t1) == COMPLEX_TYPE
13153 || TREE_CODE (t1) == OFFSET_TYPE
13154 || POINTER_TYPE_P (t1))
13156 /* Can't be the same type if they have different precision. */
13157 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13158 return false;
13160 /* In some cases the signed and unsigned types are required to be
13161 inter-operable. */
13162 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13163 && !type_with_interoperable_signedness (t1))
13164 return false;
13166 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13167 interoperable with "signed char". Unless all frontends are revisited
13168 to agree on these types, we must ignore the flag completely. */
13170 /* The Fortran standard defines a C_PTR type that is compatible with every
13171 C pointer. For this reason we need to glob all pointers into one.
13172 Still, pointers in different address spaces are not compatible. */
13173 if (POINTER_TYPE_P (t1))
13175 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13176 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13177 return false;
13180 /* Tail-recurse to components. */
13181 if (TREE_CODE (t1) == VECTOR_TYPE
13182 || TREE_CODE (t1) == COMPLEX_TYPE)
13183 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13184 TREE_TYPE (t2),
13185 trust_type_canonical);
13187 return true;
13190 /* Do type-specific comparisons. */
13191 switch (TREE_CODE (t1))
13193 case ARRAY_TYPE:
13194 /* Array types are the same if the element types are the same and
13195 the number of elements is the same. */
13196 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13197 trust_type_canonical)
13198 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13199 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13200 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13201 return false;
13202 else
13204 tree i1 = TYPE_DOMAIN (t1);
13205 tree i2 = TYPE_DOMAIN (t2);
13207 /* For an incomplete external array, the type domain can be
13208 NULL_TREE. Check this condition also. */
13209 if (i1 == NULL_TREE && i2 == NULL_TREE)
13210 return true;
13211 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13212 return false;
13213 else
13215 tree min1 = TYPE_MIN_VALUE (i1);
13216 tree min2 = TYPE_MIN_VALUE (i2);
13217 tree max1 = TYPE_MAX_VALUE (i1);
13218 tree max2 = TYPE_MAX_VALUE (i2);
13220 /* The minimum/maximum values have to be the same. */
13221 if ((min1 == min2
13222 || (min1 && min2
13223 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13224 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13225 || operand_equal_p (min1, min2, 0))))
13226 && (max1 == max2
13227 || (max1 && max2
13228 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13229 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13230 || operand_equal_p (max1, max2, 0)))))
13231 return true;
13232 else
13233 return false;
13237 case METHOD_TYPE:
13238 case FUNCTION_TYPE:
13239 /* Function types are the same if the return type and argument types
13240 are the same. */
13241 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13242 trust_type_canonical))
13243 return false;
13245 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13246 return true;
13247 else
13249 tree parms1, parms2;
13251 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13252 parms1 && parms2;
13253 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13255 if (!gimple_canonical_types_compatible_p
13256 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13257 trust_type_canonical))
13258 return false;
13261 if (parms1 || parms2)
13262 return false;
13264 return true;
13267 case RECORD_TYPE:
13268 case UNION_TYPE:
13269 case QUAL_UNION_TYPE:
13271 tree f1, f2;
13273 /* Don't try to compare variants of an incomplete type, before
13274 TYPE_FIELDS has been copied around. */
13275 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13276 return true;
13279 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13280 return false;
13282 /* For aggregate types, all the fields must be the same. */
13283 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13284 f1 || f2;
13285 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13287 /* Skip non-fields and zero-sized fields. */
13288 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13289 || (DECL_SIZE (f1)
13290 && integer_zerop (DECL_SIZE (f1)))))
13291 f1 = TREE_CHAIN (f1);
13292 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13293 || (DECL_SIZE (f2)
13294 && integer_zerop (DECL_SIZE (f2)))))
13295 f2 = TREE_CHAIN (f2);
13296 if (!f1 || !f2)
13297 break;
13298 /* The fields must have the same name, offset and type. */
13299 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13300 || !gimple_compare_field_offset (f1, f2)
13301 || !gimple_canonical_types_compatible_p
13302 (TREE_TYPE (f1), TREE_TYPE (f2),
13303 trust_type_canonical))
13304 return false;
13307 /* If one aggregate has more fields than the other, they
13308 are not the same. */
13309 if (f1 || f2)
13310 return false;
13312 return true;
13315 default:
13316 /* Consider all types with language specific trees in them mutually
13317 compatible. This is executed only from verify_type and false
13318 positives can be tolerated. */
13319 gcc_assert (!in_lto_p);
13320 return true;
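/* Illustrative sketch (editorial addition): because all generic-address-space
   pointers are globbed together for TBAA, two structurally unrelated
   pointer types still compare compatible here.  The helper name is
   hypothetical.  */

static void ATTRIBUTE_UNUSED
example_canonical_pointer_compat (void)
{
  tree p1 = build_pointer_type (integer_type_node);
  tree p2 = build_pointer_type (double_type_node);

  gcc_assert (gimple_canonical_types_compatible_p (p1, p2, false));
}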
13324 /* Verify type T. */
13326 void
13327 verify_type (const_tree t)
13329 bool error_found = false;
13330 tree mv = TYPE_MAIN_VARIANT (t);
13331 if (!mv)
13333 error ("Main variant is not defined");
13334 error_found = true;
13336 else if (mv != TYPE_MAIN_VARIANT (mv))
13338 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13339 debug_tree (mv);
13340 error_found = true;
13342 else if (t != mv && !verify_type_variant (t, mv))
13343 error_found = true;
13345 tree ct = TYPE_CANONICAL (t);
13346 if (!ct)
13348 else if (TYPE_CANONICAL (t) != ct)
13350 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13351 debug_tree (ct);
13352 error_found = true;
13354 /* Method and function types can not be used to address memory and thus
13355 TYPE_CANONICAL really matters only for determining useless conversions.
13357 FIXME: C++ FE produce declarations of builtin functions that are not
13358 compatible with main variants. */
13359 else if (TREE_CODE (t) == FUNCTION_TYPE)
13361 else if (t != ct
13362 /* FIXME: gimple_canonical_types_compatible_p can not compare types
13363 with variably sized arrays because their sizes possibly
13364 gimplified to different variables. */
13365 && !variably_modified_type_p (ct, NULL)
13366 && !gimple_canonical_types_compatible_p (t, ct, false))
13368 error ("TYPE_CANONICAL is not compatible");
13369 debug_tree (ct);
13370 error_found = true;
13373 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13374 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13376 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13377 debug_tree (ct);
13378 error_found = true;
13380 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13382 error ("TYPE_CANONICAL of main variant is not main variant");
13383 debug_tree (ct);
13384 debug_tree (TYPE_MAIN_VARIANT (ct));
13385 error_found = true;
13389 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13390 if (RECORD_OR_UNION_TYPE_P (t))
13392 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13393 and dangle the pointer from time to time. */
13394 if (TYPE_VFIELD (t)
13395 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13396 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13398 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13399 debug_tree (TYPE_VFIELD (t));
13400 error_found = true;
13403 else if (TREE_CODE (t) == POINTER_TYPE)
13405 if (TYPE_NEXT_PTR_TO (t)
13406 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13408 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13409 debug_tree (TYPE_NEXT_PTR_TO (t));
13410 error_found = true;
13413 else if (TREE_CODE (t) == REFERENCE_TYPE)
13415 if (TYPE_NEXT_REF_TO (t)
13416 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13418 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13419 debug_tree (TYPE_NEXT_REF_TO (t));
13420 error_found = true;
13423 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13424 || TREE_CODE (t) == FIXED_POINT_TYPE)
13426 /* FIXME: The following check should pass:
13427 useless_type_conversion_p (const_cast <tree> (t),
13428 TREE_TYPE (TYPE_MIN_VALUE (t)))
13429 but does not for C sizetypes in LTO. */
13432 /* Check various uses of TYPE_MAXVAL_RAW. */
13433 if (RECORD_OR_UNION_TYPE_P (t))
13435 if (!TYPE_BINFO (t))
13437 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13439 error ("TYPE_BINFO is not TREE_BINFO");
13440 debug_tree (TYPE_BINFO (t));
13441 error_found = true;
13443 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
13445 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13446 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13447 error_found = true;
13450 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13452 if (TYPE_METHOD_BASETYPE (t)
13453 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13454 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13456 error ("TYPE_METHOD_BASETYPE is not record nor union");
13457 debug_tree (TYPE_METHOD_BASETYPE (t));
13458 error_found = true;
13461 else if (TREE_CODE (t) == OFFSET_TYPE)
13463 if (TYPE_OFFSET_BASETYPE (t)
13464 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13465 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13467 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13468 debug_tree (TYPE_OFFSET_BASETYPE (t));
13469 error_found = true;
13472 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13473 || TREE_CODE (t) == FIXED_POINT_TYPE)
13475 /* FIXME: The following check should pass:
13476 useless_type_conversion_p (const_cast <tree> (t),
13477 TREE_TYPE (TYPE_MAX_VALUE (t)))
13478 but does not for C sizetypes in LTO. */
13480 else if (TREE_CODE (t) == ARRAY_TYPE)
13482 if (TYPE_ARRAY_MAX_SIZE (t)
13483 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13485 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13486 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13487 error_found = true;
13490 else if (TYPE_MAX_VALUE_RAW (t))
13492 error ("TYPE_MAX_VALUE_RAW non-NULL");
13493 debug_tree (TYPE_MAX_VALUE_RAW (t));
13494 error_found = true;
13497 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13499 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13500 debug_tree (TYPE_LANG_SLOT_1 (t));
13501 error_found = true;
13504 /* Check various uses of TYPE_VALUES_RAW. */
13505 if (TREE_CODE (t) == ENUMERAL_TYPE)
13506 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13508 tree value = TREE_VALUE (l);
13509 tree name = TREE_PURPOSE (l);
13511 /* The C FE produces INTEGER_CSTs of INTEGER_TYPE, while the C++ FE uses
13512 CONST_DECLs of ENUMERAL_TYPE. */
13513 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13515 error ("Enum value is not CONST_DECL or INTEGER_CST");
13516 debug_tree (value);
13517 debug_tree (name);
13518 error_found = true;
13520 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13521 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13523 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13524 debug_tree (value);
13525 debug_tree (name);
13526 error_found = true;
13528 if (TREE_CODE (name) != IDENTIFIER_NODE)
13530 error ("Enum value name is not IDENTIFIER_NODE");
13531 debug_tree (value);
13532 debug_tree (name);
13533 error_found = true;
13536 else if (TREE_CODE (t) == ARRAY_TYPE)
13538 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13540 error ("Array TYPE_DOMAIN is not integer type");
13541 debug_tree (TYPE_DOMAIN (t));
13542 error_found = true;
13545 else if (RECORD_OR_UNION_TYPE_P (t))
13547 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13549 error ("TYPE_FIELDS defined in incomplete type");
13550 error_found = true;
13552 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13554 /* TODO: verify properties of decls. */
13555 if (TREE_CODE (fld) == FIELD_DECL)
13557 else if (TREE_CODE (fld) == TYPE_DECL)
13559 else if (TREE_CODE (fld) == CONST_DECL)
13561 else if (VAR_P (fld))
13563 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13565 else if (TREE_CODE (fld) == USING_DECL)
13567 else if (TREE_CODE (fld) == FUNCTION_DECL)
13569 else
13571 error ("Wrong tree in TYPE_FIELDS list");
13572 debug_tree (fld);
13573 error_found = true;
13577 else if (TREE_CODE (t) == INTEGER_TYPE
13578 || TREE_CODE (t) == BOOLEAN_TYPE
13579 || TREE_CODE (t) == OFFSET_TYPE
13580 || TREE_CODE (t) == REFERENCE_TYPE
13581 || TREE_CODE (t) == NULLPTR_TYPE
13582 || TREE_CODE (t) == POINTER_TYPE)
13584 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13586 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13587 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13588 error_found = true;
13590 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13592 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13593 debug_tree (TYPE_CACHED_VALUES (t));
13594 error_found = true;
13596 /* Verify just enough of cache to ensure that no one copied it to new type.
13597 All copying should go by copy_node that should clear it. */
13598 else if (TYPE_CACHED_VALUES_P (t))
13600 int i;
13601 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13602 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13603 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13605 error ("wrong TYPE_CACHED_VALUES entry");
13606 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13607 error_found = true;
13608 break;
13612 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13613 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13615 /* C++ FE uses TREE_PURPOSE to store initial values. */
13616 if (TREE_PURPOSE (l) && in_lto_p)
13618 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13619 debug_tree (l);
13620 error_found = true;
13622 if (!TYPE_P (TREE_VALUE (l)))
13624 error ("Wrong entry in TYPE_ARG_TYPES list");
13625 debug_tree (l);
13626 error_found = true;
13629 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13631 error ("TYPE_VALUES_RAW field is non-NULL");
13632 debug_tree (TYPE_VALUES_RAW (t));
13633 error_found = true;
13635 if (TREE_CODE (t) != INTEGER_TYPE
13636 && TREE_CODE (t) != BOOLEAN_TYPE
13637 && TREE_CODE (t) != OFFSET_TYPE
13638 && TREE_CODE (t) != REFERENCE_TYPE
13639 && TREE_CODE (t) != NULLPTR_TYPE
13640 && TREE_CODE (t) != POINTER_TYPE
13641 && TYPE_CACHED_VALUES_P (t))
13643 error ("TYPE_CACHED_VALUES_P is set while it should not be");
13644 error_found = true;
13646 if (TYPE_STRING_FLAG (t)
13647 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13649 error ("TYPE_STRING_FLAG is set on wrong type code");
13650 error_found = true;
13653 /* ipa-devirt assumes that TYPE_METHOD_BASETYPE is always the
13654 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
13655 of a type. */
13656 if (TREE_CODE (t) == METHOD_TYPE
13657 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13659 error ("TYPE_METHOD_BASETYPE is not main variant");
13660 error_found = true;
13663 if (error_found)
13665 debug_tree (const_cast <tree> (t));
13666 internal_error ("verify_type failed");
13671 /* Return 1 if ARG, interpreted as signed in its precision, is known to be
13672 always non-negative, 2 if ARG is known to be always negative, or 3 if
13673 the sign of ARG is unknown. */
13675 int
13676 get_range_pos_neg (tree arg)
13678 if (arg == error_mark_node)
13679 return 3;
13681 int prec = TYPE_PRECISION (TREE_TYPE (arg));
13682 int cnt = 0;
13683 if (TREE_CODE (arg) == INTEGER_CST)
13685 wide_int w = wi::sext (wi::to_wide (arg), prec);
13686 if (wi::neg_p (w))
13687 return 2;
13688 else
13689 return 1;
13691 while (CONVERT_EXPR_P (arg)
13692 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
13693 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
13695 arg = TREE_OPERAND (arg, 0);
13696 /* Narrower value zero extended into wider type
13697 will always result in positive values. */
13698 if (TYPE_UNSIGNED (TREE_TYPE (arg))
13699 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
13700 return 1;
13701 prec = TYPE_PRECISION (TREE_TYPE (arg));
13702 if (++cnt > 30)
13703 return 3;
13706 if (TREE_CODE (arg) != SSA_NAME)
13707 return 3;
13708 wide_int arg_min, arg_max;
13709 while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
13711 gimple *g = SSA_NAME_DEF_STMT (arg);
13712 if (is_gimple_assign (g)
13713 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
13715 tree t = gimple_assign_rhs1 (g);
13716 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
13717 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
13719 if (TYPE_UNSIGNED (TREE_TYPE (t))
13720 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
13721 return 1;
13722 prec = TYPE_PRECISION (TREE_TYPE (t));
13723 arg = t;
13724 if (++cnt > 30)
13725 return 3;
13726 continue;
13729 return 3;
13731 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
13733 /* For unsigned values, the "positive" range comes
13734 below the "negative" range. */
13735 if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
13736 return 1;
13737 if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
13738 return 2;
13740 else
13742 if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
13743 return 1;
13744 if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
13745 return 2;
13747 return 3;
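/* Usage sketch (hypothetical; OP is assumed to be an integral constant or
   SSA name supplied by the caller): a caller that only cares whether OP is
   known to be non-negative in its own precision can test for a return
   value of 1:

     bool op_known_nonnegative = get_range_pos_neg (op) == 1;
     bool op_known_negative = get_range_pos_neg (op) == 2;  */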
13753 /* Return true if ARG is marked with the nonnull attribute in the
13754 current function signature. */
13756 bool
13757 nonnull_arg_p (const_tree arg)
13759 tree t, attrs, fntype;
13760 unsigned HOST_WIDE_INT arg_num;
13762 gcc_assert (TREE_CODE (arg) == PARM_DECL
13763 && (POINTER_TYPE_P (TREE_TYPE (arg))
13764 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
13766 /* The static chain decl is always non null. */
13767 if (arg == cfun->static_chain_decl)
13768 return true;
13770 /* The THIS argument of a method is always non-NULL. */
13771 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13772 && arg == DECL_ARGUMENTS (cfun->decl)
13773 && flag_delete_null_pointer_checks)
13774 return true;
13776 /* Values passed by reference are always non-NULL. */
13777 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13778 && flag_delete_null_pointer_checks)
13779 return true;
13781 fntype = TREE_TYPE (cfun->decl);
13782 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13784 attrs = lookup_attribute ("nonnull", attrs);
13786 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13787 if (attrs == NULL_TREE)
13788 return false;
13790 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13791 if (TREE_VALUE (attrs) == NULL_TREE)
13792 return true;
13794 /* Get the position number for ARG in the function signature. */
13795 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13797 t = DECL_CHAIN (t), arg_num++)
13799 if (t == arg)
13800 break;
13803 gcc_assert (t == arg);
13805 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13806 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13808 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13809 return true;
13813 return false;
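/* Usage sketch for nonnull_arg_p (hypothetical; assumes cfun is set up, as
   it is inside a gimple pass, and that NONNULL_PARMS is a bitmap owned by
   the caller): collect the current function's pointer parameters that are
   known to be non-null on entry.

     for (tree parm = DECL_ARGUMENTS (cfun->decl);
          parm; parm = DECL_CHAIN (parm))
       if (POINTER_TYPE_P (TREE_TYPE (parm)) && nonnull_arg_p (parm))
         bitmap_set_bit (nonnull_parms, DECL_UID (parm));  */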
13816 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
13817 information. */
13819 location_t
13820 set_block (location_t loc, tree block)
13822 location_t pure_loc = get_pure_location (loc);
13823 source_range src_range = get_range_from_loc (line_table, loc);
13824 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
13827 location_t
13828 set_source_range (tree expr, location_t start, location_t finish)
13830 source_range src_range;
13831 src_range.m_start = start;
13832 src_range.m_finish = finish;
13833 return set_source_range (expr, src_range);
13836 location_t
13837 set_source_range (tree expr, source_range src_range)
13839 if (!EXPR_P (expr))
13840 return UNKNOWN_LOCATION;
13842 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
13843 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
13844 pure_loc,
13845 src_range,
13846 NULL);
13847 SET_EXPR_LOCATION (expr, adhoc);
13848 return adhoc;
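/* Sketch of a typical front-end use of the two overloads above
   (hypothetical; START and FINISH are locations obtained from the parser,
   LHS and RHS are previously built operands):

     tree sum = build2_loc (start, PLUS_EXPR, integer_type_node, lhs, rhs);
     set_source_range (sum, start, finish);

   The expression keeps its pure location (the caret) while the attached
   ad-hoc location additionally records the START..FINISH range used by
   diagnostics.  */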
13851 /* Return the name of combined function FN, for debugging purposes. */
13853 const char *
13854 combined_fn_name (combined_fn fn)
13856 if (builtin_fn_p (fn))
13858 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
13859 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
13861 else
13862 return internal_fn_name (as_internal_fn (fn));
13865 /* Return a bitmap with a bit set corresponding to each argument in
13866 a function call type FNTYPE declared with attribute nonnull,
13867 or null if none of the function's arguments are nonnull. The caller
13868 must free the bitmap. */
13870 bitmap
13871 get_nonnull_args (const_tree fntype)
13873 if (fntype == NULL_TREE)
13874 return NULL;
13876 tree attrs = TYPE_ATTRIBUTES (fntype);
13877 if (!attrs)
13878 return NULL;
13880 bitmap argmap = NULL;
13882 /* A function declaration can specify multiple attribute nonnull,
13883 each with zero or more arguments. The loop below creates a bitmap
13884 representing a union of all the arguments. An empty (but non-null)
13885 bitmap means that all arguments have been declared nonnull. */
13886 for ( ; attrs; attrs = TREE_CHAIN (attrs))
13888 attrs = lookup_attribute ("nonnull", attrs);
13889 if (!attrs)
13890 break;
13892 if (!argmap)
13893 argmap = BITMAP_ALLOC (NULL);
13895 if (!TREE_VALUE (attrs))
13897 /* Clear the bitmap in case a previous attribute nonnull
13898 set it and this one overrides it for all arguments. */
13899 bitmap_clear (argmap);
13900 return argmap;
13903 /* Iterate over the indices of the arguments declared nonnull
13904 and set a bit for each. */
13905 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
13907 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
13908 bitmap_set_bit (argmap, val);
13912 return argmap;
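/* Usage sketch for get_nonnull_args (hypothetical call site; FNTYPE is
   assumed to be the FUNCTION_TYPE of the callee being analyzed).  The
   returned bitmap, if any, is owned by the caller:

     bitmap nonnull = get_nonnull_args (fntype);
     if (nonnull)
       {
         bool all_nonnull = bitmap_empty_p (nonnull);
         bool arg1_nonnull = all_nonnull || bitmap_bit_p (nonnull, 0);
         BITMAP_FREE (nonnull);
       }

   An empty bitmap means every argument was declared nonnull; otherwise
   bit I - 1 is set when the I-th (1-based) argument was.  */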
13915 /* Returns true if TYPE is a type where it and all of its subobjects
13916 (recursively) are of structure, union, or array type. */
13918 static bool
13919 default_is_empty_type (tree type)
13921 if (RECORD_OR_UNION_TYPE_P (type))
13923 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
13924 if (TREE_CODE (field) == FIELD_DECL
13925 && !DECL_PADDING_P (field)
13926 && !default_is_empty_type (TREE_TYPE (field)))
13927 return false;
13928 return true;
13930 else if (TREE_CODE (type) == ARRAY_TYPE)
13931 return (integer_minus_onep (array_type_nelts (type))
13932 || TYPE_DOMAIN (type) == NULL_TREE
13933 || default_is_empty_type (TREE_TYPE (type)));
13934 return false;
13937 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
13938 that shouldn't be passed via the stack. */
13940 bool
13941 default_is_empty_record (const_tree type)
13943 if (!abi_version_at_least (12))
13944 return false;
13946 if (type == error_mark_node)
13947 return false;
13949 if (TREE_ADDRESSABLE (type))
13950 return false;
13952 return default_is_empty_type (TYPE_MAIN_VARIANT (type));
13955 /* Like int_size_in_bytes, but handle empty records specially. */
13957 HOST_WIDE_INT
13958 arg_int_size_in_bytes (const_tree type)
13960 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
13963 /* Like size_in_bytes, but handle empty records specially. */
13965 tree
13966 arg_size_in_bytes (const_tree type)
13968 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
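/* Sketch of how a target's argument-passing code might use the wrappers
   above (hypothetical; TYPE is the type of an argument being laid out).
   On targets that honor TARGET_EMPTY_RECORD_P, an argument whose type is
   an empty record can be given no register or stack slot at all:

     if (arg_int_size_in_bytes (type) == 0)
       return NULL_RTX;

   Non-empty types simply fall through to the usual int_size_in_bytes and
   size_in_bytes behavior.  */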
13971 /* Return true if an expression with CODE has to have the same result type as
13972 its first operand. */
13974 bool
13975 expr_type_first_operand_type_p (tree_code code)
13977 switch (code)
13979 case NEGATE_EXPR:
13980 case ABS_EXPR:
13981 case BIT_NOT_EXPR:
13982 case PAREN_EXPR:
13983 case CONJ_EXPR:
13985 case PLUS_EXPR:
13986 case MINUS_EXPR:
13987 case MULT_EXPR:
13988 case TRUNC_DIV_EXPR:
13989 case CEIL_DIV_EXPR:
13990 case FLOOR_DIV_EXPR:
13991 case ROUND_DIV_EXPR:
13992 case TRUNC_MOD_EXPR:
13993 case CEIL_MOD_EXPR:
13994 case FLOOR_MOD_EXPR:
13995 case ROUND_MOD_EXPR:
13996 case RDIV_EXPR:
13997 case EXACT_DIV_EXPR:
13998 case MIN_EXPR:
13999 case MAX_EXPR:
14000 case BIT_IOR_EXPR:
14001 case BIT_XOR_EXPR:
14002 case BIT_AND_EXPR:
14004 case LSHIFT_EXPR:
14005 case RSHIFT_EXPR:
14006 case LROTATE_EXPR:
14007 case RROTATE_EXPR:
14008 return true;
14010 default:
14011 return false;
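/* For illustration (a sketch, not code used elsewhere in this file): a
   consistency check over a GIMPLE assignment STMT whose rhs code satisfies
   this predicate could assert that the lhs and the first rhs operand have
   compatible types:

     if (is_gimple_assign (stmt)
         && expr_type_first_operand_type_p (gimple_assign_rhs_code (stmt)))
       gcc_checking_assert
         (types_compatible_p (TREE_TYPE (gimple_assign_lhs (stmt)),
                              TREE_TYPE (gimple_assign_rhs1 (stmt))));  */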
14015 /* List of pointer types used to declare builtins before we have seen their
14016 real declaration.
14018 Keep the size up to date in tree.h ! */
14019 const builtin_structptr_type builtin_structptr_types[6] =
14021 { fileptr_type_node, ptr_type_node, "FILE" },
14022 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14023 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14024 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14025 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14026 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14029 #if CHECKING_P
14031 namespace selftest {
14033 /* Selftests for tree. */
14035 /* Verify that integer constants are sane. */
14037 static void
14038 test_integer_constants ()
14040 ASSERT_TRUE (integer_type_node != NULL);
14041 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14043 tree type = integer_type_node;
14045 tree zero = build_zero_cst (type);
14046 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14047 ASSERT_EQ (type, TREE_TYPE (zero));
14049 tree one = build_int_cst (type, 1);
14050 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14051 ASSERT_EQ (type, TREE_TYPE (one));
14054 /* Verify identifiers. */
14056 static void
14057 test_identifiers ()
14059 tree identifier = get_identifier ("foo");
14060 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14061 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14064 /* Verify LABEL_DECL. */
14066 static void
14067 test_labels ()
14069 tree identifier = get_identifier ("err");
14070 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14071 identifier, void_type_node);
14072 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14073 ASSERT_FALSE (FORCED_LABEL (label_decl));
14076 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14077 are given by VALS. */
14079 static tree
14080 build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
14082 gcc_assert (vals.length () == TYPE_VECTOR_SUBPARTS (type));
14083 tree_vector_builder builder (type, vals.length (), 1);
14084 builder.splice (vals);
14085 return builder.build ();
14088 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14090 static void
14091 check_vector_cst (vec<tree> expected, tree actual)
14093 ASSERT_EQ (expected.length (), TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
14094 for (unsigned int i = 0; i < expected.length (); ++i)
14095 ASSERT_EQ (wi::to_wide (expected[i]),
14096 wi::to_wide (vector_cst_elt (actual, i)));
14099 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14100 and that its elements match EXPECTED. */
14102 static void
14103 check_vector_cst_duplicate (vec<tree> expected, tree actual,
14104 unsigned int npatterns)
14106 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14107 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
14108 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
14109 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
14110 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14111 check_vector_cst (expected, actual);
14114 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14115 and NPATTERNS background elements, and that its elements match
14116 EXPECTED. */
14118 static void
14119 check_vector_cst_fill (vec<tree> expected, tree actual,
14120 unsigned int npatterns)
14122 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14123 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
14124 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
14125 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14126 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14127 check_vector_cst (expected, actual);
14130 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14131 and that its elements match EXPECTED. */
14133 static void
14134 check_vector_cst_stepped (vec<tree> expected, tree actual,
14135 unsigned int npatterns)
14137 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14138 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
14139 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
14140 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14141 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
14142 check_vector_cst (expected, actual);
14145 /* Test the creation of VECTOR_CSTs. */
14147 static void
14148 test_vector_cst_patterns ()
14150 auto_vec<tree, 8> elements (8);
14151 elements.quick_grow (8);
14152 tree element_type = build_nonstandard_integer_type (16, true);
14153 tree vector_type = build_vector_type (element_type, 8);
14155 /* Test a simple linear series with a base of 0 and a step of 1:
14156 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
14157 for (unsigned int i = 0; i < 8; ++i)
14158 elements[i] = build_int_cst (element_type, i);
14159 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 1);
14161 /* Try the same with the first element replaced by 100:
14162 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
14163 elements[0] = build_int_cst (element_type, 100);
14164 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 1);
14166 /* Try a series that wraps around.
14167 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
14168 for (unsigned int i = 1; i < 8; ++i)
14169 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
14170 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 1);
14172 /* Try a downward series:
14173 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
14174 for (unsigned int i = 1; i < 8; ++i)
14175 elements[i] = build_int_cst (element_type, 80 - i);
14176 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 1);
14178 /* Try two interleaved series with different bases and steps:
14179 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
14180 elements[1] = build_int_cst (element_type, 53);
14181 for (unsigned int i = 2; i < 8; i += 2)
14183 elements[i] = build_int_cst (element_type, 70 - i * 2);
14184 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
14186 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 2);
14188 /* Try a duplicated value:
14189 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
14190 for (unsigned int i = 1; i < 8; ++i)
14191 elements[i] = elements[0];
14192 check_vector_cst_duplicate (elements,
14193 build_vector (vector_type, elements), 1);
14195 /* Try an interleaved duplicated value:
14196 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
14197 elements[1] = build_int_cst (element_type, 55);
14198 for (unsigned int i = 2; i < 8; ++i)
14199 elements[i] = elements[i - 2];
14200 check_vector_cst_duplicate (elements,
14201 build_vector (vector_type, elements), 2);
14203 /* Try a duplicated value with 2 exceptions
14204 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
14205 elements[0] = build_int_cst (element_type, 41);
14206 elements[1] = build_int_cst (element_type, 97);
14207 check_vector_cst_fill (elements, build_vector (vector_type, elements), 2);
14209 /* Try with and without a step
14210 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
14211 for (unsigned int i = 3; i < 8; i += 2)
14212 elements[i] = build_int_cst (element_type, i * 7);
14213 check_vector_cst_stepped (elements, build_vector (vector_type, elements), 2);
14215 /* Try a fully-general constant:
14216 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
14217 elements[5] = build_int_cst (element_type, 9990);
14218 check_vector_cst_fill (elements, build_vector (vector_type, elements), 4);
14221 /* Run all of the selftests within this file. */
14223 void
14224 tree_c_tests ()
14226 test_integer_constants ();
14227 test_identifiers ();
14228 test_labels ();
14229 test_vector_cst_patterns ();
14232 } // namespace selftest
14234 #endif /* CHECKING_P */
14236 #include "gt-tree.h"