[official-gcc.git] / gcc / tree.cc
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2022 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
 24     tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
 28     call language-dependent routines.  */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72 #include "gomp-constants.h"
73 #include "dfp.h"
75 /* Tree code classes. */
77 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
78 #define END_OF_BASE_TREE_CODES tcc_exceptional,
80 const enum tree_code_class tree_code_type[] = {
81 #include "all-tree.def"
84 #undef DEFTREECODE
85 #undef END_OF_BASE_TREE_CODES
87 /* Table indexed by tree code giving number of expression
88 operands beyond the fixed part of the node structure.
89 Not used for types or decls. */
91 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
92 #define END_OF_BASE_TREE_CODES 0,
94 const unsigned char tree_code_length[] = {
95 #include "all-tree.def"
98 #undef DEFTREECODE
99 #undef END_OF_BASE_TREE_CODES
101 /* Names of tree components.
102 Used for printing out the tree and error messages. */
103 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
104 #define END_OF_BASE_TREE_CODES "@dummy",
106 static const char *const tree_code_name[] = {
107 #include "all-tree.def"
110 #undef DEFTREECODE
111 #undef END_OF_BASE_TREE_CODES
113 /* Each tree code class has an associated string representation.
114 These must correspond to the tree_code_class entries. */
116 const char *const tree_code_class_strings[] =
118 "exceptional",
119 "constant",
120 "type",
121 "declaration",
122 "reference",
123 "comparison",
124 "unary",
125 "binary",
126 "statement",
127 "vl_exp",
128 "expression"
131 /* obstack.[ch] explicitly declined to prototype this. */
132 extern int _obstack_allocated_p (struct obstack *h, void *obj);
134 /* Statistics-gathering stuff. */
136 static uint64_t tree_code_counts[MAX_TREE_CODES];
137 uint64_t tree_node_counts[(int) all_kinds];
138 uint64_t tree_node_sizes[(int) all_kinds];
140 /* Keep in sync with tree.h:enum tree_node_kind. */
141 static const char * const tree_node_kind_names[] = {
142 "decls",
143 "types",
144 "blocks",
145 "stmts",
146 "refs",
147 "exprs",
148 "constants",
149 "identifiers",
150 "vecs",
151 "binfos",
152 "ssa names",
153 "constructors",
154 "random kinds",
155 "lang_decl kinds",
156 "lang_type kinds",
157 "omp clauses",
160 /* Unique id for next decl created. */
161 static GTY(()) int next_decl_uid;
162 /* Unique id for next type created. */
163 static GTY(()) unsigned next_type_uid = 1;
164 /* Unique id for next debug decl created. Use negative numbers,
165 to catch erroneous uses. */
166 static GTY(()) int next_debug_decl_uid;
168 /* Since we cannot rehash a type after it is in the table, we have to
169 keep the hash code. */
171 struct GTY((for_user)) type_hash {
172 unsigned long hash;
173 tree type;
176 /* Initial size of the hash table (rounded to next prime). */
177 #define TYPE_HASH_INITIAL_SIZE 1000
179 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
181 static hashval_t hash (type_hash *t) { return t->hash; }
182 static bool equal (type_hash *a, type_hash *b);
184 static int
185 keep_cache_entry (type_hash *&t)
187 return ggc_marked_p (t->type);
191 /* Now here is the hash table. When recording a type, it is added to
192 the slot whose index is the hash code. Note that the hash table is
193 used for several kinds of types (function types, array types and
194 array index range types, for now). While all these live in the
195 same table, they are completely independent, and the hash code is
196 computed differently for each of these. */
198 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
200 /* Hash table and temporary node for larger integer const values. */
201 static GTY (()) tree int_cst_node;
203 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
205 static hashval_t hash (tree t);
206 static bool equal (tree x, tree y);
209 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
211 /* Class and variable for making sure that there is a single POLY_INT_CST
212 for a given value. */
213 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
215 typedef std::pair<tree, const poly_wide_int *> compare_type;
216 static hashval_t hash (tree t);
217 static bool equal (tree x, const compare_type &y);
220 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
222 /* Hash table for optimization flags and target option flags. Use the same
223 hash table for both sets of options. Nodes for building the current
224 optimization and target option nodes. The assumption is most of the time
225 the options created will already be in the hash table, so we avoid
 226    allocating and freeing up a node repeatedly.  */
227 static GTY (()) tree cl_optimization_node;
228 static GTY (()) tree cl_target_option_node;
230 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
232 static hashval_t hash (tree t);
233 static bool equal (tree x, tree y);
236 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
238 /* General tree->tree mapping structure for use in hash tables. */
241 static GTY ((cache))
242 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
244 static GTY ((cache))
245 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
247 static GTY ((cache))
248 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
250 static void set_type_quals (tree, int);
251 static void print_type_hash_statistics (void);
252 static void print_debug_expr_statistics (void);
253 static void print_value_expr_statistics (void);
255 tree global_trees[TI_MAX];
256 tree integer_types[itk_none];
258 bool int_n_enabled_p[NUM_INT_N_ENTS];
259 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
261 bool tree_contains_struct[MAX_TREE_CODES][64];
263 /* Number of operands for each OMP clause. */
264 unsigned const char omp_clause_num_ops[] =
266 0, /* OMP_CLAUSE_ERROR */
267 1, /* OMP_CLAUSE_PRIVATE */
268 1, /* OMP_CLAUSE_SHARED */
269 1, /* OMP_CLAUSE_FIRSTPRIVATE */
270 2, /* OMP_CLAUSE_LASTPRIVATE */
271 5, /* OMP_CLAUSE_REDUCTION */
272 5, /* OMP_CLAUSE_TASK_REDUCTION */
273 5, /* OMP_CLAUSE_IN_REDUCTION */
274 1, /* OMP_CLAUSE_COPYIN */
275 1, /* OMP_CLAUSE_COPYPRIVATE */
276 3, /* OMP_CLAUSE_LINEAR */
277 1, /* OMP_CLAUSE_AFFINITY */
278 2, /* OMP_CLAUSE_ALIGNED */
279 3, /* OMP_CLAUSE_ALLOCATE */
280 1, /* OMP_CLAUSE_DEPEND */
281 1, /* OMP_CLAUSE_NONTEMPORAL */
282 1, /* OMP_CLAUSE_UNIFORM */
283 1, /* OMP_CLAUSE_TO_DECLARE */
284 1, /* OMP_CLAUSE_LINK */
285 1, /* OMP_CLAUSE_DETACH */
286 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
287 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
288 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
289 1, /* OMP_CLAUSE_INCLUSIVE */
290 1, /* OMP_CLAUSE_EXCLUSIVE */
291 2, /* OMP_CLAUSE_FROM */
292 2, /* OMP_CLAUSE_TO */
293 2, /* OMP_CLAUSE_MAP */
294 1, /* OMP_CLAUSE_HAS_DEVICE_ADDR */
295 2, /* OMP_CLAUSE__CACHE_ */
296 2, /* OMP_CLAUSE_GANG */
297 1, /* OMP_CLAUSE_ASYNC */
298 1, /* OMP_CLAUSE_WAIT */
299 0, /* OMP_CLAUSE_AUTO */
300 0, /* OMP_CLAUSE_SEQ */
301 1, /* OMP_CLAUSE__LOOPTEMP_ */
302 1, /* OMP_CLAUSE__REDUCTEMP_ */
303 1, /* OMP_CLAUSE__CONDTEMP_ */
304 1, /* OMP_CLAUSE__SCANTEMP_ */
305 1, /* OMP_CLAUSE_IF */
306 1, /* OMP_CLAUSE_NUM_THREADS */
307 1, /* OMP_CLAUSE_SCHEDULE */
308 0, /* OMP_CLAUSE_NOWAIT */
309 1, /* OMP_CLAUSE_ORDERED */
310 0, /* OMP_CLAUSE_DEFAULT */
311 3, /* OMP_CLAUSE_COLLAPSE */
312 0, /* OMP_CLAUSE_UNTIED */
313 1, /* OMP_CLAUSE_FINAL */
314 0, /* OMP_CLAUSE_MERGEABLE */
315 1, /* OMP_CLAUSE_DEVICE */
316 1, /* OMP_CLAUSE_DIST_SCHEDULE */
317 0, /* OMP_CLAUSE_INBRANCH */
318 0, /* OMP_CLAUSE_NOTINBRANCH */
319 2, /* OMP_CLAUSE_NUM_TEAMS */
320 1, /* OMP_CLAUSE_THREAD_LIMIT */
321 0, /* OMP_CLAUSE_PROC_BIND */
322 1, /* OMP_CLAUSE_SAFELEN */
323 1, /* OMP_CLAUSE_SIMDLEN */
324 0, /* OMP_CLAUSE_DEVICE_TYPE */
325 0, /* OMP_CLAUSE_FOR */
326 0, /* OMP_CLAUSE_PARALLEL */
327 0, /* OMP_CLAUSE_SECTIONS */
328 0, /* OMP_CLAUSE_TASKGROUP */
329 1, /* OMP_CLAUSE_PRIORITY */
330 1, /* OMP_CLAUSE_GRAINSIZE */
331 1, /* OMP_CLAUSE_NUM_TASKS */
332 0, /* OMP_CLAUSE_NOGROUP */
333 0, /* OMP_CLAUSE_THREADS */
334 0, /* OMP_CLAUSE_SIMD */
335 1, /* OMP_CLAUSE_HINT */
336 0, /* OMP_CLAUSE_DEFAULTMAP */
337 0, /* OMP_CLAUSE_ORDER */
338 0, /* OMP_CLAUSE_BIND */
339 1, /* OMP_CLAUSE_FILTER */
340 1, /* OMP_CLAUSE__SIMDUID_ */
341 0, /* OMP_CLAUSE__SIMT_ */
342 0, /* OMP_CLAUSE_INDEPENDENT */
343 1, /* OMP_CLAUSE_WORKER */
344 1, /* OMP_CLAUSE_VECTOR */
345 1, /* OMP_CLAUSE_NUM_GANGS */
346 1, /* OMP_CLAUSE_NUM_WORKERS */
347 1, /* OMP_CLAUSE_VECTOR_LENGTH */
348 3, /* OMP_CLAUSE_TILE */
349 0, /* OMP_CLAUSE_IF_PRESENT */
350 0, /* OMP_CLAUSE_FINALIZE */
351 0, /* OMP_CLAUSE_NOHOST */
354 const char * const omp_clause_code_name[] =
356 "error_clause",
357 "private",
358 "shared",
359 "firstprivate",
360 "lastprivate",
361 "reduction",
362 "task_reduction",
363 "in_reduction",
364 "copyin",
365 "copyprivate",
366 "linear",
367 "affinity",
368 "aligned",
369 "allocate",
370 "depend",
371 "nontemporal",
372 "uniform",
373 "to",
374 "link",
375 "detach",
376 "use_device_ptr",
377 "use_device_addr",
378 "is_device_ptr",
379 "inclusive",
380 "exclusive",
381 "from",
382 "to",
383 "map",
384 "has_device_addr",
385 "_cache_",
386 "gang",
387 "async",
388 "wait",
389 "auto",
390 "seq",
391 "_looptemp_",
392 "_reductemp_",
393 "_condtemp_",
394 "_scantemp_",
395 "if",
396 "num_threads",
397 "schedule",
398 "nowait",
399 "ordered",
400 "default",
401 "collapse",
402 "untied",
403 "final",
404 "mergeable",
405 "device",
406 "dist_schedule",
407 "inbranch",
408 "notinbranch",
409 "num_teams",
410 "thread_limit",
411 "proc_bind",
412 "safelen",
413 "simdlen",
414 "device_type",
415 "for",
416 "parallel",
417 "sections",
418 "taskgroup",
419 "priority",
420 "grainsize",
421 "num_tasks",
422 "nogroup",
423 "threads",
424 "simd",
425 "hint",
426 "defaultmap",
427 "order",
428 "bind",
429 "filter",
430 "_simduid_",
431 "_simt_",
432 "independent",
433 "worker",
434 "vector",
435 "num_gangs",
436 "num_workers",
437 "vector_length",
438 "tile",
439 "if_present",
440 "finalize",
441 "nohost",
444 /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
 445    clause names, but for use in diagnostics etc. we would like to use the "user"
446 clause names. */
448 const char *
449 user_omp_clause_code_name (tree clause, bool oacc)
451 /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
452 distinguish clauses as seen by the user. See also where front ends do
453 'build_omp_clause' with 'OMP_CLAUSE_MAP'. */
454 if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
455 switch (OMP_CLAUSE_MAP_KIND (clause))
457 case GOMP_MAP_FORCE_ALLOC:
458 case GOMP_MAP_ALLOC: return "create";
459 case GOMP_MAP_FORCE_TO:
460 case GOMP_MAP_TO: return "copyin";
461 case GOMP_MAP_FORCE_FROM:
462 case GOMP_MAP_FROM: return "copyout";
463 case GOMP_MAP_FORCE_TOFROM:
464 case GOMP_MAP_TOFROM: return "copy";
465 case GOMP_MAP_RELEASE: return "delete";
466 case GOMP_MAP_FORCE_PRESENT: return "present";
467 case GOMP_MAP_ATTACH: return "attach";
468 case GOMP_MAP_FORCE_DETACH:
469 case GOMP_MAP_DETACH: return "detach";
470 case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
471 case GOMP_MAP_LINK: return "link";
472 case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
473 default: break;
476 return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
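
/* Illustrative sketch, not part of tree.cc: both tables above are indexed
   by OMP_CLAUSE_CODE, and omp_clause_num_ops bounds the operand walk just
   as tree_size does further below for OMP_CLAUSE nodes.  The helper name
   'dump_omp_clause_brief' is invented purely for illustration.  */

static void ATTRIBUTE_UNUSED
dump_omp_clause_brief (FILE *f, tree clause)
{
  enum omp_clause_code c = OMP_CLAUSE_CODE (clause);
  fprintf (f, "%s", omp_clause_code_name[c]);
  for (unsigned int i = 0; i < omp_clause_num_ops[c]; i++)
    if (OMP_CLAUSE_OPERAND (clause, i))
      fprintf (f, " op%u", i);
  fprintf (f, "\n");
}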
480 /* Return the tree node structure used by tree code CODE. */
482 static inline enum tree_node_structure_enum
483 tree_node_structure_for_code (enum tree_code code)
485 switch (TREE_CODE_CLASS (code))
487 case tcc_declaration:
488 switch (code)
490 case CONST_DECL: return TS_CONST_DECL;
491 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
492 case FIELD_DECL: return TS_FIELD_DECL;
493 case FUNCTION_DECL: return TS_FUNCTION_DECL;
494 case LABEL_DECL: return TS_LABEL_DECL;
495 case PARM_DECL: return TS_PARM_DECL;
496 case RESULT_DECL: return TS_RESULT_DECL;
497 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
498 case TYPE_DECL: return TS_TYPE_DECL;
499 case VAR_DECL: return TS_VAR_DECL;
500 default: return TS_DECL_NON_COMMON;
503 case tcc_type: return TS_TYPE_NON_COMMON;
505 case tcc_binary:
506 case tcc_comparison:
507 case tcc_expression:
508 case tcc_reference:
509 case tcc_statement:
510 case tcc_unary:
511 case tcc_vl_exp: return TS_EXP;
513 default: /* tcc_constant and tcc_exceptional */
514 break;
517 switch (code)
519 /* tcc_constant cases. */
520 case COMPLEX_CST: return TS_COMPLEX;
521 case FIXED_CST: return TS_FIXED_CST;
522 case INTEGER_CST: return TS_INT_CST;
523 case POLY_INT_CST: return TS_POLY_INT_CST;
524 case REAL_CST: return TS_REAL_CST;
525 case STRING_CST: return TS_STRING;
526 case VECTOR_CST: return TS_VECTOR;
527 case VOID_CST: return TS_TYPED;
529 /* tcc_exceptional cases. */
530 case BLOCK: return TS_BLOCK;
531 case CONSTRUCTOR: return TS_CONSTRUCTOR;
532 case ERROR_MARK: return TS_COMMON;
533 case IDENTIFIER_NODE: return TS_IDENTIFIER;
534 case OMP_CLAUSE: return TS_OMP_CLAUSE;
535 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
536 case PLACEHOLDER_EXPR: return TS_COMMON;
537 case SSA_NAME: return TS_SSA_NAME;
538 case STATEMENT_LIST: return TS_STATEMENT_LIST;
539 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
540 case TREE_BINFO: return TS_BINFO;
541 case TREE_LIST: return TS_LIST;
542 case TREE_VEC: return TS_VEC;
544 default:
545 gcc_unreachable ();
550 /* Initialize tree_contains_struct to describe the hierarchy of tree
551 nodes. */
553 static void
554 initialize_tree_contains_struct (void)
556 unsigned i;
558 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
560 enum tree_code code;
561 enum tree_node_structure_enum ts_code;
563 code = (enum tree_code) i;
564 ts_code = tree_node_structure_for_code (code);
566 /* Mark the TS structure itself. */
567 tree_contains_struct[code][ts_code] = 1;
569 /* Mark all the structures that TS is derived from. */
570 switch (ts_code)
572 case TS_TYPED:
573 case TS_BLOCK:
574 case TS_OPTIMIZATION:
575 case TS_TARGET_OPTION:
576 MARK_TS_BASE (code);
577 break;
579 case TS_COMMON:
580 case TS_INT_CST:
581 case TS_POLY_INT_CST:
582 case TS_REAL_CST:
583 case TS_FIXED_CST:
584 case TS_VECTOR:
585 case TS_STRING:
586 case TS_COMPLEX:
587 case TS_SSA_NAME:
588 case TS_CONSTRUCTOR:
589 case TS_EXP:
590 case TS_STATEMENT_LIST:
591 MARK_TS_TYPED (code);
592 break;
594 case TS_IDENTIFIER:
595 case TS_DECL_MINIMAL:
596 case TS_TYPE_COMMON:
597 case TS_LIST:
598 case TS_VEC:
599 case TS_BINFO:
600 case TS_OMP_CLAUSE:
601 MARK_TS_COMMON (code);
602 break;
604 case TS_TYPE_WITH_LANG_SPECIFIC:
605 MARK_TS_TYPE_COMMON (code);
606 break;
608 case TS_TYPE_NON_COMMON:
609 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
610 break;
612 case TS_DECL_COMMON:
613 MARK_TS_DECL_MINIMAL (code);
614 break;
616 case TS_DECL_WRTL:
617 case TS_CONST_DECL:
618 MARK_TS_DECL_COMMON (code);
619 break;
621 case TS_DECL_NON_COMMON:
622 MARK_TS_DECL_WITH_VIS (code);
623 break;
625 case TS_DECL_WITH_VIS:
626 case TS_PARM_DECL:
627 case TS_LABEL_DECL:
628 case TS_RESULT_DECL:
629 MARK_TS_DECL_WRTL (code);
630 break;
632 case TS_FIELD_DECL:
633 MARK_TS_DECL_COMMON (code);
634 break;
636 case TS_VAR_DECL:
637 MARK_TS_DECL_WITH_VIS (code);
638 break;
640 case TS_TYPE_DECL:
641 case TS_FUNCTION_DECL:
642 MARK_TS_DECL_NON_COMMON (code);
643 break;
645 case TS_TRANSLATION_UNIT_DECL:
646 MARK_TS_DECL_COMMON (code);
647 break;
649 default:
650 gcc_unreachable ();
654 /* Basic consistency checks for attributes used in fold. */
655 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
656 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
657 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
658 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
659 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
660 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
661 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
662 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
663 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
664 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
665 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
666 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
667 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
668 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
669 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
670 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
671 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
672 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
673 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
674 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
675 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
676 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
677 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
678 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
679 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
680 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
681 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
682 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
683 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
684 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
685 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
686 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
687 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
688 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
689 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
690 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
691 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
692 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
693 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
694 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
698 /* Init tree.cc. */
700 void
701 init_ttree (void)
703 /* Initialize the hash table of types. */
704 type_hash_table
705 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
707 debug_expr_for_decl
708 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
710 value_expr_for_decl
711 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
713 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
715 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
717 int_cst_node = make_int_cst (1, 1);
719 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
721 cl_optimization_node = make_node (OPTIMIZATION_NODE);
722 cl_target_option_node = make_node (TARGET_OPTION_NODE);
724 /* Initialize the tree_contains_struct array. */
725 initialize_tree_contains_struct ();
726 lang_hooks.init_ts ();
730 /* The name of the object as the assembler will see it (but before any
731 translations made by ASM_OUTPUT_LABELREF). Often this is the same
732 as DECL_NAME. It is an IDENTIFIER_NODE. */
733 tree
734 decl_assembler_name (tree decl)
736 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
737 lang_hooks.set_decl_assembler_name (decl);
738 return DECL_ASSEMBLER_NAME_RAW (decl);
741 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
 742    (either of which may be NULL).  Inform the FE if this changes the
743 name. */
745 void
746 overwrite_decl_assembler_name (tree decl, tree name)
748 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
749 lang_hooks.overwrite_decl_assembler_name (decl, name);
752 /* Return true if DECL may need an assembler name to be set. */
754 static inline bool
755 need_assembler_name_p (tree decl)
757 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
 758    Rule merging.  This makes type_odr_p return true on those types during
759 LTO and by comparing the mangled name, we can say what types are intended
 760    to be equivalent across compilation units.
762 We do not store names of type_in_anonymous_namespace_p.
 764    Record, union and enumeration types have linkage that allows us
 765    to check type_in_anonymous_namespace_p.  We do not mangle compound types
 766    that can always be compared structurally.
768 Similarly for builtin types, we compare properties of their main variant.
 769    A special case is integer types, where mangling does distinguish
 770    between char/signed char/unsigned char etc.  Storing names for these
 771    makes e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
772 See cp/mangle.cc:write_builtin_type for details. */
774 if (TREE_CODE (decl) == TYPE_DECL)
776 if (DECL_NAME (decl)
777 && decl == TYPE_NAME (TREE_TYPE (decl))
778 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
779 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
780 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
781 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
782 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
783 && (type_with_linkage_p (TREE_TYPE (decl))
784 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
785 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
786 return !DECL_ASSEMBLER_NAME_SET_P (decl);
787 return false;
789 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
790 if (!VAR_OR_FUNCTION_DECL_P (decl))
791 return false;
793 /* If DECL already has its assembler name set, it does not need a
794 new one. */
795 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
796 || DECL_ASSEMBLER_NAME_SET_P (decl))
797 return false;
799 /* Abstract decls do not need an assembler name. */
800 if (DECL_ABSTRACT_P (decl))
801 return false;
803 /* For VAR_DECLs, only static, public and external symbols need an
804 assembler name. */
805 if (VAR_P (decl)
806 && !TREE_STATIC (decl)
807 && !TREE_PUBLIC (decl)
808 && !DECL_EXTERNAL (decl))
809 return false;
811 if (TREE_CODE (decl) == FUNCTION_DECL)
813 /* Do not set assembler name on builtins. Allow RTL expansion to
814 decide whether to expand inline or via a regular call. */
815 if (fndecl_built_in_p (decl)
816 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
817 return false;
819 /* Functions represented in the callgraph need an assembler name. */
820 if (cgraph_node::get (decl) != NULL)
821 return true;
823 /* Unused and not public functions don't need an assembler name. */
824 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
825 return false;
828 return true;
831 /* If T needs an assembler name, have one created for it. */
833 void
834 assign_assembler_name_if_needed (tree t)
836 if (need_assembler_name_p (t))
838 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
839 diagnostics that use input_location to show locus
840 information. The problem here is that, at this point,
841 input_location is generally anchored to the end of the file
842 (since the parser is long gone), so we don't have a good
843 position to pin it to.
845 To alleviate this problem, this uses the location of T's
846 declaration. Examples of this are
847 testsuite/g++.dg/template/cond2.C and
848 testsuite/g++.dg/template/pr35240.C. */
849 location_t saved_location = input_location;
850 input_location = DECL_SOURCE_LOCATION (t);
852 decl_assembler_name (t);
854 input_location = saved_location;
858 /* When the target supports COMDAT groups, this indicates which group the
859 DECL is associated with. This can be either an IDENTIFIER_NODE or a
860 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
861 tree
862 decl_comdat_group (const_tree node)
864 struct symtab_node *snode = symtab_node::get (node);
865 if (!snode)
866 return NULL;
867 return snode->get_comdat_group ();
870 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
871 tree
872 decl_comdat_group_id (const_tree node)
874 struct symtab_node *snode = symtab_node::get (node);
875 if (!snode)
876 return NULL;
877 return snode->get_comdat_group_id ();
 880 /* When the target supports named sections, return the section name of
 881    NODE as a string, or NULL if it is in no section.  */
882 const char *
883 decl_section_name (const_tree node)
885 struct symtab_node *snode = symtab_node::get (node);
886 if (!snode)
887 return NULL;
888 return snode->get_section ();
 891 /* Set section name of NODE to VALUE (a string, or NULL to remove
 892    the section).  */
893 void
894 set_decl_section_name (tree node, const char *value)
896 struct symtab_node *snode;
898 if (value == NULL)
900 snode = symtab_node::get (node);
901 if (!snode)
902 return;
904 else if (VAR_P (node))
905 snode = varpool_node::get_create (node);
906 else
907 snode = cgraph_node::get_create (node);
908 snode->set_section (value);
911 /* Set section name of NODE to match the section name of OTHER.
913 set_decl_section_name (decl, other) is equivalent to
914 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
915 efficient. */
916 void
917 set_decl_section_name (tree decl, const_tree other)
919 struct symtab_node *other_node = symtab_node::get (other);
920 if (other_node)
922 struct symtab_node *decl_node;
923 if (VAR_P (decl))
924 decl_node = varpool_node::get_create (decl);
925 else
926 decl_node = cgraph_node::get_create (decl);
927 decl_node->set_section (*other_node);
929 else
931 struct symtab_node *decl_node = symtab_node::get (decl);
932 if (!decl_node)
933 return;
934 decl_node->set_section (NULL);
938 /* Return TLS model of a variable NODE. */
939 enum tls_model
940 decl_tls_model (const_tree node)
942 struct varpool_node *snode = varpool_node::get (node);
943 if (!snode)
944 return TLS_MODEL_NONE;
945 return snode->tls_model;
948 /* Set TLS model of variable NODE to MODEL. */
949 void
950 set_decl_tls_model (tree node, enum tls_model model)
952 struct varpool_node *vnode;
954 if (model == TLS_MODEL_NONE)
956 vnode = varpool_node::get (node);
957 if (!vnode)
958 return;
960 else
961 vnode = varpool_node::get_create (node);
962 vnode->tls_model = model;
965 /* Compute the number of bytes occupied by a tree with code CODE.
966 This function cannot be used for nodes that have variable sizes,
967 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
968 size_t
969 tree_code_size (enum tree_code code)
971 switch (TREE_CODE_CLASS (code))
973 case tcc_declaration: /* A decl node */
974 switch (code)
976 case FIELD_DECL: return sizeof (tree_field_decl);
977 case PARM_DECL: return sizeof (tree_parm_decl);
978 case VAR_DECL: return sizeof (tree_var_decl);
979 case LABEL_DECL: return sizeof (tree_label_decl);
980 case RESULT_DECL: return sizeof (tree_result_decl);
981 case CONST_DECL: return sizeof (tree_const_decl);
982 case TYPE_DECL: return sizeof (tree_type_decl);
983 case FUNCTION_DECL: return sizeof (tree_function_decl);
984 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
985 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
986 case NAMESPACE_DECL:
987 case IMPORTED_DECL:
988 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
989 default:
990 gcc_checking_assert (code >= NUM_TREE_CODES);
991 return lang_hooks.tree_size (code);
994 case tcc_type: /* a type node */
995 switch (code)
997 case OFFSET_TYPE:
998 case ENUMERAL_TYPE:
999 case BOOLEAN_TYPE:
1000 case INTEGER_TYPE:
1001 case REAL_TYPE:
1002 case OPAQUE_TYPE:
1003 case POINTER_TYPE:
1004 case REFERENCE_TYPE:
1005 case NULLPTR_TYPE:
1006 case FIXED_POINT_TYPE:
1007 case COMPLEX_TYPE:
1008 case VECTOR_TYPE:
1009 case ARRAY_TYPE:
1010 case RECORD_TYPE:
1011 case UNION_TYPE:
1012 case QUAL_UNION_TYPE:
1013 case VOID_TYPE:
1014 case FUNCTION_TYPE:
1015 case METHOD_TYPE:
1016 case LANG_TYPE: return sizeof (tree_type_non_common);
1017 default:
1018 gcc_checking_assert (code >= NUM_TREE_CODES);
1019 return lang_hooks.tree_size (code);
1022 case tcc_reference: /* a reference */
1023 case tcc_expression: /* an expression */
1024 case tcc_statement: /* an expression with side effects */
1025 case tcc_comparison: /* a comparison expression */
1026 case tcc_unary: /* a unary arithmetic expression */
1027 case tcc_binary: /* a binary arithmetic expression */
1028 return (sizeof (struct tree_exp)
1029 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
1031 case tcc_constant: /* a constant */
1032 switch (code)
1034 case VOID_CST: return sizeof (tree_typed);
1035 case INTEGER_CST: gcc_unreachable ();
1036 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
1037 case REAL_CST: return sizeof (tree_real_cst);
1038 case FIXED_CST: return sizeof (tree_fixed_cst);
1039 case COMPLEX_CST: return sizeof (tree_complex);
1040 case VECTOR_CST: gcc_unreachable ();
1041 case STRING_CST: gcc_unreachable ();
1042 default:
1043 gcc_checking_assert (code >= NUM_TREE_CODES);
1044 return lang_hooks.tree_size (code);
1047 case tcc_exceptional: /* something random, like an identifier. */
1048 switch (code)
1050 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
1051 case TREE_LIST: return sizeof (tree_list);
1053 case ERROR_MARK:
1054 case PLACEHOLDER_EXPR: return sizeof (tree_common);
1056 case TREE_VEC: gcc_unreachable ();
1057 case OMP_CLAUSE: gcc_unreachable ();
1059 case SSA_NAME: return sizeof (tree_ssa_name);
1061 case STATEMENT_LIST: return sizeof (tree_statement_list);
1062 case BLOCK: return sizeof (struct tree_block);
1063 case CONSTRUCTOR: return sizeof (tree_constructor);
1064 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
1065 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
1067 default:
1068 gcc_checking_assert (code >= NUM_TREE_CODES);
1069 return lang_hooks.tree_size (code);
1072 default:
1073 gcc_unreachable ();
1077 /* Compute the number of bytes occupied by NODE. This routine only
1078 looks at TREE_CODE, except for those nodes that have variable sizes. */
1079 size_t
1080 tree_size (const_tree node)
1082 const enum tree_code code = TREE_CODE (node);
1083 switch (code)
1085 case INTEGER_CST:
1086 return (sizeof (struct tree_int_cst)
1087 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1089 case TREE_BINFO:
1090 return (offsetof (struct tree_binfo, base_binfos)
1091 + vec<tree, va_gc>
1092 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1094 case TREE_VEC:
1095 return (sizeof (struct tree_vec)
1096 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1098 case VECTOR_CST:
1099 return (sizeof (struct tree_vector)
1100 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1102 case STRING_CST:
1103 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1105 case OMP_CLAUSE:
1106 return (sizeof (struct tree_omp_clause)
1107 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1108 * sizeof (tree));
1110 default:
1111 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1112 return (sizeof (struct tree_exp)
1113 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1114 else
1115 return tree_code_size (code);
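
/* Illustrative sketch, not part of tree.cc: tree_code_size is enough for
   fixed-size codes, while variable-size codes such as INTEGER_CST must go
   through tree_size on a concrete node.  */

static size_t ATTRIBUTE_UNUSED
example_node_sizes (void)
{
  /* PLUS_EXPR nodes have a size determined by the code alone.  */
  size_t fixed = tree_code_size (PLUS_EXPR);
  /* INTEGER_CST sizes depend on TREE_INT_CST_EXT_NUNITS of the particular
     constant, so the node itself is needed.  */
  tree forty_two = build_int_cst (integer_type_node, 42);
  return fixed + tree_size (forty_two);
}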
1119 /* Return tree node kind based on tree CODE. */
1121 static tree_node_kind
1122 get_stats_node_kind (enum tree_code code)
1124 enum tree_code_class type = TREE_CODE_CLASS (code);
1126 switch (type)
1128 case tcc_declaration: /* A decl node */
1129 return d_kind;
1130 case tcc_type: /* a type node */
1131 return t_kind;
1132 case tcc_statement: /* an expression with side effects */
1133 return s_kind;
1134 case tcc_reference: /* a reference */
1135 return r_kind;
1136 case tcc_expression: /* an expression */
1137 case tcc_comparison: /* a comparison expression */
1138 case tcc_unary: /* a unary arithmetic expression */
1139 case tcc_binary: /* a binary arithmetic expression */
1140 return e_kind;
1141 case tcc_constant: /* a constant */
1142 return c_kind;
1143 case tcc_exceptional: /* something random, like an identifier. */
1144 switch (code)
1146 case IDENTIFIER_NODE:
1147 return id_kind;
1148 case TREE_VEC:
1149 return vec_kind;
1150 case TREE_BINFO:
1151 return binfo_kind;
1152 case SSA_NAME:
1153 return ssa_name_kind;
1154 case BLOCK:
1155 return b_kind;
1156 case CONSTRUCTOR:
1157 return constr_kind;
1158 case OMP_CLAUSE:
1159 return omp_clause_kind;
1160 default:
1161 return x_kind;
1163 break;
1164 case tcc_vl_exp:
1165 return e_kind;
1166 default:
1167 gcc_unreachable ();
1171 /* Record interesting allocation statistics for a tree node with CODE
1172 and LENGTH. */
1174 static void
1175 record_node_allocation_statistics (enum tree_code code, size_t length)
1177 if (!GATHER_STATISTICS)
1178 return;
1180 tree_node_kind kind = get_stats_node_kind (code);
1182 tree_code_counts[(int) code]++;
1183 tree_node_counts[(int) kind]++;
1184 tree_node_sizes[(int) kind] += length;
1187 /* Allocate and return a new UID from the DECL_UID namespace. */
1190 allocate_decl_uid (void)
1192 return next_decl_uid++;
1195 /* Return a newly allocated node of code CODE. For decl and type
1196 nodes, some other fields are initialized. The rest of the node is
1197 initialized to zero. This function cannot be used for TREE_VEC,
1198 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1199 tree_code_size.
1201 Achoo! I got a code in the node. */
1203 tree
1204 make_node (enum tree_code code MEM_STAT_DECL)
1206 tree t;
1207 enum tree_code_class type = TREE_CODE_CLASS (code);
1208 size_t length = tree_code_size (code);
1210 record_node_allocation_statistics (code, length);
1212 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1213 TREE_SET_CODE (t, code);
1215 switch (type)
1217 case tcc_statement:
1218 if (code != DEBUG_BEGIN_STMT)
1219 TREE_SIDE_EFFECTS (t) = 1;
1220 break;
1222 case tcc_declaration:
1223 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1225 if (code == FUNCTION_DECL)
1227 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1228 SET_DECL_MODE (t, FUNCTION_MODE);
1230 else
1231 SET_DECL_ALIGN (t, 1);
1233 DECL_SOURCE_LOCATION (t) = input_location;
1234 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1235 DECL_UID (t) = --next_debug_decl_uid;
1236 else
1238 DECL_UID (t) = allocate_decl_uid ();
1239 SET_DECL_PT_UID (t, -1);
1241 if (TREE_CODE (t) == LABEL_DECL)
1242 LABEL_DECL_UID (t) = -1;
1244 break;
1246 case tcc_type:
1247 TYPE_UID (t) = next_type_uid++;
1248 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1249 TYPE_USER_ALIGN (t) = 0;
1250 TYPE_MAIN_VARIANT (t) = t;
1251 TYPE_CANONICAL (t) = t;
1253 /* Default to no attributes for type, but let target change that. */
1254 TYPE_ATTRIBUTES (t) = NULL_TREE;
1255 targetm.set_default_type_attributes (t);
1257 /* We have not yet computed the alias set for this type. */
1258 TYPE_ALIAS_SET (t) = -1;
1259 break;
1261 case tcc_constant:
1262 TREE_CONSTANT (t) = 1;
1263 break;
1265 case tcc_expression:
1266 switch (code)
1268 case INIT_EXPR:
1269 case MODIFY_EXPR:
1270 case VA_ARG_EXPR:
1271 case PREDECREMENT_EXPR:
1272 case PREINCREMENT_EXPR:
1273 case POSTDECREMENT_EXPR:
1274 case POSTINCREMENT_EXPR:
1275 /* All of these have side-effects, no matter what their
1276 operands are. */
1277 TREE_SIDE_EFFECTS (t) = 1;
1278 break;
1280 default:
1281 break;
1283 break;
1285 case tcc_exceptional:
1286 switch (code)
1288 case TARGET_OPTION_NODE:
1289 TREE_TARGET_OPTION(t)
1290 = ggc_cleared_alloc<struct cl_target_option> ();
1291 break;
1293 case OPTIMIZATION_NODE:
1294 TREE_OPTIMIZATION (t)
1295 = ggc_cleared_alloc<struct cl_optimization> ();
1296 break;
1298 default:
1299 break;
1301 break;
1303 default:
1304 /* Other classes need no special treatment. */
1305 break;
1308 return t;
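
/* Illustrative sketch, not part of tree.cc: the defaults make_node applies
   to a fresh type node, as set in the tcc_type case above.  */

static void ATTRIBUTE_UNUSED
example_fresh_type_defaults (void)
{
  tree t = make_node (INTEGER_TYPE);
  gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
  gcc_checking_assert (TYPE_CANONICAL (t) == t);
  gcc_checking_assert (TYPE_ALIGN (t) == BITS_PER_UNIT);
}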
1311 /* Free tree node. */
1313 void
1314 free_node (tree node)
1316 enum tree_code code = TREE_CODE (node);
1317 if (GATHER_STATISTICS)
1319 enum tree_node_kind kind = get_stats_node_kind (code);
1321 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1322 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1323 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1325 tree_code_counts[(int) TREE_CODE (node)]--;
1326 tree_node_counts[(int) kind]--;
1327 tree_node_sizes[(int) kind] -= tree_size (node);
1329 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1330 vec_free (CONSTRUCTOR_ELTS (node));
1331 else if (code == BLOCK)
1332 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1333 else if (code == TREE_BINFO)
1334 vec_free (BINFO_BASE_ACCESSES (node));
1335 else if (code == OPTIMIZATION_NODE)
1336 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1337 else if (code == TARGET_OPTION_NODE)
1338 cl_target_option_free (TREE_TARGET_OPTION (node));
1339 ggc_free (node);
1342 /* Return a new node with the same contents as NODE except that its
1343 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1345 tree
1346 copy_node (tree node MEM_STAT_DECL)
1348 tree t;
1349 enum tree_code code = TREE_CODE (node);
1350 size_t length;
1352 gcc_assert (code != STATEMENT_LIST);
1354 length = tree_size (node);
1355 record_node_allocation_statistics (code, length);
1356 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1357 memcpy (t, node, length);
1359 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1360 TREE_CHAIN (t) = 0;
1361 TREE_ASM_WRITTEN (t) = 0;
1362 TREE_VISITED (t) = 0;
1364 if (TREE_CODE_CLASS (code) == tcc_declaration)
1366 if (code == DEBUG_EXPR_DECL)
1367 DECL_UID (t) = --next_debug_decl_uid;
1368 else
1370 DECL_UID (t) = allocate_decl_uid ();
1371 if (DECL_PT_UID_SET_P (node))
1372 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1374 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1375 && DECL_HAS_VALUE_EXPR_P (node))
1377 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1378 DECL_HAS_VALUE_EXPR_P (t) = 1;
1380 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1381 if (VAR_P (node))
1383 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1384 t->decl_with_vis.symtab_node = NULL;
1386 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1388 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1389 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1391 if (TREE_CODE (node) == FUNCTION_DECL)
1393 DECL_STRUCT_FUNCTION (t) = NULL;
1394 t->decl_with_vis.symtab_node = NULL;
1397 else if (TREE_CODE_CLASS (code) == tcc_type)
1399 TYPE_UID (t) = next_type_uid++;
1400 /* The following is so that the debug code for
1401 the copy is different from the original type.
1402 The two statements usually duplicate each other
1403 (because they clear fields of the same union),
1404 but the optimizer should catch that. */
1405 TYPE_SYMTAB_ADDRESS (t) = 0;
1406 TYPE_SYMTAB_DIE (t) = 0;
1408 /* Do not copy the values cache. */
1409 if (TYPE_CACHED_VALUES_P (t))
1411 TYPE_CACHED_VALUES_P (t) = 0;
1412 TYPE_CACHED_VALUES (t) = NULL_TREE;
1415 else if (code == TARGET_OPTION_NODE)
1417 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1418 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1419 sizeof (struct cl_target_option));
1421 else if (code == OPTIMIZATION_NODE)
1423 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1424 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1425 sizeof (struct cl_optimization));
1428 return t;
1431 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1432 For example, this can copy a list made of TREE_LIST nodes. */
1434 tree
1435 copy_list (tree list)
1437 tree head;
1438 tree prev, next;
1440 if (list == 0)
1441 return 0;
1443 head = prev = copy_node (list);
1444 next = TREE_CHAIN (list);
1445 while (next)
1447 TREE_CHAIN (prev) = copy_node (next);
1448 prev = TREE_CHAIN (prev);
1449 next = TREE_CHAIN (next);
1451 return head;
1455 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1456 INTEGER_CST with value CST and type TYPE. */
1458 static unsigned int
1459 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1461 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1462 /* We need extra HWIs if CST is an unsigned integer with its
1463 upper bit set. */
1464 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1465 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1466 return cst.get_len ();
1469 /* Return a new INTEGER_CST with value CST and type TYPE. */
1471 static tree
1472 build_new_int_cst (tree type, const wide_int &cst)
1474 unsigned int len = cst.get_len ();
1475 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1476 tree nt = make_int_cst (len, ext_len);
1478 if (len < ext_len)
1480 --ext_len;
1481 TREE_INT_CST_ELT (nt, ext_len)
1482 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1483 for (unsigned int i = len; i < ext_len; ++i)
1484 TREE_INT_CST_ELT (nt, i) = -1;
1486 else if (TYPE_UNSIGNED (type)
1487 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1489 len--;
1490 TREE_INT_CST_ELT (nt, len)
1491 = zext_hwi (cst.elt (len),
1492 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1495 for (unsigned int i = 0; i < len; i++)
1496 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1497 TREE_TYPE (nt) = type;
1498 return nt;
1501 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1503 static tree
1504 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1505 CXX_MEM_STAT_INFO)
1507 size_t length = sizeof (struct tree_poly_int_cst);
1508 record_node_allocation_statistics (POLY_INT_CST, length);
1510 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1512 TREE_SET_CODE (t, POLY_INT_CST);
1513 TREE_CONSTANT (t) = 1;
1514 TREE_TYPE (t) = type;
1515 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1516 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1517 return t;
1520 /* Create a constant tree that contains CST sign-extended to TYPE. */
1522 tree
1523 build_int_cst (tree type, poly_int64 cst)
1525 /* Support legacy code. */
1526 if (!type)
1527 type = integer_type_node;
1529 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1532 /* Create a constant tree that contains CST zero-extended to TYPE. */
1534 tree
1535 build_int_cstu (tree type, poly_uint64 cst)
1537 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1540 /* Create a constant tree that contains CST sign-extended to TYPE. */
1542 tree
1543 build_int_cst_type (tree type, poly_int64 cst)
1545 gcc_assert (type);
1546 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
 1549 /* Constructs a tree of type TYPE with the value given by CST.  Signedness
1550 of CST is assumed to be the same as the signedness of TYPE. */
1552 tree
1553 double_int_to_tree (tree type, double_int cst)
1555 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1558 /* We force the wide_int CST to the range of the type TYPE by sign or
1559 zero extending it. OVERFLOWABLE indicates if we are interested in
1560 overflow of the value, when >0 we are only interested in signed
1561 overflow, for <0 we are interested in any overflow. OVERFLOWED
 1562    indicates whether overflow has already occurred.  We force the
 1563    result's value to be within the range of TYPE (by setting to 0 or 1
 1564    all the bits outside the type's range).  We set TREE_OVERFLOW if
 1565    OVERFLOWED is nonzero,
 1566    or OVERFLOWABLE is >0 and signed overflow occurs,
 1567    or OVERFLOWABLE is <0 and any overflow occurs.
 1569    We return a new tree node for the extended wide_int.  The node
 1570    is shared if no overflow flags are set.  */
1573 tree
1574 force_fit_type (tree type, const poly_wide_int_ref &cst,
1575 int overflowable, bool overflowed)
1577 signop sign = TYPE_SIGN (type);
1579 /* If we need to set overflow flags, return a new unshared node. */
1580 if (overflowed || !wi::fits_to_tree_p (cst, type))
1582 if (overflowed
1583 || overflowable < 0
1584 || (overflowable > 0 && sign == SIGNED))
1586 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1587 sign);
1588 tree t;
1589 if (tmp.is_constant ())
1590 t = build_new_int_cst (type, tmp.coeffs[0]);
1591 else
1593 tree coeffs[NUM_POLY_INT_COEFFS];
1594 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1596 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1597 TREE_OVERFLOW (coeffs[i]) = 1;
1599 t = build_new_poly_int_cst (type, coeffs);
1601 TREE_OVERFLOW (t) = 1;
1602 return t;
1606 /* Else build a shared node. */
1607 return wide_int_to_tree (type, cst);
1610 /* These are the hash table functions for the hash table of INTEGER_CST
 1611    nodes.  */
1613 /* Return the hash code X, an INTEGER_CST. */
1615 hashval_t
1616 int_cst_hasher::hash (tree x)
1618 const_tree const t = x;
1619 hashval_t code = TYPE_UID (TREE_TYPE (t));
1620 int i;
1622 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1623 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1625 return code;
1628 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
 1629    is the same as that given by *Y.  */
1631 bool
1632 int_cst_hasher::equal (tree x, tree y)
1634 const_tree const xt = x;
1635 const_tree const yt = y;
1637 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1638 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1639 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1640 return false;
1642 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1643 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1644 return false;
1646 return true;
1649 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1650 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1651 number of slots that can be cached for the type. */
1653 static inline tree
1654 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1655 int slot, int max_slots)
1657 gcc_checking_assert (slot >= 0);
1658 /* Initialize cache. */
1659 if (!TYPE_CACHED_VALUES_P (type))
1661 TYPE_CACHED_VALUES_P (type) = 1;
1662 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1664 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1665 if (!t)
1667 /* Create a new shared int. */
1668 t = build_new_int_cst (type, cst);
1669 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1671 return t;
1674 /* Create an INT_CST node of TYPE and value CST.
1675 The returned node is always shared. For small integers we use a
1676 per-type vector cache, for larger ones we use a single hash table.
1677 The value is extended from its precision according to the sign of
1678 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1679 the upper bits and ensures that hashing and value equality based
1680 upon the underlying HOST_WIDE_INTs works without masking. */
1682 static tree
1683 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1685 tree t;
1686 int ix = -1;
1687 int limit = 0;
1689 gcc_assert (type);
1690 unsigned int prec = TYPE_PRECISION (type);
1691 signop sgn = TYPE_SIGN (type);
1693 /* Verify that everything is canonical. */
1694 int l = pcst.get_len ();
1695 if (l > 1)
1697 if (pcst.elt (l - 1) == 0)
1698 gcc_checking_assert (pcst.elt (l - 2) < 0);
1699 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1700 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1703 wide_int cst = wide_int::from (pcst, prec, sgn);
1704 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1706 enum tree_code code = TREE_CODE (type);
1707 if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1709 /* Cache NULL pointer and zero bounds. */
1710 if (cst == 0)
1711 ix = 0;
1712 /* Cache upper bounds of pointers. */
1713 else if (cst == wi::max_value (prec, sgn))
1714 ix = 1;
1715 /* Cache 1 which is used for a non-zero range. */
1716 else if (cst == 1)
1717 ix = 2;
1719 if (ix >= 0)
1721 t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1722 /* Make sure no one is clobbering the shared constant. */
1723 gcc_checking_assert (TREE_TYPE (t) == type
1724 && cst == wi::to_wide (t));
1725 return t;
1728 if (ext_len == 1)
1730 /* We just need to store a single HOST_WIDE_INT. */
1731 HOST_WIDE_INT hwi;
1732 if (TYPE_UNSIGNED (type))
1733 hwi = cst.to_uhwi ();
1734 else
1735 hwi = cst.to_shwi ();
1737 switch (code)
1739 case NULLPTR_TYPE:
1740 gcc_assert (hwi == 0);
1741 /* Fallthru. */
1743 case POINTER_TYPE:
1744 case REFERENCE_TYPE:
1745 /* Ignore pointers, as they were already handled above. */
1746 break;
1748 case BOOLEAN_TYPE:
1749 /* Cache false or true. */
1750 limit = 2;
1751 if (IN_RANGE (hwi, 0, 1))
1752 ix = hwi;
1753 break;
1755 case INTEGER_TYPE:
1756 case OFFSET_TYPE:
1757 if (TYPE_SIGN (type) == UNSIGNED)
1759 /* Cache [0, N). */
1760 limit = param_integer_share_limit;
1761 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1762 ix = hwi;
1764 else
1766 /* Cache [-1, N). */
1767 limit = param_integer_share_limit + 1;
1768 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1769 ix = hwi + 1;
1771 break;
1773 case ENUMERAL_TYPE:
1774 break;
1776 default:
1777 gcc_unreachable ();
1780 if (ix >= 0)
1782 t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1783 /* Make sure no one is clobbering the shared constant. */
1784 gcc_checking_assert (TREE_TYPE (t) == type
1785 && TREE_INT_CST_NUNITS (t) == 1
1786 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1787 && TREE_INT_CST_EXT_NUNITS (t) == 1
1788 && TREE_INT_CST_ELT (t, 0) == hwi);
1789 return t;
1791 else
1793 /* Use the cache of larger shared ints, using int_cst_node as
1794 a temporary. */
1796 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1797 TREE_TYPE (int_cst_node) = type;
1799 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1800 t = *slot;
1801 if (!t)
1803 /* Insert this one into the hash table. */
1804 t = int_cst_node;
1805 *slot = t;
1806 /* Make a new node for next time round. */
1807 int_cst_node = make_int_cst (1, 1);
1811 else
1813 /* The value either hashes properly or we drop it on the floor
1814 for the gc to take care of. There will not be enough of them
1815 to worry about. */
1817 tree nt = build_new_int_cst (type, cst);
1818 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1819 t = *slot;
1820 if (!t)
1822 /* Insert this one into the hash table. */
1823 t = nt;
1824 *slot = t;
1826 else
1827 ggc_free (nt);
1830 return t;
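
/* Illustrative sketch, not part of tree.cc: thanks to the per-type
   small-value cache and the hash table used above, requesting the same
   integer constant twice yields one shared node.  */

static void ATTRIBUTE_UNUSED
example_shared_int_cst (void)
{
  tree a = build_int_cst (integer_type_node, 1);
  tree b = build_int_cst (integer_type_node, 1);
  gcc_checking_assert (a == b);	/* Shared, not merely equal in value.  */
}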
1833 hashval_t
1834 poly_int_cst_hasher::hash (tree t)
1836 inchash::hash hstate;
1838 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1839 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1840 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1842 return hstate.end ();
1845 bool
1846 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1848 if (TREE_TYPE (x) != y.first)
1849 return false;
1850 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1851 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1852 return false;
1853 return true;
1856 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1857 The elements must also have type TYPE. */
1859 tree
1860 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1862 unsigned int prec = TYPE_PRECISION (type);
1863 gcc_assert (prec <= values.coeffs[0].get_precision ());
1864 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1866 inchash::hash h;
1867 h.add_int (TYPE_UID (type));
1868 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1869 h.add_wide_int (c.coeffs[i]);
1870 poly_int_cst_hasher::compare_type comp (type, &c);
1871 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1872 INSERT);
1873 if (*slot == NULL_TREE)
1875 tree coeffs[NUM_POLY_INT_COEFFS];
1876 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1877 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1878 *slot = build_new_poly_int_cst (type, coeffs);
1880 return *slot;
1883 /* Create a constant tree with value VALUE in type TYPE. */
1885 tree
1886 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1888 if (value.is_constant ())
1889 return wide_int_to_tree_1 (type, value.coeffs[0]);
1890 return build_poly_int_cst (type, value);
 1893 /* Insert INTEGER_CST T into a cache of integer constants, and return
1894 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1895 is false, and T falls into the type's 'smaller values' range, there
1896 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1897 or the value is large, should an existing entry exist, it is
1898 returned (rather than inserting T). */
1900 tree
1901 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1903 tree type = TREE_TYPE (t);
1904 int ix = -1;
1905 int limit = 0;
1906 int prec = TYPE_PRECISION (type);
1908 gcc_assert (!TREE_OVERFLOW (t));
1910 /* The caching indices here must match those in
 1911    wide_int_to_tree_1.  */
1912 switch (TREE_CODE (type))
1914 case NULLPTR_TYPE:
1915 gcc_checking_assert (integer_zerop (t));
1916 /* Fallthru. */
1918 case POINTER_TYPE:
1919 case REFERENCE_TYPE:
1921 if (integer_zerop (t))
1922 ix = 0;
1923 else if (integer_onep (t))
1924 ix = 2;
1926 if (ix >= 0)
1927 limit = 3;
1929 break;
1931 case BOOLEAN_TYPE:
1932 /* Cache false or true. */
1933 limit = 2;
1934 if (wi::ltu_p (wi::to_wide (t), 2))
1935 ix = TREE_INT_CST_ELT (t, 0);
1936 break;
1938 case INTEGER_TYPE:
1939 case OFFSET_TYPE:
1940 if (TYPE_UNSIGNED (type))
1942 /* Cache 0..N */
1943 limit = param_integer_share_limit;
 1945 	    /* This is a little hokey, but if the prec is smaller than
1946 what is necessary to hold param_integer_share_limit, then the
1947 obvious test will not get the correct answer. */
1948 if (prec < HOST_BITS_PER_WIDE_INT)
1950 if (tree_to_uhwi (t)
1951 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1952 ix = tree_to_uhwi (t);
1954 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1955 ix = tree_to_uhwi (t);
1957 else
1959 /* Cache -1..N */
1960 limit = param_integer_share_limit + 1;
1962 if (integer_minus_onep (t))
1963 ix = 0;
1964 else if (!wi::neg_p (wi::to_wide (t)))
1966 if (prec < HOST_BITS_PER_WIDE_INT)
1968 if (tree_to_shwi (t) < param_integer_share_limit)
1969 ix = tree_to_shwi (t) + 1;
1971 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1972 ix = tree_to_shwi (t) + 1;
1975 break;
1977 case ENUMERAL_TYPE:
1978 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1979 members. */
1980 break;
1982 default:
1983 gcc_unreachable ();
1986 if (ix >= 0)
1988 /* Look for it in the type's vector of small shared ints. */
1989 if (!TYPE_CACHED_VALUES_P (type))
1991 TYPE_CACHED_VALUES_P (type) = 1;
1992 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1995 if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
1997 gcc_checking_assert (might_duplicate);
1998 t = r;
2000 else
2001 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
2003 else
2005 /* Use the cache of larger shared ints. */
2006 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
2007 if (tree r = *slot)
2009 /* If there is already an entry for the number, verify it's the
2010 same value. */
2011 gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
2012 /* And return the cached value. */
2013 t = r;
2015 else
2016 /* Otherwise insert this one into the hash table. */
2017 *slot = t;
2020 return t;
2024 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
2025 and the rest are zeros. */
2027 tree
2028 build_low_bits_mask (tree type, unsigned bits)
2030 gcc_assert (bits <= TYPE_PRECISION (type));
2032 return wide_int_to_tree (type, wi::mask (bits, false,
2033 TYPE_PRECISION (type)));
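/* For illustration, assuming UTYPE is a hypothetical 32-bit unsigned
   INTEGER_TYPE node:

     tree mask = build_low_bits_mask (utype, 4);

   yields an INTEGER_CST with value 0xf, i.e. only the low four bits set.  */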
2036 /* Checks that X is an integer constant that can be expressed in (unsigned)
2037 HOST_WIDE_INT without loss of precision. */
2039 bool
2040 cst_and_fits_in_hwi (const_tree x)
2042 return (TREE_CODE (x) == INTEGER_CST
2043 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2046 /* Build a newly constructed VECTOR_CST with the given values of
2047 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2049 tree
2050 make_vector (unsigned log2_npatterns,
2051 unsigned int nelts_per_pattern MEM_STAT_DECL)
2053 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2054 tree t;
2055 unsigned npatterns = 1 << log2_npatterns;
2056 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2057 unsigned length = (sizeof (struct tree_vector)
2058 + (encoded_nelts - 1) * sizeof (tree));
2060 record_node_allocation_statistics (VECTOR_CST, length);
2062 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2064 TREE_SET_CODE (t, VECTOR_CST);
2065 TREE_CONSTANT (t) = 1;
2066 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2067 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2069 return t;
2072 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2073 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2075 tree
2076 build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
2078 if (vec_safe_length (v) == 0)
2079 return build_zero_cst (type);
2081 unsigned HOST_WIDE_INT idx, nelts;
2082 tree value;
2084 /* We can't construct a VECTOR_CST for a variable number of elements. */
2085 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2086 tree_vector_builder vec (type, nelts, 1);
2087 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2089 if (TREE_CODE (value) == VECTOR_CST)
2091 /* If NELTS is constant then this must be too. */
2092 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2093 for (unsigned i = 0; i < sub_nelts; ++i)
2094 vec.quick_push (VECTOR_CST_ELT (value, i));
2096 else
2097 vec.quick_push (value);
2099 while (vec.length () < nelts)
2100 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2102 return vec.build ();
2105 /* Build a vector of type VECTYPE in which every element is SC. */
2106 tree
2107 build_vector_from_val (tree vectype, tree sc)
2109 unsigned HOST_WIDE_INT i, nunits;
2111 if (sc == error_mark_node)
2112 return sc;
2114 /* Verify that the vector type is suitable for SC. Note that there
2115 is some inconsistency in the type-system with respect to restrict
2116 qualifications of pointers. Vector types always have a main-variant
2117 element type and the qualification is applied to the vector-type.
2118 So TREE_TYPE (vector-type) does not return a properly qualified
2119 vector element-type. */
2120 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
2121 TREE_TYPE (vectype)));
2123 if (CONSTANT_CLASS_P (sc))
2125 tree_vector_builder v (vectype, 1, 1);
2126 v.quick_push (sc);
2127 return v.build ();
2129 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
2130 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
2131 else
2133 vec<constructor_elt, va_gc> *v;
2134 vec_alloc (v, nunits);
2135 for (i = 0; i < nunits; ++i)
2136 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2137 return build_constructor (vectype, v);
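/* A sketch of typical use, with V4SI_TYPE standing for a hypothetical
   four-element signed integer vector type node:

     tree one = build_one_cst (TREE_TYPE (v4si_type));
     tree splat = build_vector_from_val (v4si_type, one);

   SPLAT is then the VECTOR_CST { 1, 1, 1, 1 }, encoded as a single
   duplicated pattern.  */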
2141 /* If TYPE is not a vector type, just return SC, otherwise return
2142 build_vector_from_val (TYPE, SC). */
2144 tree
2145 build_uniform_cst (tree type, tree sc)
2147 if (!VECTOR_TYPE_P (type))
2148 return sc;
2150 return build_vector_from_val (type, sc);
2153 /* Build a vector series of type TYPE in which element I has the value
2154 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2155 and a VEC_SERIES_EXPR otherwise. */
2157 tree
2158 build_vec_series (tree type, tree base, tree step)
2160 if (integer_zerop (step))
2161 return build_vector_from_val (type, base);
2162 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2164 tree_vector_builder builder (type, 1, 3);
2165 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2166 wi::to_wide (base) + wi::to_wide (step));
2167 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2168 wi::to_wide (elt1) + wi::to_wide (step));
2169 builder.quick_push (base);
2170 builder.quick_push (elt1);
2171 builder.quick_push (elt2);
2172 return builder.build ();
2174 return build2 (VEC_SERIES_EXPR, type, base, step);
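/* For example, with a four-element integer vector type and the
   INTEGER_CSTs 10 and 3 as BASE and STEP, the builder encodes
   { 10, 13, 16 }, which expands to the constant { 10, 13, 16, 19 };
   a non-constant BASE or STEP instead yields a VEC_SERIES_EXPR.  */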
2177 /* Return a vector with the same number of units and number of bits
2178 as VEC_TYPE, but in which the elements are a linear series of unsigned
2179 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2181 tree
2182 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2184 tree index_vec_type = vec_type;
2185 tree index_elt_type = TREE_TYPE (vec_type);
2186 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2187 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2189 index_elt_type = build_nonstandard_integer_type
2190 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2191 index_vec_type = build_vector_type (index_elt_type, nunits);
2194 tree_vector_builder v (index_vec_type, 1, 3);
2195 for (unsigned int i = 0; i < 3; ++i)
2196 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2197 return v.build ();
2200 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2201 elements are A and the rest are B. */
2203 tree
2204 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2206 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2207 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2208 /* Optimize the constant case. */
2209 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2210 count /= 2;
2211 tree_vector_builder builder (vec_type, count, 2);
2212 for (unsigned int i = 0; i < count * 2; ++i)
2213 builder.quick_push (i < num_a ? a : b);
2214 return builder.build ();
2217 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2218 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2220 void
2221 recompute_constructor_flags (tree c)
2223 unsigned int i;
2224 tree val;
2225 bool constant_p = true;
2226 bool side_effects_p = false;
2227 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2229 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2231 /* Mostly ctors will have elts that don't have side-effects, so
2232 the usual case is to scan all the elements. Hence a single
2233 loop for both const and side effects, rather than one loop
2234 each (with early outs). */
2235 if (!TREE_CONSTANT (val))
2236 constant_p = false;
2237 if (TREE_SIDE_EFFECTS (val))
2238 side_effects_p = true;
2241 TREE_SIDE_EFFECTS (c) = side_effects_p;
2242 TREE_CONSTANT (c) = constant_p;
2245 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2246 CONSTRUCTOR C. */
2248 void
2249 verify_constructor_flags (tree c)
2251 unsigned int i;
2252 tree val;
2253 bool constant_p = TREE_CONSTANT (c);
2254 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2255 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2257 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2259 if (constant_p && !TREE_CONSTANT (val))
2260 internal_error ("non-constant element in constant CONSTRUCTOR");
2261 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2262 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2266 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2267 are in the vec pointed to by VALS. */
2268 tree
2269 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2271 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2273 TREE_TYPE (c) = type;
2274 CONSTRUCTOR_ELTS (c) = vals;
2276 recompute_constructor_flags (c);
2278 return c;
2281 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2282 INDEX and VALUE. */
2283 tree
2284 build_constructor_single (tree type, tree index, tree value)
2286 vec<constructor_elt, va_gc> *v;
2287 constructor_elt elt = {index, value};
2289 vec_alloc (v, 1);
2290 v->quick_push (elt);
2292 return build_constructor (type, v);
2296 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2297 are in a list pointed to by VALS. */
2298 tree
2299 build_constructor_from_list (tree type, tree vals)
2301 tree t;
2302 vec<constructor_elt, va_gc> *v = NULL;
2304 if (vals)
2306 vec_alloc (v, list_length (vals));
2307 for (t = vals; t; t = TREE_CHAIN (t))
2308 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2311 return build_constructor (type, v);
2314 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2315 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2316 fields in the constructor remain null. */
2318 tree
2319 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2321 vec<constructor_elt, va_gc> *v = NULL;
2323 for (tree t : vals)
2324 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2326 return build_constructor (type, v);
2329 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2330 of elements, provided as index/value pairs. */
2332 tree
2333 build_constructor_va (tree type, int nelts, ...)
2335 vec<constructor_elt, va_gc> *v = NULL;
2336 va_list p;
2338 va_start (p, nelts);
2339 vec_alloc (v, nelts);
2340 while (nelts--)
2342 tree index = va_arg (p, tree);
2343 tree value = va_arg (p, tree);
2344 CONSTRUCTOR_APPEND_ELT (v, index, value);
2346 va_end (p);
2347 return build_constructor (type, v);
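/* A sketch of a call site, with REC_TYPE, F1 and F2 standing for a
   hypothetical RECORD_TYPE and two of its FIELD_DECLs:

     tree ctor = build_constructor_va (rec_type, 2,
				       f1, build_int_cst (TREE_TYPE (f1), 1),
				       f2, build_zero_cst (TREE_TYPE (f2)));

   i.e. the variadic arguments are NELTS index/value pairs.  */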
2350 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2352 tree
2353 build_clobber (tree type, enum clobber_kind kind)
2355 tree clobber = build_constructor (type, NULL);
2356 TREE_THIS_VOLATILE (clobber) = true;
2357 CLOBBER_KIND (clobber) = kind;
2358 return clobber;
2361 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2363 tree
2364 build_fixed (tree type, FIXED_VALUE_TYPE f)
2366 tree v;
2367 FIXED_VALUE_TYPE *fp;
2369 v = make_node (FIXED_CST);
2370 fp = ggc_alloc<fixed_value> ();
2371 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2373 TREE_TYPE (v) = type;
2374 TREE_FIXED_CST_PTR (v) = fp;
2375 return v;
2378 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2380 tree
2381 build_real (tree type, REAL_VALUE_TYPE d)
2383 tree v;
2384 int overflow = 0;
2386 /* dconst{1,2,m1,half} are used in various places in
2387 the middle-end and optimizers, allow them here
2388 even for decimal floating point types as an exception
2389 by converting them to decimal. */
2390 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
2391 && d.cl == rvc_normal
2392 && !d.decimal)
2394 if (memcmp (&d, &dconst1, sizeof (d)) == 0)
2395 decimal_real_from_string (&d, "1");
2396 else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
2397 decimal_real_from_string (&d, "2");
2398 else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
2399 decimal_real_from_string (&d, "-1");
2400 else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
2401 decimal_real_from_string (&d, "0.5");
2402 else
2403 gcc_unreachable ();
2406 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2407 Consider doing it via real_convert now. */
2409 v = make_node (REAL_CST);
2410 TREE_TYPE (v) = type;
2411 memcpy (TREE_REAL_CST_PTR (v), &d, sizeof (REAL_VALUE_TYPE));
2412 TREE_OVERFLOW (v) = overflow;
2413 return v;
2416 /* Like build_real, but first truncate D to the type. */
2418 tree
2419 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2421 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2424 /* Return a new REAL_CST node whose type is TYPE
2425 and whose value is the integer value of the INTEGER_CST node I. */
2427 REAL_VALUE_TYPE
2428 real_value_from_int_cst (const_tree type, const_tree i)
2430 REAL_VALUE_TYPE d;
2432 /* Clear all bits of the real value type so that we can later do
2433 bitwise comparisons to see if two values are the same. */
2434 memset (&d, 0, sizeof d);
2436 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2437 TYPE_SIGN (TREE_TYPE (i)));
2438 return d;
2441 /* Given a tree representing an integer constant I, return a tree
2442 representing the same value as a floating-point constant of type TYPE. */
2444 tree
2445 build_real_from_int_cst (tree type, const_tree i)
2447 tree v;
2448 int overflow = TREE_OVERFLOW (i);
2450 v = build_real (type, real_value_from_int_cst (type, i));
2452 TREE_OVERFLOW (v) |= overflow;
2453 return v;
2456 /* Return a new REAL_CST node whose type is TYPE
2457 and whose value is the integer value I which has sign SGN. */
2459 tree
2460 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2462 REAL_VALUE_TYPE d;
2464 /* Clear all bits of the real value type so that we can later do
2465 bitwise comparisons to see if two values are the same. */
2466 memset (&d, 0, sizeof d);
2468 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2469 return build_real (type, d);
2472 /* Return a newly constructed STRING_CST node whose value is the LEN
2473 characters at STR when STR is nonnull, or all zeros otherwise.
2474 Note that for a C string literal, LEN should include the trailing NUL.
2475 The TREE_TYPE is not initialized. */
2477 tree
2478 build_string (unsigned len, const char *str /*= NULL */)
2480 /* Do not waste bytes provided by padding of struct tree_string. */
2481 unsigned size = len + offsetof (struct tree_string, str) + 1;
2483 record_node_allocation_statistics (STRING_CST, size);
2485 tree s = (tree) ggc_internal_alloc (size);
2487 memset (s, 0, sizeof (struct tree_typed));
2488 TREE_SET_CODE (s, STRING_CST);
2489 TREE_CONSTANT (s) = 1;
2490 TREE_STRING_LENGTH (s) = len;
2491 if (str)
2492 memcpy (s->string.str, str, len);
2493 else
2494 memset (s->string.str, 0, len);
2495 s->string.str[len] = '\0';
2497 return s;
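/* For the C string literal "hi" a front end would normally call
   build_string (3, "hi"), so that LEN counts the trailing NUL; calling
   build_string (2, "hi") instead creates an unterminated two-character
   blob.  In both cases one extra '\0' is stored after the LEN bytes.  */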
2500 /* Return a newly constructed COMPLEX_CST node whose value is
2501 specified by the real and imaginary parts REAL and IMAG.
2502 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2503 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2505 tree
2506 build_complex (tree type, tree real, tree imag)
2508 gcc_assert (CONSTANT_CLASS_P (real));
2509 gcc_assert (CONSTANT_CLASS_P (imag));
2511 tree t = make_node (COMPLEX_CST);
2513 TREE_REALPART (t) = real;
2514 TREE_IMAGPART (t) = imag;
2515 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2516 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2517 return t;
2520 /* Build a complex (inf +- 0i), such as for the result of cproj.
2521 TYPE is the complex tree type of the result. If NEG is true, the
2522 imaginary zero is negative. */
2524 tree
2525 build_complex_inf (tree type, bool neg)
2527 REAL_VALUE_TYPE rinf, rzero = dconst0;
2529 real_inf (&rinf);
2530 rzero.sign = neg;
2531 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2532 build_real (TREE_TYPE (type), rzero));
2535 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2536 element is set to 1. In particular, this is 1 + i for complex types. */
2538 tree
2539 build_each_one_cst (tree type)
2541 if (TREE_CODE (type) == COMPLEX_TYPE)
2543 tree scalar = build_one_cst (TREE_TYPE (type));
2544 return build_complex (type, scalar, scalar);
2546 else
2547 return build_one_cst (type);
2550 /* Return a constant of arithmetic type TYPE which is the
2551 multiplicative identity of the set TYPE. */
2553 tree
2554 build_one_cst (tree type)
2556 switch (TREE_CODE (type))
2558 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2559 case POINTER_TYPE: case REFERENCE_TYPE:
2560 case OFFSET_TYPE:
2561 return build_int_cst (type, 1);
2563 case REAL_TYPE:
2564 return build_real (type, dconst1);
2566 case FIXED_POINT_TYPE:
2567 /* We can only generate 1 for accum types. */
2568 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2569 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2571 case VECTOR_TYPE:
2573 tree scalar = build_one_cst (TREE_TYPE (type));
2575 return build_vector_from_val (type, scalar);
2578 case COMPLEX_TYPE:
2579 return build_complex (type,
2580 build_one_cst (TREE_TYPE (type)),
2581 build_zero_cst (TREE_TYPE (type)));
2583 default:
2584 gcc_unreachable ();
2588 /* Return an integer of type TYPE containing all 1's in as much precision as
2589 it contains, or a complex or vector whose subparts are such integers. */
2591 tree
2592 build_all_ones_cst (tree type)
2594 if (TREE_CODE (type) == COMPLEX_TYPE)
2596 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2597 return build_complex (type, scalar, scalar);
2599 else
2600 return build_minus_one_cst (type);
2603 /* Return a constant of arithmetic type TYPE which is the
2604 opposite of the multiplicative identity of the set TYPE. */
2606 tree
2607 build_minus_one_cst (tree type)
2609 switch (TREE_CODE (type))
2611 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2612 case POINTER_TYPE: case REFERENCE_TYPE:
2613 case OFFSET_TYPE:
2614 return build_int_cst (type, -1);
2616 case REAL_TYPE:
2617 return build_real (type, dconstm1);
2619 case FIXED_POINT_TYPE:
2620 /* We can only generate -1 for accum types. */
2621 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2622 return build_fixed (type,
2623 fixed_from_double_int (double_int_minus_one,
2624 SCALAR_TYPE_MODE (type)));
2626 case VECTOR_TYPE:
2628 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2630 return build_vector_from_val (type, scalar);
2633 case COMPLEX_TYPE:
2634 return build_complex (type,
2635 build_minus_one_cst (TREE_TYPE (type)),
2636 build_zero_cst (TREE_TYPE (type)));
2638 default:
2639 gcc_unreachable ();
2643 /* Build 0 constant of type TYPE. This is used by constructor folding
2644 and thus the constant should be represented in memory by
2645 zero(es). */
2647 tree
2648 build_zero_cst (tree type)
2650 switch (TREE_CODE (type))
2652 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2653 case POINTER_TYPE: case REFERENCE_TYPE:
2654 case OFFSET_TYPE: case NULLPTR_TYPE:
2655 return build_int_cst (type, 0);
2657 case REAL_TYPE:
2658 return build_real (type, dconst0);
2660 case FIXED_POINT_TYPE:
2661 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2663 case VECTOR_TYPE:
2665 tree scalar = build_zero_cst (TREE_TYPE (type));
2667 return build_vector_from_val (type, scalar);
2670 case COMPLEX_TYPE:
2672 tree zero = build_zero_cst (TREE_TYPE (type));
2674 return build_complex (type, zero, zero);
2677 default:
2678 if (!AGGREGATE_TYPE_P (type))
2679 return fold_convert (type, integer_zero_node);
2680 return build_constructor (type, NULL);
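/* For example:

     build_zero_cst (double_type_node)   -> REAL_CST 0.0
     build_zero_cst (ptr_type_node)      -> null pointer INTEGER_CST
     build_zero_cst (some RECORD_TYPE)   -> empty CONSTRUCTOR

   the last of which is how an all-zero aggregate is represented.  */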
2685 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2687 tree
2688 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2690 tree t;
2691 size_t length = (offsetof (struct tree_binfo, base_binfos)
2692 + vec<tree, va_gc>::embedded_size (base_binfos));
2694 record_node_allocation_statistics (TREE_BINFO, length);
2696 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2698 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2700 TREE_SET_CODE (t, TREE_BINFO);
2702 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2704 return t;
2707 /* Create a CASE_LABEL_EXPR tree node and return it. */
2709 tree
2710 build_case_label (tree low_value, tree high_value, tree label_decl)
2712 tree t = make_node (CASE_LABEL_EXPR);
2714 TREE_TYPE (t) = void_type_node;
2715 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2717 CASE_LOW (t) = low_value;
2718 CASE_HIGH (t) = high_value;
2719 CASE_LABEL (t) = label_decl;
2720 CASE_CHAIN (t) = NULL_TREE;
2722 return t;
2725 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2726 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2727 The latter determines the length of the HOST_WIDE_INT vector. */
2729 tree
2730 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2732 tree t;
2733 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2734 + sizeof (struct tree_int_cst));
2736 gcc_assert (len);
2737 record_node_allocation_statistics (INTEGER_CST, length);
2739 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2741 TREE_SET_CODE (t, INTEGER_CST);
2742 TREE_INT_CST_NUNITS (t) = len;
2743 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2744 /* to_offset can only be applied to trees that are offset_int-sized
2745 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2746 must be exactly the precision of offset_int and so LEN is correct. */
2747 if (ext_len <= OFFSET_INT_ELTS)
2748 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2749 else
2750 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2752 TREE_CONSTANT (t) = 1;
2754 return t;
2757 /* Build a newly constructed TREE_VEC node of length LEN. */
2759 tree
2760 make_tree_vec (int len MEM_STAT_DECL)
2762 tree t;
2763 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2765 record_node_allocation_statistics (TREE_VEC, length);
2767 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2769 TREE_SET_CODE (t, TREE_VEC);
2770 TREE_VEC_LENGTH (t) = len;
2772 return t;
2775 /* Grow a TREE_VEC node to new length LEN. */
2777 tree
2778 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2780 gcc_assert (TREE_CODE (v) == TREE_VEC);
2782 int oldlen = TREE_VEC_LENGTH (v);
2783 gcc_assert (len > oldlen);
2785 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2786 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2788 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2790 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2792 TREE_VEC_LENGTH (v) = len;
2794 return v;
2797 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2798 fixed, and scalar, complex or vector. */
2800 bool
2801 zerop (const_tree expr)
2803 return (integer_zerop (expr)
2804 || real_zerop (expr)
2805 || fixed_zerop (expr));
2808 /* Return 1 if EXPR is the integer constant zero or a complex constant
2809 of zero, or a location wrapper for such a constant. */
2811 bool
2812 integer_zerop (const_tree expr)
2814 STRIP_ANY_LOCATION_WRAPPER (expr);
2816 switch (TREE_CODE (expr))
2818 case INTEGER_CST:
2819 return wi::to_wide (expr) == 0;
2820 case COMPLEX_CST:
2821 return (integer_zerop (TREE_REALPART (expr))
2822 && integer_zerop (TREE_IMAGPART (expr)));
2823 case VECTOR_CST:
2824 return (VECTOR_CST_NPATTERNS (expr) == 1
2825 && VECTOR_CST_DUPLICATE_P (expr)
2826 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2827 default:
2828 return false;
2832 /* Return 1 if EXPR is the integer constant one or the corresponding
2833 complex constant, or a location wrapper for such a constant. */
2835 bool
2836 integer_onep (const_tree expr)
2838 STRIP_ANY_LOCATION_WRAPPER (expr);
2840 switch (TREE_CODE (expr))
2842 case INTEGER_CST:
2843 return wi::eq_p (wi::to_widest (expr), 1);
2844 case COMPLEX_CST:
2845 return (integer_onep (TREE_REALPART (expr))
2846 && integer_zerop (TREE_IMAGPART (expr)));
2847 case VECTOR_CST:
2848 return (VECTOR_CST_NPATTERNS (expr) == 1
2849 && VECTOR_CST_DUPLICATE_P (expr)
2850 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2851 default:
2852 return false;
2856 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2857 return 1 if every piece is the integer constant one.
2858 Also return 1 for location wrappers for such a constant. */
2860 bool
2861 integer_each_onep (const_tree expr)
2863 STRIP_ANY_LOCATION_WRAPPER (expr);
2865 if (TREE_CODE (expr) == COMPLEX_CST)
2866 return (integer_onep (TREE_REALPART (expr))
2867 && integer_onep (TREE_IMAGPART (expr)));
2868 else
2869 return integer_onep (expr);
2872 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2873 it contains, or a complex or vector whose subparts are such integers,
2874 or a location wrapper for such a constant. */
2876 bool
2877 integer_all_onesp (const_tree expr)
2879 STRIP_ANY_LOCATION_WRAPPER (expr);
2881 if (TREE_CODE (expr) == COMPLEX_CST
2882 && integer_all_onesp (TREE_REALPART (expr))
2883 && integer_all_onesp (TREE_IMAGPART (expr)))
2884 return true;
2886 else if (TREE_CODE (expr) == VECTOR_CST)
2887 return (VECTOR_CST_NPATTERNS (expr) == 1
2888 && VECTOR_CST_DUPLICATE_P (expr)
2889 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2891 else if (TREE_CODE (expr) != INTEGER_CST)
2892 return false;
2894 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2895 == wi::to_wide (expr));
2898 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2899 for such a constant. */
2901 bool
2902 integer_minus_onep (const_tree expr)
2904 STRIP_ANY_LOCATION_WRAPPER (expr);
2906 if (TREE_CODE (expr) == COMPLEX_CST)
2907 return (integer_all_onesp (TREE_REALPART (expr))
2908 && integer_zerop (TREE_IMAGPART (expr)));
2909 else
2910 return integer_all_onesp (expr);
2913 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2914 one bit on), or a location wrapper for such a constant. */
2916 bool
2917 integer_pow2p (const_tree expr)
2919 STRIP_ANY_LOCATION_WRAPPER (expr);
2921 if (TREE_CODE (expr) == COMPLEX_CST
2922 && integer_pow2p (TREE_REALPART (expr))
2923 && integer_zerop (TREE_IMAGPART (expr)))
2924 return true;
2926 if (TREE_CODE (expr) != INTEGER_CST)
2927 return false;
2929 return wi::popcount (wi::to_wide (expr)) == 1;
2932 /* Return 1 if EXPR is an integer constant other than zero or a
2933 complex constant other than zero, or a location wrapper for such a
2934 constant. */
2936 bool
2937 integer_nonzerop (const_tree expr)
2939 STRIP_ANY_LOCATION_WRAPPER (expr);
2941 return ((TREE_CODE (expr) == INTEGER_CST
2942 && wi::to_wide (expr) != 0)
2943 || (TREE_CODE (expr) == COMPLEX_CST
2944 && (integer_nonzerop (TREE_REALPART (expr))
2945 || integer_nonzerop (TREE_IMAGPART (expr)))));
2948 /* Return 1 if EXPR is the integer constant one. For vector,
2949 return 1 if every piece is the integer constant minus one
2950 (representing the value TRUE).
2951 Also return 1 for location wrappers for such a constant. */
2953 bool
2954 integer_truep (const_tree expr)
2956 STRIP_ANY_LOCATION_WRAPPER (expr);
2958 if (TREE_CODE (expr) == VECTOR_CST)
2959 return integer_all_onesp (expr);
2960 return integer_onep (expr);
2963 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2964 for such a constant. */
2966 bool
2967 fixed_zerop (const_tree expr)
2969 STRIP_ANY_LOCATION_WRAPPER (expr);
2971 return (TREE_CODE (expr) == FIXED_CST
2972 && TREE_FIXED_CST (expr).data.is_zero ());
2975 /* Return the base-2 logarithm of a tree node known to be a
2976 power of two. */
2978 int
2979 tree_log2 (const_tree expr)
2981 if (TREE_CODE (expr) == COMPLEX_CST)
2982 return tree_log2 (TREE_REALPART (expr));
2984 return wi::exact_log2 (wi::to_wide (expr));
2987 /* Similar, but return the largest integer Y such that 2 ** Y is less
2988 than or equal to EXPR. */
2990 int
2991 tree_floor_log2 (const_tree expr)
2993 if (TREE_CODE (expr) == COMPLEX_CST)
2994 return tree_log2 (TREE_REALPART (expr));
2996 return wi::floor_log2 (wi::to_wide (expr));
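/* For the INTEGER_CST 8 both functions return 3; for 12, which is not a
   power of two, tree_floor_log2 still returns 3 since 2**3 <= 12 < 2**4.  */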
2999 /* Return the number of known trailing zero bits in EXPR, or, if the value of
3000 EXPR is known to be zero, the precision of its type. */
3002 unsigned int
3003 tree_ctz (const_tree expr)
3005 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3006 && !POINTER_TYPE_P (TREE_TYPE (expr)))
3007 return 0;
3009 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
3010 switch (TREE_CODE (expr))
3012 case INTEGER_CST:
3013 ret1 = wi::ctz (wi::to_wide (expr));
3014 return MIN (ret1, prec);
3015 case SSA_NAME:
3016 ret1 = wi::ctz (get_nonzero_bits (expr));
3017 return MIN (ret1, prec);
3018 case PLUS_EXPR:
3019 case MINUS_EXPR:
3020 case BIT_IOR_EXPR:
3021 case BIT_XOR_EXPR:
3022 case MIN_EXPR:
3023 case MAX_EXPR:
3024 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3025 if (ret1 == 0)
3026 return ret1;
3027 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3028 return MIN (ret1, ret2);
3029 case POINTER_PLUS_EXPR:
3030 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3031 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3032 /* The second operand is sizetype, which could in theory be
3033 wider than the pointer's precision. Make sure we never
3034 return more than prec. */
3035 ret2 = MIN (ret2, prec);
3036 return MIN (ret1, ret2);
3037 case BIT_AND_EXPR:
3038 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3039 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3040 return MAX (ret1, ret2);
3041 case MULT_EXPR:
3042 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3043 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3044 return MIN (ret1 + ret2, prec);
3045 case LSHIFT_EXPR:
3046 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3047 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3048 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3050 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3051 return MIN (ret1 + ret2, prec);
3053 return ret1;
3054 case RSHIFT_EXPR:
3055 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3056 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3058 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3059 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3060 if (ret1 > ret2)
3061 return ret1 - ret2;
3063 return 0;
3064 case TRUNC_DIV_EXPR:
3065 case CEIL_DIV_EXPR:
3066 case FLOOR_DIV_EXPR:
3067 case ROUND_DIV_EXPR:
3068 case EXACT_DIV_EXPR:
3069 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3070 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
3072 int l = tree_log2 (TREE_OPERAND (expr, 1));
3073 if (l >= 0)
3075 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3076 ret2 = l;
3077 if (ret1 > ret2)
3078 return ret1 - ret2;
3081 return 0;
3082 CASE_CONVERT:
3083 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3084 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
3085 ret1 = prec;
3086 return MIN (ret1, prec);
3087 case SAVE_EXPR:
3088 return tree_ctz (TREE_OPERAND (expr, 0));
3089 case COND_EXPR:
3090 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
3091 if (ret1 == 0)
3092 return 0;
3093 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
3094 return MIN (ret1, ret2);
3095 case COMPOUND_EXPR:
3096 return tree_ctz (TREE_OPERAND (expr, 1));
3097 case ADDR_EXPR:
3098 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
3099 if (ret1 > BITS_PER_UNIT)
3101 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
3102 return MIN (ret1, prec);
3104 return 0;
3105 default:
3106 return 0;
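/* As an illustration, for an SSA_NAME N whose nonzero bits show that it
   is a multiple of 8, tree_ctz returns 3 for N itself, 3 + 1 = 4 for the
   MULT_EXPR N * 2, and 3 - 2 = 1 for the RSHIFT_EXPR N >> 2.  */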
3110 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3111 decimal float constants, so don't return 1 for them.
3112 Also return 1 for location wrappers around such a constant. */
3114 bool
3115 real_zerop (const_tree expr)
3117 STRIP_ANY_LOCATION_WRAPPER (expr);
3119 switch (TREE_CODE (expr))
3121 case REAL_CST:
3122 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3123 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3124 case COMPLEX_CST:
3125 return real_zerop (TREE_REALPART (expr))
3126 && real_zerop (TREE_IMAGPART (expr));
3127 case VECTOR_CST:
3129 /* Don't simply check for a duplicate because the predicate
3130 accepts both +0.0 and -0.0. */
3131 unsigned count = vector_cst_encoded_nelts (expr);
3132 for (unsigned int i = 0; i < count; ++i)
3133 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3134 return false;
3135 return true;
3137 default:
3138 return false;
3142 /* Return 1 if EXPR is the real constant one in real or complex form.
3143 Trailing zeroes matter for decimal float constants, so don't return
3144 1 for them.
3145 Also return 1 for location wrappers around such a constant. */
3147 bool
3148 real_onep (const_tree expr)
3150 STRIP_ANY_LOCATION_WRAPPER (expr);
3152 switch (TREE_CODE (expr))
3154 case REAL_CST:
3155 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3156 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3157 case COMPLEX_CST:
3158 return real_onep (TREE_REALPART (expr))
3159 && real_zerop (TREE_IMAGPART (expr));
3160 case VECTOR_CST:
3161 return (VECTOR_CST_NPATTERNS (expr) == 1
3162 && VECTOR_CST_DUPLICATE_P (expr)
3163 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3164 default:
3165 return false;
3169 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3170 matter for decimal float constants, so don't return 1 for them.
3171 Also return 1 for location wrappers around such a constant. */
3173 bool
3174 real_minus_onep (const_tree expr)
3176 STRIP_ANY_LOCATION_WRAPPER (expr);
3178 switch (TREE_CODE (expr))
3180 case REAL_CST:
3181 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3182 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3183 case COMPLEX_CST:
3184 return real_minus_onep (TREE_REALPART (expr))
3185 && real_zerop (TREE_IMAGPART (expr));
3186 case VECTOR_CST:
3187 return (VECTOR_CST_NPATTERNS (expr) == 1
3188 && VECTOR_CST_DUPLICATE_P (expr)
3189 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3190 default:
3191 return false;
3195 /* Nonzero if EXP is a constant or a cast of a constant. */
3197 bool
3198 really_constant_p (const_tree exp)
3200 /* This is not quite the same as STRIP_NOPS. It does more. */
3201 while (CONVERT_EXPR_P (exp)
3202 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3203 exp = TREE_OPERAND (exp, 0);
3204 return TREE_CONSTANT (exp);
3207 /* Return true if T holds a polynomial pointer difference, storing it in
3208 *VALUE if so. A true return means that T's precision is no greater
3209 than 64 bits, which is the largest address space we support, so *VALUE
3210 never loses precision. However, the signedness of the result does
3211 not necessarily match the signedness of T: sometimes an unsigned type
3212 like sizetype is used to encode a value that is actually negative. */
3214 bool
3215 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
3217 if (!t)
3218 return false;
3219 if (TREE_CODE (t) == INTEGER_CST)
3221 if (!cst_and_fits_in_hwi (t))
3222 return false;
3223 *value = int_cst_value (t);
3224 return true;
3226 if (POLY_INT_CST_P (t))
3228 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3229 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3230 return false;
3231 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3232 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3233 return true;
3235 return false;
3238 poly_int64
3239 tree_to_poly_int64 (const_tree t)
3241 gcc_assert (tree_fits_poly_int64_p (t));
3242 if (POLY_INT_CST_P (t))
3243 return poly_int_cst_value (t).force_shwi ();
3244 return TREE_INT_CST_LOW (t);
3247 poly_uint64
3248 tree_to_poly_uint64 (const_tree t)
3250 gcc_assert (tree_fits_poly_uint64_p (t));
3251 if (POLY_INT_CST_P (t))
3252 return poly_int_cst_value (t).force_uhwi ();
3253 return TREE_INT_CST_LOW (t);
3256 /* Return first list element whose TREE_VALUE is ELEM.
3257 Return 0 if ELEM is not in LIST. */
3259 tree
3260 value_member (tree elem, tree list)
3262 while (list)
3264 if (elem == TREE_VALUE (list))
3265 return list;
3266 list = TREE_CHAIN (list);
3268 return NULL_TREE;
3271 /* Return first list element whose TREE_PURPOSE is ELEM.
3272 Return 0 if ELEM is not in LIST. */
3274 tree
3275 purpose_member (const_tree elem, tree list)
3277 while (list)
3279 if (elem == TREE_PURPOSE (list))
3280 return list;
3281 list = TREE_CHAIN (list);
3283 return NULL_TREE;
3286 /* Return true if ELEM is in V. */
3288 bool
3289 vec_member (const_tree elem, vec<tree, va_gc> *v)
3291 unsigned ix;
3292 tree t;
3293 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3294 if (elem == t)
3295 return true;
3296 return false;
3299 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3300 NULL_TREE. */
3302 tree
3303 chain_index (int idx, tree chain)
3305 for (; chain && idx > 0; --idx)
3306 chain = TREE_CHAIN (chain);
3307 return chain;
3310 /* Return nonzero if ELEM is part of the chain CHAIN. */
3312 bool
3313 chain_member (const_tree elem, const_tree chain)
3315 while (chain)
3317 if (elem == chain)
3318 return true;
3319 chain = DECL_CHAIN (chain);
3322 return false;
3325 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3326 We expect a null pointer to mark the end of the chain.
3327 This is the Lisp primitive `length'. */
3329 int
3330 list_length (const_tree t)
3332 const_tree p = t;
3333 #ifdef ENABLE_TREE_CHECKING
3334 const_tree q = t;
3335 #endif
3336 int len = 0;
3338 while (p)
3340 p = TREE_CHAIN (p);
3341 #ifdef ENABLE_TREE_CHECKING
3342 if (len % 2)
3343 q = TREE_CHAIN (q);
3344 gcc_assert (p != q);
3345 #endif
3346 len++;
3349 return len;
3352 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3353 UNION_TYPE TYPE, or NULL_TREE if none. */
3355 tree
3356 first_field (const_tree type)
3358 tree t = TYPE_FIELDS (type);
3359 while (t && TREE_CODE (t) != FIELD_DECL)
3360 t = TREE_CHAIN (t);
3361 return t;
3364 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3365 UNION_TYPE TYPE, or NULL_TREE if none. */
3367 tree
3368 last_field (const_tree type)
3370 tree last = NULL_TREE;
3372 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3374 if (TREE_CODE (fld) != FIELD_DECL)
3375 continue;
3377 last = fld;
3380 return last;
3383 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3384 by modifying the last node in chain 1 to point to chain 2.
3385 This is the Lisp primitive `nconc'. */
3387 tree
3388 chainon (tree op1, tree op2)
3390 tree t1;
3392 if (!op1)
3393 return op2;
3394 if (!op2)
3395 return op1;
3397 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3398 continue;
3399 TREE_CHAIN (t1) = op2;
3401 #ifdef ENABLE_TREE_CHECKING
3403 tree t2;
3404 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3405 gcc_assert (t2 != t1);
3407 #endif
3409 return op1;
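/* For instance, given two TREE_LIST chains A -> B and C -> D,
   chainon (a, c) destructively produces A -> B -> C -> D and returns A,
   while chainon (NULL_TREE, c) simply returns C.  */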
3412 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3414 tree
3415 tree_last (tree chain)
3417 tree next;
3418 if (chain)
3419 while ((next = TREE_CHAIN (chain)))
3420 chain = next;
3421 return chain;
3424 /* Reverse the order of elements in the chain T,
3425 and return the new head of the chain (old last element). */
3427 tree
3428 nreverse (tree t)
3430 tree prev = 0, decl, next;
3431 for (decl = t; decl; decl = next)
3433 /* We shouldn't be using this function to reverse BLOCK chains; we
3434 have blocks_nreverse for that. */
3435 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3436 next = TREE_CHAIN (decl);
3437 TREE_CHAIN (decl) = prev;
3438 prev = decl;
3440 return prev;
3443 /* Return a newly created TREE_LIST node whose
3444 purpose and value fields are PARM and VALUE. */
3446 tree
3447 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3449 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3450 TREE_PURPOSE (t) = parm;
3451 TREE_VALUE (t) = value;
3452 return t;
3455 /* Build a chain of TREE_LIST nodes from a vector. */
3457 tree
3458 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3460 tree ret = NULL_TREE;
3461 tree *pp = &ret;
3462 unsigned int i;
3463 tree t;
3464 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3466 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3467 pp = &TREE_CHAIN (*pp);
3469 return ret;
3472 /* Return a newly created TREE_LIST node whose
3473 purpose and value fields are PURPOSE and VALUE
3474 and whose TREE_CHAIN is CHAIN. */
3476 tree
3477 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3479 tree node;
3481 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3482 memset (node, 0, sizeof (struct tree_common));
3484 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3486 TREE_SET_CODE (node, TREE_LIST);
3487 TREE_CHAIN (node) = chain;
3488 TREE_PURPOSE (node) = purpose;
3489 TREE_VALUE (node) = value;
3490 return node;
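/* For example,

     tree attrs = tree_cons (get_identifier ("pure"), NULL_TREE,
			     tree_cons (get_identifier ("const"), NULL_TREE,
					NULL_TREE));

   builds the two-element purpose/value list ("pure") -> ("const"), the
   shape commonly used for attribute lists.  */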
3493 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3494 trees. */
3496 vec<tree, va_gc> *
3497 ctor_to_vec (tree ctor)
3499 vec<tree, va_gc> *vec;
3500 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3501 unsigned int ix;
3502 tree val;
3504 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3505 vec->quick_push (val);
3507 return vec;
3510 /* Return the size nominally occupied by an object of type TYPE
3511 when it resides in memory. The value is measured in units of bytes,
3512 and its data type is that normally used for type sizes
3513 (which is the first type created by make_signed_type or
3514 make_unsigned_type). */
3516 tree
3517 size_in_bytes_loc (location_t loc, const_tree type)
3519 tree t;
3521 if (type == error_mark_node)
3522 return integer_zero_node;
3524 type = TYPE_MAIN_VARIANT (type);
3525 t = TYPE_SIZE_UNIT (type);
3527 if (t == 0)
3529 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3530 return size_zero_node;
3533 return t;
3536 /* Return the size of TYPE (in bytes) as a wide integer
3537 or return -1 if the size can vary or is larger than an integer. */
3539 HOST_WIDE_INT
3540 int_size_in_bytes (const_tree type)
3542 tree t;
3544 if (type == error_mark_node)
3545 return 0;
3547 type = TYPE_MAIN_VARIANT (type);
3548 t = TYPE_SIZE_UNIT (type);
3550 if (t && tree_fits_uhwi_p (t))
3551 return TREE_INT_CST_LOW (t);
3552 else
3553 return -1;
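/* E.g. int_size_in_bytes (char_type_node) is 1, while for an incomplete
   type or a variable-length array type the result is -1.  */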
3556 /* Return the maximum size of TYPE (in bytes) as a wide integer
3557 or return -1 if the size can vary or is larger than an integer. */
3559 HOST_WIDE_INT
3560 max_int_size_in_bytes (const_tree type)
3562 HOST_WIDE_INT size = -1;
3563 tree size_tree;
3565 /* If this is an array type, check for a possible MAX_SIZE attached. */
3567 if (TREE_CODE (type) == ARRAY_TYPE)
3569 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3571 if (size_tree && tree_fits_uhwi_p (size_tree))
3572 size = tree_to_uhwi (size_tree);
3575 /* If we still haven't been able to get a size, see if the language
3576 can compute a maximum size. */
3578 if (size == -1)
3580 size_tree = lang_hooks.types.max_size (type);
3582 if (size_tree && tree_fits_uhwi_p (size_tree))
3583 size = tree_to_uhwi (size_tree);
3586 return size;
3589 /* Return the bit position of FIELD, in bits from the start of the record.
3590 This is a tree of type bitsizetype. */
3592 tree
3593 bit_position (const_tree field)
3595 return bit_from_pos (DECL_FIELD_OFFSET (field),
3596 DECL_FIELD_BIT_OFFSET (field));
3599 /* Return the byte position of FIELD, in bytes from the start of the record.
3600 This is a tree of type sizetype. */
3602 tree
3603 byte_position (const_tree field)
3605 return byte_from_pos (DECL_FIELD_OFFSET (field),
3606 DECL_FIELD_BIT_OFFSET (field));
3609 /* Likewise, but return as an integer. It must be representable in
3610 that way (since it could be a signed value, we don't have the
3611 option of returning -1 like int_size_in_bytes can). */
3613 HOST_WIDE_INT
3614 int_byte_position (const_tree field)
3616 return tree_to_shwi (byte_position (field));
3619 /* Return, as a tree node, the number of elements for TYPE (which is an
3620 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3622 tree
3623 array_type_nelts (const_tree type)
3625 tree index_type, min, max;
3627 /* If they did it with unspecified bounds, then we should have already
3628 given an error about it before we got here. */
3629 if (! TYPE_DOMAIN (type))
3630 return error_mark_node;
3632 index_type = TYPE_DOMAIN (type);
3633 min = TYPE_MIN_VALUE (index_type);
3634 max = TYPE_MAX_VALUE (index_type);
3636 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3637 if (!max)
3639 /* Zero-sized arrays are represented by the C FE as complete types with
3640 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while the C++ FE represents
3641 them as min 0, max -1. */
3642 if (COMPLETE_TYPE_P (type)
3643 && integer_zerop (TYPE_SIZE (type))
3644 && integer_zerop (min))
3645 return build_int_cst (TREE_TYPE (min), -1);
3647 return error_mark_node;
3650 return (integer_zerop (min)
3651 ? max
3652 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
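/* So for the C type int[10], whose domain is [0, 9], the result is the
   INTEGER_CST 9; for a domain of [1, 10] it is the folded difference
   10 - 1, also 9.  */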
3655 /* If arg is static -- a reference to an object in static storage -- then
3656 return the object. This is not the same as the C meaning of `static'.
3657 If arg isn't static, return NULL. */
3659 tree
3660 staticp (tree arg)
3662 switch (TREE_CODE (arg))
3664 case FUNCTION_DECL:
3665 /* Nested functions are static, even though taking their address will
3666 involve a trampoline as we unnest the nested function and create
3667 the trampoline on the tree level. */
3668 return arg;
3670 case VAR_DECL:
3671 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3672 && ! DECL_THREAD_LOCAL_P (arg)
3673 && ! DECL_DLLIMPORT_P (arg)
3674 ? arg : NULL);
3676 case CONST_DECL:
3677 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3678 ? arg : NULL);
3680 case CONSTRUCTOR:
3681 return TREE_STATIC (arg) ? arg : NULL;
3683 case LABEL_DECL:
3684 case STRING_CST:
3685 return arg;
3687 case COMPONENT_REF:
3688 /* If the thing being referenced is not a field, then it is
3689 something language specific. */
3690 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3692 /* If we are referencing a bitfield, we can't evaluate an
3693 ADDR_EXPR at compile time and so it isn't a constant. */
3694 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3695 return NULL;
3697 return staticp (TREE_OPERAND (arg, 0));
3699 case BIT_FIELD_REF:
3700 return NULL;
3702 case INDIRECT_REF:
3703 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3705 case ARRAY_REF:
3706 case ARRAY_RANGE_REF:
3707 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3708 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3709 return staticp (TREE_OPERAND (arg, 0));
3710 else
3711 return NULL;
3713 case COMPOUND_LITERAL_EXPR:
3714 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3716 default:
3717 return NULL;
3724 /* Return whether OP is a DECL whose address is function-invariant. */
3726 bool
3727 decl_address_invariant_p (const_tree op)
3729 /* The conditions below are slightly less strict than the one in
3730 staticp. */
3732 switch (TREE_CODE (op))
3734 case PARM_DECL:
3735 case RESULT_DECL:
3736 case LABEL_DECL:
3737 case FUNCTION_DECL:
3738 return true;
3740 case VAR_DECL:
3741 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3742 || DECL_THREAD_LOCAL_P (op)
3743 || DECL_CONTEXT (op) == current_function_decl
3744 || decl_function_context (op) == current_function_decl)
3745 return true;
3746 break;
3748 case CONST_DECL:
3749 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3750 || decl_function_context (op) == current_function_decl)
3751 return true;
3752 break;
3754 default:
3755 break;
3758 return false;
3761 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3763 bool
3764 decl_address_ip_invariant_p (const_tree op)
3766 /* The conditions below are slightly less strict than the one in
3767 staticp. */
3769 switch (TREE_CODE (op))
3771 case LABEL_DECL:
3772 case FUNCTION_DECL:
3773 case STRING_CST:
3774 return true;
3776 case VAR_DECL:
3777 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3778 && !DECL_DLLIMPORT_P (op))
3779 || DECL_THREAD_LOCAL_P (op))
3780 return true;
3781 break;
3783 case CONST_DECL:
3784 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3785 return true;
3786 break;
3788 default:
3789 break;
3792 return false;
3796 /* Return true if T is function-invariant (internal function, does
3797 not handle arithmetic; that's handled in skip_simple_arithmetic and
3798 tree_invariant_p). */
3800 static bool
3801 tree_invariant_p_1 (tree t)
3803 tree op;
3805 if (TREE_CONSTANT (t)
3806 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3807 return true;
3809 switch (TREE_CODE (t))
3811 case SAVE_EXPR:
3812 return true;
3814 case ADDR_EXPR:
3815 op = TREE_OPERAND (t, 0);
3816 while (handled_component_p (op))
3818 switch (TREE_CODE (op))
3820 case ARRAY_REF:
3821 case ARRAY_RANGE_REF:
3822 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3823 || TREE_OPERAND (op, 2) != NULL_TREE
3824 || TREE_OPERAND (op, 3) != NULL_TREE)
3825 return false;
3826 break;
3828 case COMPONENT_REF:
3829 if (TREE_OPERAND (op, 2) != NULL_TREE)
3830 return false;
3831 break;
3833 default:;
3835 op = TREE_OPERAND (op, 0);
3838 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3840 default:
3841 break;
3844 return false;
3847 /* Return true if T is function-invariant. */
3849 bool
3850 tree_invariant_p (tree t)
3852 tree inner = skip_simple_arithmetic (t);
3853 return tree_invariant_p_1 (inner);
3856 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3857 Do this to any expression which may be used in more than one place,
3858 but must be evaluated only once.
3860 Normally, expand_expr would reevaluate the expression each time.
3861 Calling save_expr produces something that is evaluated and recorded
3862 the first time expand_expr is called on it. Subsequent calls to
3863 expand_expr just reuse the recorded value.
3865 The call to expand_expr that generates code that actually computes
3866 the value is the first call *at compile time*. Subsequent calls
3867 *at compile time* generate code to use the saved value.
3868 This produces the correct result provided that *at run time* control
3869 always flows through the insns made by the first expand_expr
3870 before reaching the other places where the save_expr was evaluated.
3871 You, the caller of save_expr, must make sure this is so.
3873 Constants, and certain read-only nodes, are returned with no
3874 SAVE_EXPR because that is safe. Expressions containing placeholders
3875 are not touched; see tree.def for an explanation of what these
3876 are used for. */
3878 tree
3879 save_expr (tree expr)
3881 tree inner;
3883 /* If the tree evaluates to a constant, then we don't want to hide that
3884 fact (i.e. this allows further folding, and direct checks for constants).
3885 However, a read-only object that has side effects cannot be bypassed.
3886 Since it is no problem to reevaluate literals, we just return the
3887 literal node. */
3888 inner = skip_simple_arithmetic (expr);
3889 if (TREE_CODE (inner) == ERROR_MARK)
3890 return inner;
3892 if (tree_invariant_p_1 (inner))
3893 return expr;
3895 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3896 it means that the size or offset of some field of an object depends on
3897 the value within another field.
3899 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3900 and some variable since it would then need to be both evaluated once and
3901 evaluated more than once. Front-ends must assure this case cannot
3902 happen by surrounding any such subexpressions in their own SAVE_EXPR
3903 and forcing evaluation at the proper time. */
3904 if (contains_placeholder_p (inner))
3905 return expr;
3907 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3909 /* This expression might be placed ahead of a jump to ensure that the
3910 value was computed on both sides of the jump. So make sure it isn't
3911 eliminated as dead. */
3912 TREE_SIDE_EFFECTS (expr) = 1;
3913 return expr;
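/* A sketch of the intended pattern, with EXP, MIN and MAX standing for
   hypothetical trees where EXP may have side effects but is needed twice:

     exp = save_expr (exp);
     tree lo = fold_build2 (GE_EXPR, boolean_type_node, exp, min);
     tree hi = fold_build2 (LE_EXPR, boolean_type_node, exp, max);

   Both comparisons share the one SAVE_EXPR, so EXP is evaluated only once
   at run time.  */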
3916 /* Look inside EXPR into any simple arithmetic operations. Return the
3917 outermost non-arithmetic or non-invariant node. */
3919 tree
3920 skip_simple_arithmetic (tree expr)
3922 /* We don't care about whether this can be used as an lvalue in this
3923 context. */
3924 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3925 expr = TREE_OPERAND (expr, 0);
3927 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3928 a constant, it will be more efficient to not make another SAVE_EXPR since
3929 it will allow better simplification and GCSE will be able to merge the
3930 computations if they actually occur. */
3931 while (true)
3933 if (UNARY_CLASS_P (expr))
3934 expr = TREE_OPERAND (expr, 0);
3935 else if (BINARY_CLASS_P (expr))
3937 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3938 expr = TREE_OPERAND (expr, 0);
3939 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3940 expr = TREE_OPERAND (expr, 1);
3941 else
3942 break;
3944 else
3945 break;
3948 return expr;
3951 /* Look inside EXPR into simple arithmetic operations involving constants.
3952 Return the outermost non-arithmetic or non-constant node. */
3954 tree
3955 skip_simple_constant_arithmetic (tree expr)
3957 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3958 expr = TREE_OPERAND (expr, 0);
3960 while (true)
3962 if (UNARY_CLASS_P (expr))
3963 expr = TREE_OPERAND (expr, 0);
3964 else if (BINARY_CLASS_P (expr))
3966 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3967 expr = TREE_OPERAND (expr, 0);
3968 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3969 expr = TREE_OPERAND (expr, 1);
3970 else
3971 break;
3973 else
3974 break;
3977 return expr;
3980 /* Return which tree structure is used by T. */
3982 enum tree_node_structure_enum
3983 tree_node_structure (const_tree t)
3985 const enum tree_code code = TREE_CODE (t);
3986 return tree_node_structure_for_code (code);
3989 /* Set various status flags when building a CALL_EXPR object T. */
3991 static void
3992 process_call_operands (tree t)
3994 bool side_effects = TREE_SIDE_EFFECTS (t);
3995 bool read_only = false;
3996 int i = call_expr_flags (t);
3998 /* Calls have side-effects, except those to const or pure functions. */
3999 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
4000 side_effects = true;
4001 /* Propagate TREE_READONLY of arguments for const functions. */
4002 if (i & ECF_CONST)
4003 read_only = true;
4005 if (!side_effects || read_only)
4006 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
4008 tree op = TREE_OPERAND (t, i);
4009 if (op && TREE_SIDE_EFFECTS (op))
4010 side_effects = true;
4011 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
4012 read_only = false;
4015 TREE_SIDE_EFFECTS (t) = side_effects;
4016 TREE_READONLY (t) = read_only;
4019 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4020 size or offset that depends on a field within a record. */
4022 bool
4023 contains_placeholder_p (const_tree exp)
4025 enum tree_code code;
4027 if (!exp)
4028 return 0;
4030 code = TREE_CODE (exp);
4031 if (code == PLACEHOLDER_EXPR)
4032 return 1;
4034 switch (TREE_CODE_CLASS (code))
4036 case tcc_reference:
4037 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4038 position computations, since they will be converted into a
4039 WITH_RECORD_EXPR involving the reference, under which they are
4040 assumed to be valid. */
4041 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4043 case tcc_exceptional:
4044 if (code == TREE_LIST)
4045 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
4046 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
4047 break;
4049 case tcc_unary:
4050 case tcc_binary:
4051 case tcc_comparison:
4052 case tcc_expression:
4053 switch (code)
4055 case COMPOUND_EXPR:
4056 /* Ignoring the first operand isn't quite right, but works best. */
4057 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
4059 case COND_EXPR:
4060 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4061 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
4062 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
4064 case SAVE_EXPR:
4065 /* The save_expr function never wraps anything containing
4066 a PLACEHOLDER_EXPR. */
4067 return 0;
4069 default:
4070 break;
4073 switch (TREE_CODE_LENGTH (code))
4075 case 1:
4076 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4077 case 2:
4078 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4079 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
4080 default:
4081 return 0;
4084 case tcc_vl_exp:
4085 switch (code)
4087 case CALL_EXPR:
4089 const_tree arg;
4090 const_call_expr_arg_iterator iter;
4091 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
4092 if (CONTAINS_PLACEHOLDER_P (arg))
4093 return 1;
4094 return 0;
4096 default:
4097 return 0;
4100 default:
4101 return 0;
4103 return 0;
4106 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4107 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4108 field positions. */
4110 static bool
4111 type_contains_placeholder_1 (const_tree type)
4113 /* If the size contains a placeholder or the parent type (the component type
4114 in the case of arrays) involves a placeholder, this type does too. */
4115 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
4116 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
4117 || (!POINTER_TYPE_P (type)
4118 && TREE_TYPE (type)
4119 && type_contains_placeholder_p (TREE_TYPE (type))))
4120 return true;
4122 /* Now do type-specific checks. Note that the last part of the check above
4123 greatly limits what we have to do below. */
4124 switch (TREE_CODE (type))
4126 case VOID_TYPE:
4127 case OPAQUE_TYPE:
4128 case COMPLEX_TYPE:
4129 case ENUMERAL_TYPE:
4130 case BOOLEAN_TYPE:
4131 case POINTER_TYPE:
4132 case OFFSET_TYPE:
4133 case REFERENCE_TYPE:
4134 case METHOD_TYPE:
4135 case FUNCTION_TYPE:
4136 case VECTOR_TYPE:
4137 case NULLPTR_TYPE:
4138 return false;
4140 case INTEGER_TYPE:
4141 case REAL_TYPE:
4142 case FIXED_POINT_TYPE:
4143 /* Here we just check the bounds. */
4144 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
4145 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
4147 case ARRAY_TYPE:
4148 /* We have already checked the component type above, so just check
4149 the domain type. Flexible array members have a null domain. */
4150 return TYPE_DOMAIN (type) ?
4151 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4153 case RECORD_TYPE:
4154 case UNION_TYPE:
4155 case QUAL_UNION_TYPE:
4157 tree field;
4159 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4160 if (TREE_CODE (field) == FIELD_DECL
4161 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4162 || (TREE_CODE (type) == QUAL_UNION_TYPE
4163 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4164 || type_contains_placeholder_p (TREE_TYPE (field))))
4165 return true;
4167 return false;
4170 default:
4171 gcc_unreachable ();
4175 /* Wrapper around above function used to cache its result. */
4177 bool
4178 type_contains_placeholder_p (tree type)
4180 bool result;
4182 /* If the contains_placeholder_bits field has been initialized,
4183 then we know the answer. */
4184 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4185 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4187 /* Indicate that we've seen this type node, and the answer is false.
4188 This is what we want to return if we run into recursion via fields. */
4189 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4191 /* Compute the real value. */
4192 result = type_contains_placeholder_1 (type);
4194 /* Store the real value. */
4195 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4197 return result;
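/* In the cache above, TYPE_CONTAINS_PLACEHOLDER_INTERNAL uses a biased
   encoding: 0 means "not computed yet", 1 means "no placeholder" and
   2 means "contains a placeholder", which is why the wrapper stores
   RESULT + 1 and returns the stored value minus 1.  */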
4200 /* Push tree EXP onto vector QUEUE if it is not already present. */
4202 static void
4203 push_without_duplicates (tree exp, vec<tree> *queue)
4205 unsigned int i;
4206 tree iter;
4208 FOR_EACH_VEC_ELT (*queue, i, iter)
4209 if (simple_cst_equal (iter, exp) == 1)
4210 break;
4212 if (!iter)
4213 queue->safe_push (exp);
4216 /* Given a tree EXP, find all occurrences of references to fields
4217 in a PLACEHOLDER_EXPR and place them in vector REFS without
4218 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4219 we assume here that EXP contains only arithmetic expressions
4220 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4221 argument list. */
4223 void
4224 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4226 enum tree_code code = TREE_CODE (exp);
4227 tree inner;
4228 int i;
4230 /* We handle TREE_LIST and COMPONENT_REF separately. */
4231 if (code == TREE_LIST)
4233 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4234 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4236 else if (code == COMPONENT_REF)
4238 for (inner = TREE_OPERAND (exp, 0);
4239 REFERENCE_CLASS_P (inner);
4240 inner = TREE_OPERAND (inner, 0))
4243 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4244 push_without_duplicates (exp, refs);
4245 else
4246 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4248 else
4249 switch (TREE_CODE_CLASS (code))
4251 case tcc_constant:
4252 break;
4254 case tcc_declaration:
4255 /* Variables allocated to static storage can stay. */
4256 if (!TREE_STATIC (exp))
4257 push_without_duplicates (exp, refs);
4258 break;
4260 case tcc_expression:
4261 /* This is the pattern built in ada/make_aligning_type. */
4262 if (code == ADDR_EXPR
4263 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4265 push_without_duplicates (exp, refs);
4266 break;
4269 /* Fall through. */
4271 case tcc_exceptional:
4272 case tcc_unary:
4273 case tcc_binary:
4274 case tcc_comparison:
4275 case tcc_reference:
4276 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4277 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4278 break;
4280 case tcc_vl_exp:
4281 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4282 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4283 break;
4285 default:
4286 gcc_unreachable ();
4290 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4291 return a tree with all occurrences of references to F in a
4292 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4293 CONST_DECLs. Note that we assume here that EXP contains only
4294 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4295 occurring only in their argument list. */
4297 tree
4298 substitute_in_expr (tree exp, tree f, tree r)
4300 enum tree_code code = TREE_CODE (exp);
4301 tree op0, op1, op2, op3;
4302 tree new_tree;
4304 /* We handle TREE_LIST and COMPONENT_REF separately. */
4305 if (code == TREE_LIST)
4307 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4308 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4309 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4310 return exp;
4312 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4314 else if (code == COMPONENT_REF)
4316 tree inner;
4318 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4319 and it is the right field, replace it with R. */
4320 for (inner = TREE_OPERAND (exp, 0);
4321 REFERENCE_CLASS_P (inner);
4322 inner = TREE_OPERAND (inner, 0))
4325 /* The field. */
4326 op1 = TREE_OPERAND (exp, 1);
4328 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4329 return r;
4331 /* If this expression hasn't been completed yet, leave it alone. */
4332 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4333 return exp;
4335 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4336 if (op0 == TREE_OPERAND (exp, 0))
4337 return exp;
4339 new_tree
4340 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4342 else
4343 switch (TREE_CODE_CLASS (code))
4345 case tcc_constant:
4346 return exp;
4348 case tcc_declaration:
4349 if (exp == f)
4350 return r;
4351 else
4352 return exp;
4354 case tcc_expression:
4355 if (exp == f)
4356 return r;
4358 /* Fall through. */
4360 case tcc_exceptional:
4361 case tcc_unary:
4362 case tcc_binary:
4363 case tcc_comparison:
4364 case tcc_reference:
4365 switch (TREE_CODE_LENGTH (code))
4367 case 0:
4368 return exp;
4370 case 1:
4371 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4372 if (op0 == TREE_OPERAND (exp, 0))
4373 return exp;
4375 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4376 break;
4378 case 2:
4379 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4380 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4382 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4383 return exp;
4385 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4386 break;
4388 case 3:
4389 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4390 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4391 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4393 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4394 && op2 == TREE_OPERAND (exp, 2))
4395 return exp;
4397 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4398 break;
4400 case 4:
4401 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4402 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4403 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4404 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4406 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4407 && op2 == TREE_OPERAND (exp, 2)
4408 && op3 == TREE_OPERAND (exp, 3))
4409 return exp;
4411 new_tree
4412 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4413 break;
4415 default:
4416 gcc_unreachable ();
4418 break;
4420 case tcc_vl_exp:
4422 int i;
4424 new_tree = NULL_TREE;
4426 /* If we are trying to replace F with a constant or with another
4427 instance of one of the arguments of the call, inline calls to
4428 functions that do nothing other than compute a value from
4429 the arguments they are passed. This makes it possible to
4430 fold the replacement expression partially or entirely. */
4431 if (code == CALL_EXPR)
4433 bool maybe_inline = false;
4434 if (CONSTANT_CLASS_P (r))
4435 maybe_inline = true;
4436 else
4437 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4438 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4440 maybe_inline = true;
4441 break;
4443 if (maybe_inline)
4445 tree t = maybe_inline_call_in_expr (exp);
4446 if (t)
4447 return SUBSTITUTE_IN_EXPR (t, f, r);
4451 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4453 tree op = TREE_OPERAND (exp, i);
4454 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4455 if (new_op != op)
4457 if (!new_tree)
4458 new_tree = copy_node (exp);
4459 TREE_OPERAND (new_tree, i) = new_op;
4463 if (new_tree)
4465 new_tree = fold (new_tree);
4466 if (TREE_CODE (new_tree) == CALL_EXPR)
4467 process_call_operands (new_tree);
4469 else
4470 return exp;
4472 break;
4474 default:
4475 gcc_unreachable ();
4478 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4480 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4481 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4483 return new_tree;
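/* A hypothetical sketch of how the routine above is typically used,
   e.g. to turn a self-referential size into a concrete one once the
   discriminant FIELD is known to be 16 (SIZE_EXPR and FIELD are
   illustrative names, not objects defined in this file):

     tree fixed_size
       = SUBSTITUTE_IN_EXPR (size_expr, field, size_int (16));

   SUBSTITUTE_IN_EXPR is the null-tolerant macro wrapper from tree.h
   that the recursive calls above also go through.  */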
4486 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4487 for it within OBJ, a tree that is an object or a chain of references. */
4489 tree
4490 substitute_placeholder_in_expr (tree exp, tree obj)
4492 enum tree_code code = TREE_CODE (exp);
4493 tree op0, op1, op2, op3;
4494 tree new_tree;
4496 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4497 in the chain of OBJ. */
4498 if (code == PLACEHOLDER_EXPR)
4500 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4501 tree elt;
4503 for (elt = obj; elt != 0;
4504 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4505 || TREE_CODE (elt) == COND_EXPR)
4506 ? TREE_OPERAND (elt, 1)
4507 : (REFERENCE_CLASS_P (elt)
4508 || UNARY_CLASS_P (elt)
4509 || BINARY_CLASS_P (elt)
4510 || VL_EXP_CLASS_P (elt)
4511 || EXPRESSION_CLASS_P (elt))
4512 ? TREE_OPERAND (elt, 0) : 0))
4513 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4514 return elt;
4516 for (elt = obj; elt != 0;
4517 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4518 || TREE_CODE (elt) == COND_EXPR)
4519 ? TREE_OPERAND (elt, 1)
4520 : (REFERENCE_CLASS_P (elt)
4521 || UNARY_CLASS_P (elt)
4522 || BINARY_CLASS_P (elt)
4523 || VL_EXP_CLASS_P (elt)
4524 || EXPRESSION_CLASS_P (elt))
4525 ? TREE_OPERAND (elt, 0) : 0))
4526 if (POINTER_TYPE_P (TREE_TYPE (elt))
4527 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4528 == need_type))
4529 return fold_build1 (INDIRECT_REF, need_type, elt);
4531 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4532 survives until RTL generation, there will be an error. */
4533 return exp;
4536 /* TREE_LIST is special because we need to look at TREE_VALUE
4537 and TREE_CHAIN, not TREE_OPERANDS. */
4538 else if (code == TREE_LIST)
4540 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4541 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4542 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4543 return exp;
4545 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4547 else
4548 switch (TREE_CODE_CLASS (code))
4550 case tcc_constant:
4551 case tcc_declaration:
4552 return exp;
4554 case tcc_exceptional:
4555 case tcc_unary:
4556 case tcc_binary:
4557 case tcc_comparison:
4558 case tcc_expression:
4559 case tcc_reference:
4560 case tcc_statement:
4561 switch (TREE_CODE_LENGTH (code))
4563 case 0:
4564 return exp;
4566 case 1:
4567 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4568 if (op0 == TREE_OPERAND (exp, 0))
4569 return exp;
4571 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4572 break;
4574 case 2:
4575 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4576 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4578 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4579 return exp;
4581 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4582 break;
4584 case 3:
4585 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4586 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4587 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4589 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4590 && op2 == TREE_OPERAND (exp, 2))
4591 return exp;
4593 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4594 break;
4596 case 4:
4597 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4598 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4599 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4600 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4602 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4603 && op2 == TREE_OPERAND (exp, 2)
4604 && op3 == TREE_OPERAND (exp, 3))
4605 return exp;
4607 new_tree
4608 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4609 break;
4611 default:
4612 gcc_unreachable ();
4614 break;
4616 case tcc_vl_exp:
4618 int i;
4620 new_tree = NULL_TREE;
4622 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4624 tree op = TREE_OPERAND (exp, i);
4625 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4626 if (new_op != op)
4628 if (!new_tree)
4629 new_tree = copy_node (exp);
4630 TREE_OPERAND (new_tree, i) = new_op;
4634 if (new_tree)
4636 new_tree = fold (new_tree);
4637 if (TREE_CODE (new_tree) == CALL_EXPR)
4638 process_call_operands (new_tree);
4640 else
4641 return exp;
4643 break;
4645 default:
4646 gcc_unreachable ();
4649 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4651 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4652 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4654 return new_tree;
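/* A companion sketch (illustrative names only): once the object that a
   self-referential size applies to is known, the PLACEHOLDER_EXPRs in
   it are resolved against that object:

     tree actual_size
       = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (record_type), obj);

   Each placeholder of a matching type is replaced by OBJ itself or by
   an INDIRECT_REF of OBJ, exactly as the two loops above describe.  */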
4658 /* Subroutine of stabilize_reference; this is called for subtrees of
4659 references. Any expression with side-effects must be put in a SAVE_EXPR
4660 to ensure that it is only evaluated once.
4662 We don't put SAVE_EXPR nodes around everything, because assigning very
4663 simple expressions to temporaries causes us to miss good opportunities
4664 for optimizations. Among other things, the opportunity to fold in the
4665 addition of a constant into an addressing mode often gets lost, e.g.
4666 "y[i+1] += x;". In general, we take the approach that we should not make
4667 an assignment unless we are forced into it - i.e., that any non-side effect
4668 operator should be allowed, and that cse should take care of coalescing
4669 multiple utterances of the same expression should that prove fruitful. */
4671 static tree
4672 stabilize_reference_1 (tree e)
4674 tree result;
4675 enum tree_code code = TREE_CODE (e);
4677 /* We cannot ignore const expressions because one might be a reference
4678 to a const array whose index contains side-effects. But we can
4679 ignore things that are actually constant or that have already been
4680 handled by this function. */
4682 if (tree_invariant_p (e))
4683 return e;
4685 switch (TREE_CODE_CLASS (code))
4687 case tcc_exceptional:
4688 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4689 have side-effects. */
4690 if (code == STATEMENT_LIST)
4691 return save_expr (e);
4692 /* FALLTHRU */
4693 case tcc_type:
4694 case tcc_declaration:
4695 case tcc_comparison:
4696 case tcc_statement:
4697 case tcc_expression:
4698 case tcc_reference:
4699 case tcc_vl_exp:
4700 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4701 so that it will only be evaluated once. */
4702 /* The reference (r) and comparison (<) classes could be handled as
4703 below, but it is generally faster to only evaluate them once. */
4704 if (TREE_SIDE_EFFECTS (e))
4705 return save_expr (e);
4706 return e;
4708 case tcc_constant:
4709 /* Constants need no processing. In fact, we should never reach
4710 here. */
4711 return e;
4713 case tcc_binary:
4714 /* Division is slow and tends to be compiled with jumps,
4715 especially the division by powers of 2 that is often
4716 found inside of an array reference. So do it just once. */
4717 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4718 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4719 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4720 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4721 return save_expr (e);
4722 /* Recursively stabilize each operand. */
4723 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4724 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4725 break;
4727 case tcc_unary:
4728 /* Recursively stabilize each operand. */
4729 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4730 break;
4732 default:
4733 gcc_unreachable ();
4736 TREE_TYPE (result) = TREE_TYPE (e);
4737 TREE_READONLY (result) = TREE_READONLY (e);
4738 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4739 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4741 return result;
4744 /* Stabilize a reference so that we can use it any number of times
4745 without causing its operands to be evaluated more than once.
4746 Returns the stabilized reference. This works by means of save_expr,
4747 so see the caveats in the comments about save_expr.
4749 Also allows conversion expressions whose operands are references.
4750 Any other kind of expression is returned unchanged. */
4752 tree
4753 stabilize_reference (tree ref)
4755 tree result;
4756 enum tree_code code = TREE_CODE (ref);
4758 switch (code)
4760 case VAR_DECL:
4761 case PARM_DECL:
4762 case RESULT_DECL:
4763 /* No action is needed in this case. */
4764 return ref;
4766 CASE_CONVERT:
4767 case FLOAT_EXPR:
4768 case FIX_TRUNC_EXPR:
4769 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4770 break;
4772 case INDIRECT_REF:
4773 result = build_nt (INDIRECT_REF,
4774 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4775 break;
4777 case COMPONENT_REF:
4778 result = build_nt (COMPONENT_REF,
4779 stabilize_reference (TREE_OPERAND (ref, 0)),
4780 TREE_OPERAND (ref, 1), NULL_TREE);
4781 break;
4783 case BIT_FIELD_REF:
4784 result = build_nt (BIT_FIELD_REF,
4785 stabilize_reference (TREE_OPERAND (ref, 0)),
4786 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4787 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4788 break;
4790 case ARRAY_REF:
4791 result = build_nt (ARRAY_REF,
4792 stabilize_reference (TREE_OPERAND (ref, 0)),
4793 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4794 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4795 break;
4797 case ARRAY_RANGE_REF:
4798 result = build_nt (ARRAY_RANGE_REF,
4799 stabilize_reference (TREE_OPERAND (ref, 0)),
4800 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4801 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4802 break;
4804 case COMPOUND_EXPR:
4805 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4806 it wouldn't be ignored. This matters when dealing with
4807 volatiles. */
4808 return stabilize_reference_1 (ref);
4810 /* If arg isn't a kind of lvalue we recognize, make no change.
4811 Caller should recognize the error for an invalid lvalue. */
4812 default:
4813 return ref;
4815 case ERROR_MARK:
4816 return error_mark_node;
4819 TREE_TYPE (result) = TREE_TYPE (ref);
4820 TREE_READONLY (result) = TREE_READONLY (ref);
4821 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4822 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4823 protected_set_expr_location (result, EXPR_LOCATION (ref));
4825 return result;
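/* A hypothetical use of the routine above, as a front end might expand
   "lhs += rhs" without evaluating LHS twice (names illustrative):

     tree stable_lhs = stabilize_reference (lhs);
     tree sum = build2 (PLUS_EXPR, TREE_TYPE (stable_lhs),
                        stable_lhs, rhs);
     tree assign = build2 (MODIFY_EXPR, TREE_TYPE (stable_lhs),
                           stable_lhs, sum);

   Side effects inside LHS, e.g. in an array index, end up wrapped in
   SAVE_EXPRs and so are evaluated only once even though STABLE_LHS
   appears twice.  */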
4828 /* Low-level constructors for expressions. */
4830 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4831 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4833 void
4834 recompute_tree_invariant_for_addr_expr (tree t)
4836 tree node;
4837 bool tc = true, se = false;
4839 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4841 /* We started out assuming this address is both invariant and constant and
4842 has no side effects. Now go down any handled components and see if
4843 any of them involve offsets that are either non-constant or non-invariant.
4844 Also check for side-effects.
4846 ??? Note that this code makes no attempt to deal with the case where
4847 taking the address of something causes a copy due to misalignment. */
4849 #define UPDATE_FLAGS(NODE) \
4850 do { tree _node = (NODE); \
4851 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4852 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4854 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4855 node = TREE_OPERAND (node, 0))
4857 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4858 array reference (probably made temporarily by the G++ front end),
4859 so ignore all the operands. */
4860 if ((TREE_CODE (node) == ARRAY_REF
4861 || TREE_CODE (node) == ARRAY_RANGE_REF)
4862 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4864 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4865 if (TREE_OPERAND (node, 2))
4866 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4867 if (TREE_OPERAND (node, 3))
4868 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4870 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4871 FIELD_DECL, apparently. The G++ front end can put something else
4872 there, at least temporarily. */
4873 else if (TREE_CODE (node) == COMPONENT_REF
4874 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4876 if (TREE_OPERAND (node, 2))
4877 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4881 node = lang_hooks.expr_to_decl (node, &tc, &se);
4883 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4884 the address, since &(*a)->b is a form of addition. If it's a constant, the
4885 address is constant too. If it's a decl, its address is constant if the
4886 decl is static. Everything else is not constant and, furthermore,
4887 taking the address of a volatile variable is not volatile. */
4888 if (TREE_CODE (node) == INDIRECT_REF
4889 || TREE_CODE (node) == MEM_REF)
4890 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4891 else if (CONSTANT_CLASS_P (node))
4893 else if (DECL_P (node))
4894 tc &= (staticp (node) != NULL_TREE);
4895 else
4897 tc = false;
4898 se |= TREE_SIDE_EFFECTS (node);
4902 TREE_CONSTANT (t) = tc;
4903 TREE_SIDE_EFFECTS (t) = se;
4904 #undef UPDATE_FLAGS
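/* For illustration: an ADDR_EXPR of a static VAR_DECL comes out of the
   function above with TREE_CONSTANT set, whereas something like &a[i]
   with a non-constant index loses the flag in the ARRAY_REF walk, and a
   call in the index additionally sets TREE_SIDE_EFFECTS.  */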
4907 /* Build an expression of code CODE, data type TYPE, and operands as
4908 specified. Expressions and reference nodes can be created this way.
4909 Constants, decls, types and misc nodes cannot be.
4911 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4912 enough for all extant tree codes. */
4914 tree
4915 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4917 tree t;
4919 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4921 t = make_node (code PASS_MEM_STAT);
4922 TREE_TYPE (t) = tt;
4924 return t;
4927 tree
4928 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4930 int length = sizeof (struct tree_exp);
4931 tree t;
4933 record_node_allocation_statistics (code, length);
4935 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4937 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4939 memset (t, 0, sizeof (struct tree_common));
4941 TREE_SET_CODE (t, code);
4943 TREE_TYPE (t) = type;
4944 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4945 TREE_OPERAND (t, 0) = node;
4946 if (node && !TYPE_P (node))
4948 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4949 TREE_READONLY (t) = TREE_READONLY (node);
4952 if (TREE_CODE_CLASS (code) == tcc_statement)
4954 if (code != DEBUG_BEGIN_STMT)
4955 TREE_SIDE_EFFECTS (t) = 1;
4957 else switch (code)
4959 case VA_ARG_EXPR:
4960 /* All of these have side-effects, no matter what their
4961 operands are. */
4962 TREE_SIDE_EFFECTS (t) = 1;
4963 TREE_READONLY (t) = 0;
4964 break;
4966 case INDIRECT_REF:
4967 /* Whether a dereference is readonly has nothing to do with whether
4968 its operand is readonly. */
4969 TREE_READONLY (t) = 0;
4970 break;
4972 case ADDR_EXPR:
4973 if (node)
4974 recompute_tree_invariant_for_addr_expr (t);
4975 break;
4977 default:
4978 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4979 && node && !TYPE_P (node)
4980 && TREE_CONSTANT (node))
4981 TREE_CONSTANT (t) = 1;
4982 if (TREE_CODE_CLASS (code) == tcc_reference
4983 && node && TREE_THIS_VOLATILE (node))
4984 TREE_THIS_VOLATILE (t) = 1;
4985 break;
4988 return t;
4991 #define PROCESS_ARG(N) \
4992 do { \
4993 TREE_OPERAND (t, N) = arg##N; \
4994 if (arg##N &&!TYPE_P (arg##N)) \
4996 if (TREE_SIDE_EFFECTS (arg##N)) \
4997 side_effects = 1; \
4998 if (!TREE_READONLY (arg##N) \
4999 && !CONSTANT_CLASS_P (arg##N)) \
5000 (void) (read_only = 0); \
5001 if (!TREE_CONSTANT (arg##N)) \
5002 (void) (constant = 0); \
5004 } while (0)
5006 tree
5007 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
5009 bool constant, read_only, side_effects, div_by_zero;
5010 tree t;
5012 gcc_assert (TREE_CODE_LENGTH (code) == 2);
5014 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
5015 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
5016 /* When sizetype precision doesn't match that of pointers
5017 we need to be able to build explicit extensions or truncations
5018 of the offset argument. */
5019 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
5020 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
5021 && TREE_CODE (arg1) == INTEGER_CST);
5023 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
5024 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
5025 && ptrofftype_p (TREE_TYPE (arg1)));
5027 t = make_node (code PASS_MEM_STAT);
5028 TREE_TYPE (t) = tt;
5030 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
5031 result based on those same flags for the arguments. But if the
5032 arguments aren't really even `tree' expressions, we shouldn't be trying
5033 to do this. */
5035 /* Expressions without side effects may be constant if their
5036 arguments are as well. */
5037 constant = (TREE_CODE_CLASS (code) == tcc_comparison
5038 || TREE_CODE_CLASS (code) == tcc_binary);
5039 read_only = 1;
5040 side_effects = TREE_SIDE_EFFECTS (t);
5042 switch (code)
5044 case TRUNC_DIV_EXPR:
5045 case CEIL_DIV_EXPR:
5046 case FLOOR_DIV_EXPR:
5047 case ROUND_DIV_EXPR:
5048 case EXACT_DIV_EXPR:
5049 case CEIL_MOD_EXPR:
5050 case FLOOR_MOD_EXPR:
5051 case ROUND_MOD_EXPR:
5052 case TRUNC_MOD_EXPR:
5053 div_by_zero = integer_zerop (arg1);
5054 break;
5055 default:
5056 div_by_zero = false;
5059 PROCESS_ARG (0);
5060 PROCESS_ARG (1);
5062 TREE_SIDE_EFFECTS (t) = side_effects;
5063 if (code == MEM_REF)
5065 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5067 tree o = TREE_OPERAND (arg0, 0);
5068 TREE_READONLY (t) = TREE_READONLY (o);
5069 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5072 else
5074 TREE_READONLY (t) = read_only;
5075 /* Don't mark X / 0 as constant. */
5076 TREE_CONSTANT (t) = constant && !div_by_zero;
5077 TREE_THIS_VOLATILE (t)
5078 = (TREE_CODE_CLASS (code) == tcc_reference
5079 && arg0 && TREE_THIS_VOLATILE (arg0));
5082 return t;
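/* A minimal sketch of the two-operand constructor above (A is an
   illustrative integer operand, not a variable in this file): building
   "a + 1" and letting PROCESS_ARG derive the flags:

     tree sum = build2 (PLUS_EXPR, integer_type_node, a,
                        build_int_cst (integer_type_node, 1));

   TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS of SUM then follow
   from the corresponding flags of A, since the constant operand
   contributes none of its own.  */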
5086 tree
5087 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
5088 tree arg2 MEM_STAT_DECL)
5090 bool constant, read_only, side_effects;
5091 tree t;
5093 gcc_assert (TREE_CODE_LENGTH (code) == 3);
5094 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5096 t = make_node (code PASS_MEM_STAT);
5097 TREE_TYPE (t) = tt;
5099 read_only = 1;
5101 /* As a special exception, if COND_EXPR has NULL branches, we
5102 assume that it is a gimple statement and always consider
5103 it to have side effects. */
5104 if (code == COND_EXPR
5105 && tt == void_type_node
5106 && arg1 == NULL_TREE
5107 && arg2 == NULL_TREE)
5108 side_effects = true;
5109 else
5110 side_effects = TREE_SIDE_EFFECTS (t);
5112 PROCESS_ARG (0);
5113 PROCESS_ARG (1);
5114 PROCESS_ARG (2);
5116 if (code == COND_EXPR)
5117 TREE_READONLY (t) = read_only;
5119 TREE_SIDE_EFFECTS (t) = side_effects;
5120 TREE_THIS_VOLATILE (t)
5121 = (TREE_CODE_CLASS (code) == tcc_reference
5122 && arg0 && TREE_THIS_VOLATILE (arg0));
5124 return t;
5127 tree
5128 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
5129 tree arg2, tree arg3 MEM_STAT_DECL)
5131 bool constant, read_only, side_effects;
5132 tree t;
5134 gcc_assert (TREE_CODE_LENGTH (code) == 4);
5136 t = make_node (code PASS_MEM_STAT);
5137 TREE_TYPE (t) = tt;
5139 side_effects = TREE_SIDE_EFFECTS (t);
5141 PROCESS_ARG (0);
5142 PROCESS_ARG (1);
5143 PROCESS_ARG (2);
5144 PROCESS_ARG (3);
5146 TREE_SIDE_EFFECTS (t) = side_effects;
5147 TREE_THIS_VOLATILE (t)
5148 = (TREE_CODE_CLASS (code) == tcc_reference
5149 && arg0 && TREE_THIS_VOLATILE (arg0));
5151 return t;
5154 tree
5155 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5156 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5158 bool constant, read_only, side_effects;
5159 tree t;
5161 gcc_assert (TREE_CODE_LENGTH (code) == 5);
5163 t = make_node (code PASS_MEM_STAT);
5164 TREE_TYPE (t) = tt;
5166 side_effects = TREE_SIDE_EFFECTS (t);
5168 PROCESS_ARG (0);
5169 PROCESS_ARG (1);
5170 PROCESS_ARG (2);
5171 PROCESS_ARG (3);
5172 PROCESS_ARG (4);
5174 TREE_SIDE_EFFECTS (t) = side_effects;
5175 if (code == TARGET_MEM_REF)
5177 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5179 tree o = TREE_OPERAND (arg0, 0);
5180 TREE_READONLY (t) = TREE_READONLY (o);
5181 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5184 else
5185 TREE_THIS_VOLATILE (t)
5186 = (TREE_CODE_CLASS (code) == tcc_reference
5187 && arg0 && TREE_THIS_VOLATILE (arg0));
5189 return t;
5192 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
5193 on the pointer PTR. */
5195 tree
5196 build_simple_mem_ref_loc (location_t loc, tree ptr)
5198 poly_int64 offset = 0;
5199 tree ptype = TREE_TYPE (ptr);
5200 tree tem;
5201 /* For convenience allow addresses that collapse to a simple base
5202 and offset. */
5203 if (TREE_CODE (ptr) == ADDR_EXPR
5204 && (handled_component_p (TREE_OPERAND (ptr, 0))
5205 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5207 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5208 gcc_assert (ptr);
5209 if (TREE_CODE (ptr) == MEM_REF)
5211 offset += mem_ref_offset (ptr).force_shwi ();
5212 ptr = TREE_OPERAND (ptr, 0);
5214 else
5215 ptr = build_fold_addr_expr (ptr);
5216 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5218 tem = build2 (MEM_REF, TREE_TYPE (ptype),
5219 ptr, build_int_cst (ptype, offset));
5220 SET_EXPR_LOCATION (tem, loc);
5221 return tem;
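/* E.g. given an invariant pointer PTR of type "int *" (illustrative),
   the call below yields a MEM_REF equivalent to a plain "*ptr":

     tree deref = build_simple_mem_ref_loc (input_location, ptr);  */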
5224 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5226 poly_offset_int
5227 mem_ref_offset (const_tree t)
5229 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5230 SIGNED);
5233 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5234 offsetted by OFFSET units. */
5236 tree
5237 build_invariant_address (tree type, tree base, poly_int64 offset)
5239 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5240 build_fold_addr_expr (base),
5241 build_int_cst (ptr_type_node, offset));
5242 tree addr = build1 (ADDR_EXPR, type, ref);
5243 recompute_tree_invariant_for_addr_expr (addr);
5244 return addr;
5247 /* Similar except don't specify the TREE_TYPE
5248 and leave the TREE_SIDE_EFFECTS as 0.
5249 It is permissible for arguments to be null,
5250 or even garbage if their values do not matter. */
5252 tree
5253 build_nt (enum tree_code code, ...)
5255 tree t;
5256 int length;
5257 int i;
5258 va_list p;
5260 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5262 va_start (p, code);
5264 t = make_node (code);
5265 length = TREE_CODE_LENGTH (code);
5267 for (i = 0; i < length; i++)
5268 TREE_OPERAND (t, i) = va_arg (p, tree);
5270 va_end (p);
5271 return t;
5274 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5275 tree vec. */
5277 tree
5278 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5280 tree ret, t;
5281 unsigned int ix;
5283 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5284 CALL_EXPR_FN (ret) = fn;
5285 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5286 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5287 CALL_EXPR_ARG (ret, ix) = t;
5288 return ret;
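/* In the CALL_EXPR layout used above, operand 0 holds the operand
   count, operand 1 the callee (CALL_EXPR_FN), operand 2 the static
   chain, and the arguments start at operand 3; hence the "+ 3" passed
   to build_vl_exp and the argument loops starting at index 3 elsewhere
   in this file.  */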
5291 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5292 and data type TYPE.
5293 We do NOT enter this node in any sort of symbol table.
5295 LOC is the location of the decl.
5297 layout_decl is used to set up the decl's storage layout.
5298 Other slots are initialized to 0 or null pointers. */
5300 tree
5301 build_decl (location_t loc, enum tree_code code, tree name,
5302 tree type MEM_STAT_DECL)
5304 tree t;
5306 t = make_node (code PASS_MEM_STAT);
5307 DECL_SOURCE_LOCATION (t) = loc;
5309 /* if (type == error_mark_node)
5310 type = integer_type_node; */
5311 /* That is not done, deliberately, so that having error_mark_node
5312 as the type can suppress useless errors in the use of this variable. */
5314 DECL_NAME (t) = name;
5315 TREE_TYPE (t) = type;
5317 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5318 layout_decl (t, 0);
5320 return t;
5323 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5325 tree
5326 build_debug_expr_decl (tree type)
5328 tree vexpr = make_node (DEBUG_EXPR_DECL);
5329 DECL_ARTIFICIAL (vexpr) = 1;
5330 TREE_TYPE (vexpr) = type;
5331 SET_DECL_MODE (vexpr, TYPE_MODE (type));
5332 return vexpr;
5335 /* Builds and returns a function declaration with NAME and TYPE. */
5337 tree
5338 build_fn_decl (const char *name, tree type)
5340 tree id = get_identifier (name);
5341 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5343 DECL_EXTERNAL (decl) = 1;
5344 TREE_PUBLIC (decl) = 1;
5345 DECL_ARTIFICIAL (decl) = 1;
5346 TREE_NOTHROW (decl) = 1;
5348 return decl;
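/* Hypothetical example (the function name is made up): declaring an
   external "void my_helper (void)" for the middle end to call:

     tree fntype = build_function_type_list (void_type_node, NULL_TREE);
     tree decl = build_fn_decl ("my_helper", fntype);  */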
5351 vec<tree, va_gc> *all_translation_units;
5353 /* Builds a new translation-unit decl with name NAME, queues it in the
5354 global list of translation-unit decls and returns it. */
5356 tree
5357 build_translation_unit_decl (tree name)
5359 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5360 name, NULL_TREE);
5361 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5362 vec_safe_push (all_translation_units, tu);
5363 return tu;
5367 /* BLOCK nodes are used to represent the structure of binding contours
5368 and declarations, once those contours have been exited and their contents
5369 compiled. This information is used for outputting debugging info. */
5371 tree
5372 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5374 tree block = make_node (BLOCK);
5376 BLOCK_VARS (block) = vars;
5377 BLOCK_SUBBLOCKS (block) = subblocks;
5378 BLOCK_SUPERCONTEXT (block) = supercontext;
5379 BLOCK_CHAIN (block) = chain;
5380 return block;
5384 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5386 LOC is the location to use in tree T. */
5388 void
5389 protected_set_expr_location (tree t, location_t loc)
5391 if (CAN_HAVE_LOCATION_P (t))
5392 SET_EXPR_LOCATION (t, loc);
5393 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5395 t = expr_single (t);
5396 if (t && CAN_HAVE_LOCATION_P (t))
5397 SET_EXPR_LOCATION (t, loc);
5401 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5402 UNKNOWN_LOCATION. */
5404 void
5405 protected_set_expr_location_if_unset (tree t, location_t loc)
5407 t = expr_single (t);
5408 if (t && !EXPR_HAS_LOCATION (t))
5409 protected_set_expr_location (t, loc);
5412 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5413 of the various TYPE_QUAL values. */
5415 static void
5416 set_type_quals (tree type, int type_quals)
5418 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5419 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5420 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5421 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5422 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5425 /* Returns true iff CAND and BASE have equivalent language-specific
5426 qualifiers. */
5428 bool
5429 check_lang_type (const_tree cand, const_tree base)
5431 if (lang_hooks.types.type_hash_eq == NULL)
5432 return true;
5433 /* type_hash_eq currently only applies to these types. */
5434 if (TREE_CODE (cand) != FUNCTION_TYPE
5435 && TREE_CODE (cand) != METHOD_TYPE)
5436 return true;
5437 return lang_hooks.types.type_hash_eq (cand, base);
5440 /* This function checks to see if TYPE matches the size of one of the built-in
5441 atomic types, and returns that core atomic type. */
5443 static tree
5444 find_atomic_core_type (const_tree type)
5446 tree base_atomic_type;
5448 /* Only handle complete types. */
5449 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5450 return NULL_TREE;
5452 switch (tree_to_uhwi (TYPE_SIZE (type)))
5454 case 8:
5455 base_atomic_type = atomicQI_type_node;
5456 break;
5458 case 16:
5459 base_atomic_type = atomicHI_type_node;
5460 break;
5462 case 32:
5463 base_atomic_type = atomicSI_type_node;
5464 break;
5466 case 64:
5467 base_atomic_type = atomicDI_type_node;
5468 break;
5470 case 128:
5471 base_atomic_type = atomicTI_type_node;
5472 break;
5474 default:
5475 base_atomic_type = NULL_TREE;
5478 return base_atomic_type;
5481 /* Returns true iff unqualified CAND and BASE are equivalent. */
5483 bool
5484 check_base_type (const_tree cand, const_tree base)
5486 if (TYPE_NAME (cand) != TYPE_NAME (base)
5487 /* Apparently this is needed for Objective-C. */
5488 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5489 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5490 TYPE_ATTRIBUTES (base)))
5491 return false;
5492 /* Check alignment. */
5493 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5494 && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5495 return true;
5496 /* Atomic types increase minimal alignment. We must do so as well
5497 or we get duplicated canonical types. See PR88686. */
5498 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5500 /* See if this object can map to a basic atomic type. */
5501 tree atomic_type = find_atomic_core_type (cand);
5502 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5503 return true;
5505 return false;
5508 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5510 bool
5511 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5513 return (TYPE_QUALS (cand) == type_quals
5514 && check_base_type (cand, base)
5515 && check_lang_type (cand, base));
5518 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5520 static bool
5521 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5523 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5524 && TYPE_NAME (cand) == TYPE_NAME (base)
5525 /* Apparently this is needed for Objective-C. */
5526 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5527 /* Check alignment. */
5528 && TYPE_ALIGN (cand) == align
5529 /* Check this is a user-aligned type as build_aligned_type
5530 would create. */
5531 && TYPE_USER_ALIGN (cand)
5532 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5533 TYPE_ATTRIBUTES (base))
5534 && check_lang_type (cand, base));
5537 /* Return a version of the TYPE, qualified as indicated by the
5538 TYPE_QUALS, if one exists. If no qualified version exists yet,
5539 return NULL_TREE. */
5541 tree
5542 get_qualified_type (tree type, int type_quals)
5544 if (TYPE_QUALS (type) == type_quals)
5545 return type;
5547 tree mv = TYPE_MAIN_VARIANT (type);
5548 if (check_qualified_type (mv, type, type_quals))
5549 return mv;
5551 /* Search the chain of variants to see if there is already one there just
5552 like the one we need to have. If so, use that existing one. We must
5553 preserve the TYPE_NAME, since there is code that depends on this. */
5554 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
5555 if (check_qualified_type (*tp, type, type_quals))
5557 /* Put the found variant at the head of the variant list so
5558 frequently searched variants get found faster. The C++ FE
5559 benefits greatly from this. */
5560 tree t = *tp;
5561 *tp = TYPE_NEXT_VARIANT (t);
5562 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
5563 TYPE_NEXT_VARIANT (mv) = t;
5564 return t;
5567 return NULL_TREE;
5570 /* Like get_qualified_type, but creates the type if it does not
5571 exist. This function never returns NULL_TREE. */
5573 tree
5574 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5576 tree t;
5578 /* See if we already have the appropriate qualified variant. */
5579 t = get_qualified_type (type, type_quals);
5581 /* If not, build it. */
5582 if (!t)
5584 t = build_variant_type_copy (type PASS_MEM_STAT);
5585 set_type_quals (t, type_quals);
5587 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5589 /* See if this object can map to a basic atomic type. */
5590 tree atomic_type = find_atomic_core_type (type);
5591 if (atomic_type)
5593 /* Ensure the alignment of this type is compatible with
5594 the required alignment of the atomic type. */
5595 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5596 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5600 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5601 /* Propagate structural equality. */
5602 SET_TYPE_STRUCTURAL_EQUALITY (t);
5603 else if (TYPE_CANONICAL (type) != type)
5604 /* Build the underlying canonical type, since it is different
5605 from TYPE. */
5607 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5608 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5610 else
5611 /* T is its own canonical type. */
5612 TYPE_CANONICAL (t) = t;
5616 return t;
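/* For example, requesting a "const volatile" variant of an existing
   type (sketch; BASE_TYPE is an illustrative name):

     tree cv_variant
       = build_qualified_type (base_type,
                               TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   If such a variant already sits on BASE_TYPE's variant chain,
   get_qualified_type returns it instead of creating a new node.  */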
5619 /* Create a variant of type TYPE with alignment ALIGN. */
5621 tree
5622 build_aligned_type (tree type, unsigned int align)
5624 tree t;
5626 if (TYPE_PACKED (type)
5627 || TYPE_ALIGN (type) == align)
5628 return type;
5630 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5631 if (check_aligned_type (t, type, align))
5632 return t;
5634 t = build_variant_type_copy (type);
5635 SET_TYPE_ALIGN (t, align);
5636 TYPE_USER_ALIGN (t) = 1;
5638 return t;
5641 /* Create a new distinct copy of TYPE. The new type is made its own
5642 MAIN_VARIANT. If TYPE requires structural equality checks, the
5643 resulting type requires structural equality checks; otherwise, its
5644 TYPE_CANONICAL points to itself. */
5646 tree
5647 build_distinct_type_copy (tree type MEM_STAT_DECL)
5649 tree t = copy_node (type PASS_MEM_STAT);
5651 TYPE_POINTER_TO (t) = 0;
5652 TYPE_REFERENCE_TO (t) = 0;
5654 /* Set the canonical type either to a new equivalence class, or
5655 propagate the need for structural equality checks. */
5656 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5657 SET_TYPE_STRUCTURAL_EQUALITY (t);
5658 else
5659 TYPE_CANONICAL (t) = t;
5661 /* Make it its own variant. */
5662 TYPE_MAIN_VARIANT (t) = t;
5663 TYPE_NEXT_VARIANT (t) = 0;
5665 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5666 whose TREE_TYPE is not t. This can also happen in the Ada
5667 frontend when using subtypes. */
5669 return t;
5672 /* Create a new variant of TYPE, equivalent but distinct. This is so
5673 the caller can modify it. TYPE_CANONICAL for the return type will
5674 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5675 are considered equal by the language itself (or that both types
5676 require structural equality checks). */
5678 tree
5679 build_variant_type_copy (tree type MEM_STAT_DECL)
5681 tree t, m = TYPE_MAIN_VARIANT (type);
5683 t = build_distinct_type_copy (type PASS_MEM_STAT);
5685 /* Since we're building a variant, assume that it is a non-semantic
5686 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5687 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5688 /* Type variants have no alias set defined. */
5689 TYPE_ALIAS_SET (t) = -1;
5691 /* Add the new type to the chain of variants of TYPE. */
5692 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5693 TYPE_NEXT_VARIANT (m) = t;
5694 TYPE_MAIN_VARIANT (t) = m;
5696 return t;
5699 /* Return true if the "from" trees in both tree maps are equal. */
5701 int
5702 tree_map_base_eq (const void *va, const void *vb)
5704 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5705 *const b = (const struct tree_map_base *) vb;
5706 return (a->from == b->from);
5709 /* Hash a from tree in a tree_map_base. */
5711 unsigned int
5712 tree_map_base_hash (const void *item)
5714 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5717 /* Return true if this tree map structure is marked for garbage collection
5718 purposes. We simply return true if the from tree is marked, so that this
5719 structure goes away when the from tree goes away. */
5721 int
5722 tree_map_base_marked_p (const void *p)
5724 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5727 /* Hash a from tree in a tree_map. */
5729 unsigned int
5730 tree_map_hash (const void *item)
5732 return (((const struct tree_map *) item)->hash);
5735 /* Hash a from tree in a tree_decl_map. */
5737 unsigned int
5738 tree_decl_map_hash (const void *item)
5740 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5743 /* Return the initialization priority for DECL. */
5745 priority_type
5746 decl_init_priority_lookup (tree decl)
5748 symtab_node *snode = symtab_node::get (decl);
5750 if (!snode)
5751 return DEFAULT_INIT_PRIORITY;
5752 return
5753 snode->get_init_priority ();
5756 /* Return the finalization priority for DECL. */
5758 priority_type
5759 decl_fini_priority_lookup (tree decl)
5761 cgraph_node *node = cgraph_node::get (decl);
5763 if (!node)
5764 return DEFAULT_INIT_PRIORITY;
5765 return
5766 node->get_fini_priority ();
5769 /* Set the initialization priority for DECL to PRIORITY. */
5771 void
5772 decl_init_priority_insert (tree decl, priority_type priority)
5774 struct symtab_node *snode;
5776 if (priority == DEFAULT_INIT_PRIORITY)
5778 snode = symtab_node::get (decl);
5779 if (!snode)
5780 return;
5782 else if (VAR_P (decl))
5783 snode = varpool_node::get_create (decl);
5784 else
5785 snode = cgraph_node::get_create (decl);
5786 snode->set_init_priority (priority);
5789 /* Set the finalization priority for DECL to PRIORITY. */
5791 void
5792 decl_fini_priority_insert (tree decl, priority_type priority)
5794 struct cgraph_node *node;
5796 if (priority == DEFAULT_INIT_PRIORITY)
5798 node = cgraph_node::get (decl);
5799 if (!node)
5800 return;
5802 else
5803 node = cgraph_node::get_create (decl);
5804 node->set_fini_priority (priority);
5807 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5809 static void
5810 print_debug_expr_statistics (void)
5812 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5813 (long) debug_expr_for_decl->size (),
5814 (long) debug_expr_for_decl->elements (),
5815 debug_expr_for_decl->collisions ());
5818 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5820 static void
5821 print_value_expr_statistics (void)
5823 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5824 (long) value_expr_for_decl->size (),
5825 (long) value_expr_for_decl->elements (),
5826 value_expr_for_decl->collisions ());
5829 /* Lookup a debug expression for FROM, and return it if we find one. */
5831 tree
5832 decl_debug_expr_lookup (tree from)
5834 struct tree_decl_map *h, in;
5835 in.base.from = from;
5837 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5838 if (h)
5839 return h->to;
5840 return NULL_TREE;
5843 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5845 void
5846 decl_debug_expr_insert (tree from, tree to)
5848 struct tree_decl_map *h;
5850 h = ggc_alloc<tree_decl_map> ();
5851 h->base.from = from;
5852 h->to = to;
5853 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5856 /* Lookup a value expression for FROM, and return it if we find one. */
5858 tree
5859 decl_value_expr_lookup (tree from)
5861 struct tree_decl_map *h, in;
5862 in.base.from = from;
5864 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5865 if (h)
5866 return h->to;
5867 return NULL_TREE;
5870 /* Insert a mapping FROM->TO in the value expression hashtable. */
5872 void
5873 decl_value_expr_insert (tree from, tree to)
5875 struct tree_decl_map *h;
5877 h = ggc_alloc<tree_decl_map> ();
5878 h->base.from = from;
5879 h->to = to;
5880 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5883 /* Lookup a vector of debug arguments for FROM, and return it if we
5884 find one. */
5886 vec<tree, va_gc> **
5887 decl_debug_args_lookup (tree from)
5889 struct tree_vec_map *h, in;
5891 if (!DECL_HAS_DEBUG_ARGS_P (from))
5892 return NULL;
5893 gcc_checking_assert (debug_args_for_decl != NULL);
5894 in.base.from = from;
5895 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5896 if (h)
5897 return &h->to;
5898 return NULL;
5901 /* Insert a mapping FROM->empty vector of debug arguments in the
5902 debug arguments hashtable. */
5904 vec<tree, va_gc> **
5905 decl_debug_args_insert (tree from)
5907 struct tree_vec_map *h;
5908 tree_vec_map **loc;
5910 if (DECL_HAS_DEBUG_ARGS_P (from))
5911 return decl_debug_args_lookup (from);
5912 if (debug_args_for_decl == NULL)
5913 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5914 h = ggc_alloc<tree_vec_map> ();
5915 h->base.from = from;
5916 h->to = NULL;
5917 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5918 *loc = h;
5919 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5920 return &h->to;
5923 /* Hashing of types so that we don't make duplicates.
5924 The entry point is `type_hash_canon'. */
5926 /* Generate the default hash code for TYPE. This is designed for
5927 speed, rather than maximum entropy. */
5929 hashval_t
5930 type_hash_canon_hash (tree type)
5932 inchash::hash hstate;
5934 hstate.add_int (TREE_CODE (type));
5936 if (TREE_TYPE (type))
5937 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
5939 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
5940 /* Just the identifier is adequate to distinguish. */
5941 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
5943 switch (TREE_CODE (type))
5945 case METHOD_TYPE:
5946 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
5947 /* FALLTHROUGH. */
5948 case FUNCTION_TYPE:
5949 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
5950 if (TREE_VALUE (t) != error_mark_node)
5951 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
5952 break;
5954 case OFFSET_TYPE:
5955 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
5956 break;
5958 case ARRAY_TYPE:
5960 if (TYPE_DOMAIN (type))
5961 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
5962 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
5964 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
5965 hstate.add_object (typeless);
5968 break;
5970 case INTEGER_TYPE:
5972 tree t = TYPE_MAX_VALUE (type);
5973 if (!t)
5974 t = TYPE_MIN_VALUE (type);
5975 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
5976 hstate.add_object (TREE_INT_CST_ELT (t, i));
5977 break;
5980 case REAL_TYPE:
5981 case FIXED_POINT_TYPE:
5983 unsigned prec = TYPE_PRECISION (type);
5984 hstate.add_object (prec);
5985 break;
5988 case VECTOR_TYPE:
5989 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
5990 break;
5992 default:
5993 break;
5996 return hstate.end ();
5999 /* These are the Hashtable callback functions. */
6001 /* Returns true iff the types are equivalent. */
6003 bool
6004 type_cache_hasher::equal (type_hash *a, type_hash *b)
6006 /* First test the things that are the same for all types. */
6007 if (a->hash != b->hash
6008 || TREE_CODE (a->type) != TREE_CODE (b->type)
6009 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6010 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6011 TYPE_ATTRIBUTES (b->type))
6012 || (TREE_CODE (a->type) != COMPLEX_TYPE
6013 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6014 return 0;
6016 /* Be careful about comparing arrays before and after the element type
6017 has been completed; don't compare TYPE_ALIGN unless both types are
6018 complete. */
6019 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6020 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6021 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6022 return 0;
6024 switch (TREE_CODE (a->type))
6026 case VOID_TYPE:
6027 case OPAQUE_TYPE:
6028 case COMPLEX_TYPE:
6029 case POINTER_TYPE:
6030 case REFERENCE_TYPE:
6031 case NULLPTR_TYPE:
6032 return 1;
6034 case VECTOR_TYPE:
6035 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6036 TYPE_VECTOR_SUBPARTS (b->type));
6038 case ENUMERAL_TYPE:
6039 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6040 && !(TYPE_VALUES (a->type)
6041 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6042 && TYPE_VALUES (b->type)
6043 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6044 && type_list_equal (TYPE_VALUES (a->type),
6045 TYPE_VALUES (b->type))))
6046 return 0;
6048 /* fall through */
6050 case INTEGER_TYPE:
6051 case REAL_TYPE:
6052 case BOOLEAN_TYPE:
6053 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6054 return false;
6055 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6056 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6057 TYPE_MAX_VALUE (b->type)))
6058 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6059 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6060 TYPE_MIN_VALUE (b->type))));
6062 case FIXED_POINT_TYPE:
6063 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6065 case OFFSET_TYPE:
6066 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6068 case METHOD_TYPE:
6069 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6070 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6071 || (TYPE_ARG_TYPES (a->type)
6072 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6073 && TYPE_ARG_TYPES (b->type)
6074 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6075 && type_list_equal (TYPE_ARG_TYPES (a->type),
6076 TYPE_ARG_TYPES (b->type)))))
6077 break;
6078 return 0;
6079 case ARRAY_TYPE:
6080 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6081 where the flag should be inherited from the element type
6082 and can change after ARRAY_TYPEs are created; on non-aggregates
6083 compare it and hash it, scalars will never have that flag set
6084 and we need to differentiate between arrays created by different
6085 front-ends or middle-end created arrays. */
6086 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6087 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6088 || (TYPE_TYPELESS_STORAGE (a->type)
6089 == TYPE_TYPELESS_STORAGE (b->type))));
6091 case RECORD_TYPE:
6092 case UNION_TYPE:
6093 case QUAL_UNION_TYPE:
6094 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6095 || (TYPE_FIELDS (a->type)
6096 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6097 && TYPE_FIELDS (b->type)
6098 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6099 && type_list_equal (TYPE_FIELDS (a->type),
6100 TYPE_FIELDS (b->type))));
6102 case FUNCTION_TYPE:
6103 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6104 || (TYPE_ARG_TYPES (a->type)
6105 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6106 && TYPE_ARG_TYPES (b->type)
6107 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6108 && type_list_equal (TYPE_ARG_TYPES (a->type),
6109 TYPE_ARG_TYPES (b->type))))
6110 break;
6111 return 0;
6113 default:
6114 return 0;
6117 if (lang_hooks.types.type_hash_eq != NULL)
6118 return lang_hooks.types.type_hash_eq (a->type, b->type);
6120 return 1;
6123 /* Given TYPE, and HASHCODE its hash code, return the canonical
6124 object for an identical type if one already exists.
6125 Otherwise, return TYPE, and record it as the canonical object.
6127 To use this function, first create a type of the sort you want.
6128 Then compute its hash code from the fields of the type that
6129 make it different from other similar types.
6130 Then call this function and use the value. */
6132 tree
6133 type_hash_canon (unsigned int hashcode, tree type)
6135 type_hash in;
6136 type_hash **loc;
6138 /* The hash table only contains main variants, so ensure that's what we're
6139 being passed. */
6140 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6142 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6143 must call that routine before comparing TYPE_ALIGNs. */
6144 layout_type (type);
6146 in.hash = hashcode;
6147 in.type = type;
6149 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6150 if (*loc)
6152 tree t1 = ((type_hash *) *loc)->type;
6153 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6154 && t1 != type);
6155 if (TYPE_UID (type) + 1 == next_type_uid)
6156 --next_type_uid;
6157 /* Also free min/max values and the cache for integer
6158 types. This can't be done in free_node, as LTO frees
6159 those on its own. */
6160 if (TREE_CODE (type) == INTEGER_TYPE)
6162 if (TYPE_MIN_VALUE (type)
6163 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6165 /* Zero is always in TYPE_CACHED_VALUES. */
6166 if (! TYPE_UNSIGNED (type))
6167 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6168 ggc_free (TYPE_MIN_VALUE (type));
6170 if (TYPE_MAX_VALUE (type)
6171 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6173 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6174 ggc_free (TYPE_MAX_VALUE (type));
6176 if (TYPE_CACHED_VALUES_P (type))
6177 ggc_free (TYPE_CACHED_VALUES (type));
6179 free_node (type);
6180 return t1;
6182 else
6184 struct type_hash *h;
6186 h = ggc_alloc<type_hash> ();
6187 h->hash = hashcode;
6188 h->type = type;
6189 *loc = h;
6191 return type;
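/* Illustrative sketch (added for exposition; not part of the original
   sources): the create/hash/canonicalize protocol described above, as a
   hypothetical helper returning a shared INTEGER_TYPE of a given
   precision.  It mirrors what build_nonstandard_integer_type does later
   in this file, minus the per-precision cache; the helper name is made
   up for the example.  */

static tree
example_shared_integer_type (unsigned int precision, bool unsignedp)
{
  /* First create a type of the sort we want.  */
  tree t = make_node (INTEGER_TYPE);
  TYPE_PRECISION (t) = precision;
  if (unsignedp)
    fixup_unsigned_type (t);
  else
    fixup_signed_type (t);

  /* Then hash the distinguishing fields and canonicalize: if an
     identical type is already recorded, T is freed and the existing
     node is returned instead.  */
  hashval_t hash = type_hash_canon_hash (t);
  return type_hash_canon (hash, t);
}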
6195 static void
6196 print_type_hash_statistics (void)
6198 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6199 (long) type_hash_table->size (),
6200 (long) type_hash_table->elements (),
6201 type_hash_table->collisions ());
6204 /* Given two lists of types
6205 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6206 return 1 if the lists contain the same types in the same order.
6207 Also, the TREE_PURPOSEs must match. */
6209 bool
6210 type_list_equal (const_tree l1, const_tree l2)
6212 const_tree t1, t2;
6214 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6215 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6216 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6217 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6218 && (TREE_TYPE (TREE_PURPOSE (t1))
6219 == TREE_TYPE (TREE_PURPOSE (t2))))))
6220 return false;
6222 return t1 == t2;
6225 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6226 given by FNTYPE. If the argument list accepts variable arguments,
6227 then this function counts only the ordinary arguments. */
6230 type_num_arguments (const_tree fntype)
6232 int i = 0;
6234 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6235 /* If the function does not take a variable number of arguments,
6236 the last element in the list will have type `void'. */
6237 if (VOID_TYPE_P (TREE_VALUE (t)))
6238 break;
6239 else
6240 ++i;
6242 return i;
6245 /* Return the type of the function FNTYPE's argument ARGNO if known.
6246 For vararg functions, where ARGNO refers to one of the variadic
6247 arguments, return null. Otherwise, return void_type_node for an
6248 out-of-bounds ARGNO. */
6250 tree
6251 type_argument_type (const_tree fntype, unsigned argno)
6253 /* Treat zero the same as an out-of-bounds argument number. */
6254 if (!argno)
6255 return void_type_node;
6257 function_args_iterator iter;
6259 tree argtype;
6260 unsigned i = 1;
6261 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6263 /* A vararg function's argument list ends in a null. Otherwise,
6264 an ordinary function's argument list ends with void. Return
6265 null if ARGNO refers to a vararg argument, void_type_node if
6266 it's out of bounds, and the formal argument type otherwise. */
6267 if (!argtype)
6268 break;
6270 if (i == argno || VOID_TYPE_P (argtype))
6271 return argtype;
6273 ++i;
6276 return NULL_TREE;
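/* Illustrative sketch (added for exposition; not part of the original
   sources): walking the named argument types of a FUNCTION_TYPE with the
   two helpers above.  The particular function type and the helper name
   are assumptions made up for the example.  */

static void
example_check_argument_types (void)
{
  /* Hypothetical type of "int f (char *, int)", no varargs.  */
  tree fntype = build_function_type_list (integer_type_node,
					  build_pointer_type (char_type_node),
					  integer_type_node, NULL_TREE);

  gcc_assert (type_num_arguments (fntype) == 2);

  /* ARGNO is 1-based; an out-of-bounds ARGNO yields void_type_node,
     while a variadic position would yield NULL_TREE.  */
  gcc_assert (type_argument_type (fntype, 2) == integer_type_node);
  gcc_assert (type_argument_type (fntype, 3) == void_type_node);
}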
6279 /* Nonzero if integer constants T1 and T2
6280 represent the same constant value. */
6283 tree_int_cst_equal (const_tree t1, const_tree t2)
6285 if (t1 == t2)
6286 return 1;
6288 if (t1 == 0 || t2 == 0)
6289 return 0;
6291 STRIP_ANY_LOCATION_WRAPPER (t1);
6292 STRIP_ANY_LOCATION_WRAPPER (t2);
6294 if (TREE_CODE (t1) == INTEGER_CST
6295 && TREE_CODE (t2) == INTEGER_CST
6296 && wi::to_widest (t1) == wi::to_widest (t2))
6297 return 1;
6299 return 0;
6302 /* Return true if T is an INTEGER_CST whose numerical value (extended
6303 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6305 bool
6306 tree_fits_shwi_p (const_tree t)
6308 return (t != NULL_TREE
6309 && TREE_CODE (t) == INTEGER_CST
6310 && wi::fits_shwi_p (wi::to_widest (t)));
6313 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6314 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6316 bool
6317 tree_fits_poly_int64_p (const_tree t)
6319 if (t == NULL_TREE)
6320 return false;
6321 if (POLY_INT_CST_P (t))
6323 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6324 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6325 return false;
6326 return true;
6328 return (TREE_CODE (t) == INTEGER_CST
6329 && wi::fits_shwi_p (wi::to_widest (t)));
6332 /* Return true if T is an INTEGER_CST whose numerical value (extended
6333 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6335 bool
6336 tree_fits_uhwi_p (const_tree t)
6338 return (t != NULL_TREE
6339 && TREE_CODE (t) == INTEGER_CST
6340 && wi::fits_uhwi_p (wi::to_widest (t)));
6343 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6344 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6346 bool
6347 tree_fits_poly_uint64_p (const_tree t)
6349 if (t == NULL_TREE)
6350 return false;
6351 if (POLY_INT_CST_P (t))
6353 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6354 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6355 return false;
6356 return true;
6358 return (TREE_CODE (t) == INTEGER_CST
6359 && wi::fits_uhwi_p (wi::to_widest (t)));
6362 /* T is an INTEGER_CST whose numerical value (extended according to
6363 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6364 HOST_WIDE_INT. */
6366 HOST_WIDE_INT
6367 tree_to_shwi (const_tree t)
6369 gcc_assert (tree_fits_shwi_p (t));
6370 return TREE_INT_CST_LOW (t);
6373 /* T is an INTEGER_CST whose numerical value (extended according to
6374 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6375 HOST_WIDE_INT. */
6377 unsigned HOST_WIDE_INT
6378 tree_to_uhwi (const_tree t)
6380 gcc_assert (tree_fits_uhwi_p (t));
6381 return TREE_INT_CST_LOW (t);
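/* Illustrative sketch (added for exposition; not part of the original
   sources): the usual check-then-extract pattern for the fits/to pairs
   above, here applied to a type's size in bytes.  The helper name and
   LIMIT parameter are made up for the example.  */

static bool
example_size_at_most_p (tree type, unsigned HOST_WIDE_INT limit)
{
  tree size = TYPE_SIZE_UNIT (type);
  /* Only call tree_to_uhwi after the predicate succeeds; it asserts
     tree_fits_uhwi_p internally.  */
  return (size
	  && tree_fits_uhwi_p (size)
	  && tree_to_uhwi (size) <= limit);
}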
6384 /* Return the most significant (sign) bit of T. */
6387 tree_int_cst_sign_bit (const_tree t)
6389 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6391 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6394 /* Return an indication of the sign of the integer constant T.
6395 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6396 Note that -1 will never be returned if T's type is unsigned. */
6399 tree_int_cst_sgn (const_tree t)
6401 if (wi::to_wide (t) == 0)
6402 return 0;
6403 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6404 return 1;
6405 else if (wi::neg_p (wi::to_wide (t)))
6406 return -1;
6407 else
6408 return 1;
6411 /* Return the minimum number of bits needed to represent VALUE in a
6412 signed or unsigned type, SGN says which. */
6414 unsigned int
6415 tree_int_cst_min_precision (tree value, signop sgn)
6417 /* If the value is negative, compute its negative minus 1. The latter
6418 adjustment is because the absolute value of the largest negative value
6419 is one larger than the largest positive value. This is equivalent to
6420 a bit-wise negation, so use that operation instead. */
6422 if (tree_int_cst_sgn (value) < 0)
6423 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6425 /* Return the number of bits needed, taking into account the fact
6426 that we need one more bit for a signed than unsigned type.
6427 If VALUE is 0 or -1, the minimum precision is 1 no matter
6428 whether SGN is SIGNED or UNSIGNED. */
6430 if (integer_zerop (value))
6431 return 1;
6432 else
6433 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
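/* Worked example (added for exposition; not part of the original
   sources): VALUE 5 is 101 in binary, so it needs 3 bits as an unsigned
   quantity and 4 bits, including the sign bit, as a signed one; 0 and -1
   each need a single bit.  The helper name is made up.  */

static void
example_min_precision (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  gcc_assert (tree_int_cst_min_precision (five, UNSIGNED) == 3);
  gcc_assert (tree_int_cst_min_precision (five, SIGNED) == 4);
}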
6436 /* Return truthvalue of whether T1 is the same tree structure as T2.
6437 Return 1 if they are the same.
6438 Return 0 if they are understandably different.
6439 Return -1 if either contains tree structure not understood by
6440 this function. */
6443 simple_cst_equal (const_tree t1, const_tree t2)
6445 enum tree_code code1, code2;
6446 int cmp;
6447 int i;
6449 if (t1 == t2)
6450 return 1;
6451 if (t1 == 0 || t2 == 0)
6452 return 0;
6454 /* For location wrappers to be the same, they must be at the same
6455 source location (and wrap the same thing). */
6456 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6458 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6459 return 0;
6460 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6463 code1 = TREE_CODE (t1);
6464 code2 = TREE_CODE (t2);
6466 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6468 if (CONVERT_EXPR_CODE_P (code2)
6469 || code2 == NON_LVALUE_EXPR)
6470 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6471 else
6472 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6475 else if (CONVERT_EXPR_CODE_P (code2)
6476 || code2 == NON_LVALUE_EXPR)
6477 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6479 if (code1 != code2)
6480 return 0;
6482 switch (code1)
6484 case INTEGER_CST:
6485 return wi::to_widest (t1) == wi::to_widest (t2);
6487 case REAL_CST:
6488 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6490 case FIXED_CST:
6491 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6493 case STRING_CST:
6494 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6495 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6496 TREE_STRING_LENGTH (t1)));
6498 case CONSTRUCTOR:
6500 unsigned HOST_WIDE_INT idx;
6501 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6502 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6504 if (vec_safe_length (v1) != vec_safe_length (v2))
6505 return false;
6507 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6508 /* ??? Should we handle also fields here? */
6509 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6510 return false;
6511 return true;
6514 case SAVE_EXPR:
6515 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6517 case CALL_EXPR:
6518 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6519 if (cmp <= 0)
6520 return cmp;
6521 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6522 return 0;
6524 const_tree arg1, arg2;
6525 const_call_expr_arg_iterator iter1, iter2;
6526 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6527 arg2 = first_const_call_expr_arg (t2, &iter2);
6528 arg1 && arg2;
6529 arg1 = next_const_call_expr_arg (&iter1),
6530 arg2 = next_const_call_expr_arg (&iter2))
6532 cmp = simple_cst_equal (arg1, arg2);
6533 if (cmp <= 0)
6534 return cmp;
6536 return arg1 == arg2;
6539 case TARGET_EXPR:
6540 /* Special case: if either target is an unallocated VAR_DECL,
6541 it means that it's going to be unified with whatever the
6542 TARGET_EXPR is really supposed to initialize, so treat it
6543 as being equivalent to anything. */
6544 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6545 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6546 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6547 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6548 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6549 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6550 cmp = 1;
6551 else
6552 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6554 if (cmp <= 0)
6555 return cmp;
6557 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6559 case WITH_CLEANUP_EXPR:
6560 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6561 if (cmp <= 0)
6562 return cmp;
6564 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6566 case COMPONENT_REF:
6567 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6568 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6570 return 0;
6572 case VAR_DECL:
6573 case PARM_DECL:
6574 case CONST_DECL:
6575 case FUNCTION_DECL:
6576 return 0;
6578 default:
6579 if (POLY_INT_CST_P (t1))
6580 /* A false return means maybe_ne rather than known_ne. */
6581 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6582 TYPE_SIGN (TREE_TYPE (t1))),
6583 poly_widest_int::from (poly_int_cst_value (t2),
6584 TYPE_SIGN (TREE_TYPE (t2))));
6585 break;
6588 /* This general rule works for most tree codes. All exceptions should be
6589 handled above. If this is a language-specific tree code, we can't
6590 trust what might be in the operand, so say we don't know
6591 the situation. */
6592 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6593 return -1;
6595 switch (TREE_CODE_CLASS (code1))
6597 case tcc_unary:
6598 case tcc_binary:
6599 case tcc_comparison:
6600 case tcc_expression:
6601 case tcc_reference:
6602 case tcc_statement:
6603 cmp = 1;
6604 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6606 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6607 if (cmp <= 0)
6608 return cmp;
6611 return cmp;
6613 default:
6614 return -1;
6618 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6619 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6620 than U, respectively. */
6623 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6625 if (tree_int_cst_sgn (t) < 0)
6626 return -1;
6627 else if (!tree_fits_uhwi_p (t))
6628 return 1;
6629 else if (TREE_INT_CST_LOW (t) == u)
6630 return 0;
6631 else if (TREE_INT_CST_LOW (t) < u)
6632 return -1;
6633 else
6634 return 1;
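/* Illustrative sketch (added for exposition; not part of the original
   sources): compare_tree_int bounds a tree constant against a host
   integer; the helper name and LIMIT_BITS parameter are made up.  */

static bool
example_bit_size_at_most_p (tree type, unsigned HOST_WIDE_INT limit_bits)
{
  tree size = TYPE_SIZE (type);
  /* Unlike the tree_to_uhwi pattern, this also handles constants too
     large for a HOST_WIDE_INT: they simply compare as greater.  */
  return (size
	  && TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, limit_bits) <= 0);
}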
6637 /* Return true if SIZE represents a constant size that is in bounds of
6638 what the middle-end and the backend accept (covering not more than
6639 half of the address space).
6640 When PERR is non-null, set *PERR on failure to the description of
6641 why SIZE is not valid. */
6643 bool
6644 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6646 if (POLY_INT_CST_P (size))
6648 if (TREE_OVERFLOW (size))
6649 return false;
6650 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6651 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6652 return false;
6653 return true;
6656 cst_size_error error;
6657 if (!perr)
6658 perr = &error;
6660 if (TREE_CODE (size) != INTEGER_CST)
6662 *perr = cst_size_not_constant;
6663 return false;
6666 if (TREE_OVERFLOW_P (size))
6668 *perr = cst_size_overflow;
6669 return false;
6672 if (tree_int_cst_sgn (size) < 0)
6674 *perr = cst_size_negative;
6675 return false;
6677 if (!tree_fits_uhwi_p (size)
6678 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6679 < wi::to_widest (size) * 2))
6681 *perr = cst_size_too_big;
6682 return false;
6685 return true;
6688 /* Return the precision of the type, or for a complex or vector type the
6689 precision of the type of its elements. */
6691 unsigned int
6692 element_precision (const_tree type)
6694 if (!TYPE_P (type))
6695 type = TREE_TYPE (type);
6696 enum tree_code code = TREE_CODE (type);
6697 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6698 type = TREE_TYPE (type);
6700 return TYPE_PRECISION (type);
6703 /* Return true if CODE represents an associative tree code. Otherwise
6704 return false. */
6705 bool
6706 associative_tree_code (enum tree_code code)
6708 switch (code)
6710 case BIT_IOR_EXPR:
6711 case BIT_AND_EXPR:
6712 case BIT_XOR_EXPR:
6713 case PLUS_EXPR:
6714 case MULT_EXPR:
6715 case MIN_EXPR:
6716 case MAX_EXPR:
6717 return true;
6719 default:
6720 break;
6722 return false;
6725 /* Return true if CODE represents a commutative tree code. Otherwise
6726 return false. */
6727 bool
6728 commutative_tree_code (enum tree_code code)
6730 switch (code)
6732 case PLUS_EXPR:
6733 case MULT_EXPR:
6734 case MULT_HIGHPART_EXPR:
6735 case MIN_EXPR:
6736 case MAX_EXPR:
6737 case BIT_IOR_EXPR:
6738 case BIT_XOR_EXPR:
6739 case BIT_AND_EXPR:
6740 case NE_EXPR:
6741 case EQ_EXPR:
6742 case UNORDERED_EXPR:
6743 case ORDERED_EXPR:
6744 case UNEQ_EXPR:
6745 case LTGT_EXPR:
6746 case TRUTH_AND_EXPR:
6747 case TRUTH_XOR_EXPR:
6748 case TRUTH_OR_EXPR:
6749 case WIDEN_MULT_EXPR:
6750 case VEC_WIDEN_MULT_HI_EXPR:
6751 case VEC_WIDEN_MULT_LO_EXPR:
6752 case VEC_WIDEN_MULT_EVEN_EXPR:
6753 case VEC_WIDEN_MULT_ODD_EXPR:
6754 return true;
6756 default:
6757 break;
6759 return false;
6762 /* Return true if CODE represents a ternary tree code for which the
6763 first two operands are commutative. Otherwise return false. */
6764 bool
6765 commutative_ternary_tree_code (enum tree_code code)
6767 switch (code)
6769 case WIDEN_MULT_PLUS_EXPR:
6770 case WIDEN_MULT_MINUS_EXPR:
6771 case DOT_PROD_EXPR:
6772 return true;
6774 default:
6775 break;
6777 return false;
6780 /* Returns true if CODE can overflow. */
6782 bool
6783 operation_can_overflow (enum tree_code code)
6785 switch (code)
6787 case PLUS_EXPR:
6788 case MINUS_EXPR:
6789 case MULT_EXPR:
6790 case LSHIFT_EXPR:
6791 /* Can overflow in various ways. */
6792 return true;
6793 case TRUNC_DIV_EXPR:
6794 case EXACT_DIV_EXPR:
6795 case FLOOR_DIV_EXPR:
6796 case CEIL_DIV_EXPR:
6797 /* For INT_MIN / -1. */
6798 return true;
6799 case NEGATE_EXPR:
6800 case ABS_EXPR:
6801 /* For -INT_MIN. */
6802 return true;
6803 default:
6804 /* These operators cannot overflow. */
6805 return false;
6809 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6810 -ftrapv doesn't generate trapping insns for CODE. */
6812 bool
6813 operation_no_trapping_overflow (tree type, enum tree_code code)
6815 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6817 /* We don't generate instructions that trap on overflow for complex or vector
6818 types. */
6819 if (!INTEGRAL_TYPE_P (type))
6820 return true;
6822 if (!TYPE_OVERFLOW_TRAPS (type))
6823 return true;
6825 switch (code)
6827 case PLUS_EXPR:
6828 case MINUS_EXPR:
6829 case MULT_EXPR:
6830 case NEGATE_EXPR:
6831 case ABS_EXPR:
6832 /* These operators can overflow, and -ftrapv generates trapping code for
6833 these. */
6834 return false;
6835 case TRUNC_DIV_EXPR:
6836 case EXACT_DIV_EXPR:
6837 case FLOOR_DIV_EXPR:
6838 case CEIL_DIV_EXPR:
6839 case LSHIFT_EXPR:
6840 /* These operators can overflow, but -ftrapv does not generate trapping
6841 code for these. */
6842 return true;
6843 default:
6844 /* These operators cannot overflow. */
6845 return true;
6849 /* Constructors for pointer, array and function types.
6850 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6851 constructed by language-dependent code, not here.) */
6853 /* Construct, lay out and return the type of pointers to TO_TYPE with
6854 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6855 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6856 indicate this type can reference all of memory. If such a type has
6857 already been constructed, reuse it. */
6859 tree
6860 build_pointer_type_for_mode (tree to_type, machine_mode mode,
6861 bool can_alias_all)
6863 tree t;
6864 bool could_alias = can_alias_all;
6866 if (to_type == error_mark_node)
6867 return error_mark_node;
6869 if (mode == VOIDmode)
6871 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6872 mode = targetm.addr_space.pointer_mode (as);
6875 /* If the pointed-to type has the may_alias attribute set, force
6876 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6877 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6878 can_alias_all = true;
6880 /* In some cases, languages will have things that aren't a POINTER_TYPE
6881 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6882 In that case, return that type without regard to the rest of our
6883 operands.
6885 ??? This is a kludge, but consistent with the way this function has
6886 always operated and there doesn't seem to be a good way to avoid this
6887 at the moment. */
6888 if (TYPE_POINTER_TO (to_type) != 0
6889 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
6890 return TYPE_POINTER_TO (to_type);
6892 /* First, if we already have a type for pointers to TO_TYPE and it's
6893 the proper mode, use it. */
6894 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
6895 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6896 return t;
6898 t = make_node (POINTER_TYPE);
6900 TREE_TYPE (t) = to_type;
6901 SET_TYPE_MODE (t, mode);
6902 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6903 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
6904 TYPE_POINTER_TO (to_type) = t;
6906 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6907 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6908 SET_TYPE_STRUCTURAL_EQUALITY (t);
6909 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6910 TYPE_CANONICAL (t)
6911 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
6912 mode, false);
6914 /* Lay out the type. This function has many callers that are concerned
6915 with expression-construction, and this simplifies them all. */
6916 layout_type (t);
6918 return t;
6921 /* By default build pointers in ptr_mode. */
6923 tree
6924 build_pointer_type (tree to_type)
6926 return build_pointer_type_for_mode (to_type, VOIDmode, false);
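/* Illustrative sketch (added for exposition; not part of the original
   sources): pointer types are shared, so asking twice for a pointer to
   the same type yields the same node, and nesting the call builds
   e.g. "char **".  The helper name is made up.  */

static void
example_pointer_sharing (void)
{
  tree char_ptr = build_pointer_type (char_type_node);
  tree char_ptr_ptr = build_pointer_type (char_ptr);

  gcc_assert (build_pointer_type (char_type_node) == char_ptr);
  gcc_assert (TREE_TYPE (char_ptr_ptr) == char_ptr);
}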
6929 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
6931 tree
6932 build_reference_type_for_mode (tree to_type, machine_mode mode,
6933 bool can_alias_all)
6935 tree t;
6936 bool could_alias = can_alias_all;
6938 if (to_type == error_mark_node)
6939 return error_mark_node;
6941 if (mode == VOIDmode)
6943 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6944 mode = targetm.addr_space.pointer_mode (as);
6947 /* If the pointed-to type has the may_alias attribute set, force
6948 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6949 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6950 can_alias_all = true;
6952 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
6953 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
6954 In that case, return that type without regard to the rest of our
6955 operands.
6957 ??? This is a kludge, but consistent with the way this function has
6958 always operated and there doesn't seem to be a good way to avoid this
6959 at the moment. */
6960 if (TYPE_REFERENCE_TO (to_type) != 0
6961 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
6962 return TYPE_REFERENCE_TO (to_type);
6964 /* First, if we already have a type for pointers to TO_TYPE and it's
6965 the proper mode, use it. */
6966 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
6967 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6968 return t;
6970 t = make_node (REFERENCE_TYPE);
6972 TREE_TYPE (t) = to_type;
6973 SET_TYPE_MODE (t, mode);
6974 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6975 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
6976 TYPE_REFERENCE_TO (to_type) = t;
6978 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6979 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6980 SET_TYPE_STRUCTURAL_EQUALITY (t);
6981 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6982 TYPE_CANONICAL (t)
6983 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
6984 mode, false);
6986 layout_type (t);
6988 return t;
6992 /* Build the node for the type of references-to-TO_TYPE by default
6993 in ptr_mode. */
6995 tree
6996 build_reference_type (tree to_type)
6998 return build_reference_type_for_mode (to_type, VOIDmode, false);
7001 #define MAX_INT_CACHED_PREC \
7002 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7003 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7005 static void
7006 clear_nonstandard_integer_type_cache (void)
7008 for (size_t i = 0 ; i < 2 * MAX_INT_CACHED_PREC + 2 ; i++)
7010 nonstandard_integer_type_cache[i] = NULL;
7014 /* Builds a signed or unsigned integer type of precision PRECISION.
7015 Used for C bitfields whose precision does not match that of
7016 built-in target types. */
7017 tree
7018 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7019 int unsignedp)
7021 tree itype, ret;
7023 if (unsignedp)
7024 unsignedp = MAX_INT_CACHED_PREC + 1;
7026 if (precision <= MAX_INT_CACHED_PREC)
7028 itype = nonstandard_integer_type_cache[precision + unsignedp];
7029 if (itype)
7030 return itype;
7033 itype = make_node (INTEGER_TYPE);
7034 TYPE_PRECISION (itype) = precision;
7036 if (unsignedp)
7037 fixup_unsigned_type (itype);
7038 else
7039 fixup_signed_type (itype);
7041 inchash::hash hstate;
7042 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7043 ret = type_hash_canon (hstate.end (), itype);
7044 if (precision <= MAX_INT_CACHED_PREC)
7045 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7047 return ret;
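/* Illustrative sketch (added for exposition; not part of the original
   sources): a 24-bit unsigned type such as a front end might request for
   a bit-field whose width matches no standard integer type.  A repeated
   request is served from the cache above.  The helper name is made up.  */

static void
example_uint24 (void)
{
  tree uint24 = build_nonstandard_integer_type (24, /*unsignedp=*/1);
  gcc_assert (TYPE_PRECISION (uint24) == 24 && TYPE_UNSIGNED (uint24));
  gcc_assert (build_nonstandard_integer_type (24, 1) == uint24);
}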
7050 #define MAX_BOOL_CACHED_PREC \
7051 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7052 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7054 /* Builds a boolean type of precision PRECISION.
7055 Used for boolean vectors to choose proper vector element size. */
7056 tree
7057 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7059 tree type;
7061 if (precision <= MAX_BOOL_CACHED_PREC)
7063 type = nonstandard_boolean_type_cache[precision];
7064 if (type)
7065 return type;
7068 type = make_node (BOOLEAN_TYPE);
7069 TYPE_PRECISION (type) = precision;
7070 fixup_signed_type (type);
7072 if (precision <= MAX_BOOL_CACHED_PREC)
7073 nonstandard_boolean_type_cache[precision] = type;
7075 return type;
7078 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7079 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7080 is true, reuse such a type that has already been constructed. */
7082 static tree
7083 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7085 tree itype = make_node (INTEGER_TYPE);
7087 TREE_TYPE (itype) = type;
7089 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7090 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7092 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7093 SET_TYPE_MODE (itype, TYPE_MODE (type));
7094 TYPE_SIZE (itype) = TYPE_SIZE (type);
7095 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7096 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7097 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7098 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7100 if (!shared)
7101 return itype;
7103 if ((TYPE_MIN_VALUE (itype)
7104 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7105 || (TYPE_MAX_VALUE (itype)
7106 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7108 /* Since we cannot reliably merge this type, we need to compare it using
7109 structural equality checks. */
7110 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7111 return itype;
7114 hashval_t hash = type_hash_canon_hash (itype);
7115 itype = type_hash_canon (hash, itype);
7117 return itype;
7120 /* Wrapper around build_range_type_1 with SHARED set to true. */
7122 tree
7123 build_range_type (tree type, tree lowval, tree highval)
7125 return build_range_type_1 (type, lowval, highval, true);
7128 /* Wrapper around build_range_type_1 with SHARED set to false. */
7130 tree
7131 build_nonshared_range_type (tree type, tree lowval, tree highval)
7133 return build_range_type_1 (type, lowval, highval, false);
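/* Illustrative sketch (added for exposition; not part of the original
   sources): a subrange 1 .. 31 of int, the kind of node an Ada or Pascal
   front end would ask for via the shared variant above.  The helper name
   is made up.  */

static tree
example_day_of_month_type (void)
{
  return build_range_type (integer_type_node,
			   build_int_cst (integer_type_node, 1),
			   build_int_cst (integer_type_node, 31));
}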
7136 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7137 MAXVAL should be the maximum value in the domain
7138 (one less than the length of the array).
7140 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7141 We don't enforce this limit, that is up to caller (e.g. language front end).
7142 The limit exists because the result is a signed type and we don't handle
7143 sizes that use more than one HOST_WIDE_INT. */
7145 tree
7146 build_index_type (tree maxval)
7148 return build_range_type (sizetype, size_zero_node, maxval);
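/* Illustrative sketch (added for exposition; not part of the original
   sources): the domain of a 10-element array is the index type [0, 9],
   which then feeds build_array_type further down.  The helper name is
   made up.  */

static tree
example_array_of_ten (tree elt_type)
{
  tree domain = build_index_type (size_int (9));
  return build_array_type (elt_type, domain);
}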
7151 /* Return true if the debug information for TYPE, a subtype, should be emitted
7152 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7153 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7154 debug info and doesn't reflect the source code. */
7156 bool
7157 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7159 tree base_type = TREE_TYPE (type), low, high;
7161 /* Subrange types have a base type which is an integral type. */
7162 if (!INTEGRAL_TYPE_P (base_type))
7163 return false;
7165 /* Get the real bounds of the subtype. */
7166 if (lang_hooks.types.get_subrange_bounds)
7167 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7168 else
7170 low = TYPE_MIN_VALUE (type);
7171 high = TYPE_MAX_VALUE (type);
7174 /* If the type and its base type have the same representation and the same
7175 name, then the type is not a subrange but a copy of the base type. */
7176 if ((TREE_CODE (base_type) == INTEGER_TYPE
7177 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7178 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7179 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7180 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7181 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7182 return false;
7184 if (lowval)
7185 *lowval = low;
7186 if (highval)
7187 *highval = high;
7188 return true;
7191 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7192 and number of elements specified by the range of values of INDEX_TYPE.
7193 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7194 If SHARED is true, reuse such a type that has already been constructed.
7195 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7197 tree
7198 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7199 bool shared, bool set_canonical)
7201 tree t;
7203 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7205 error ("arrays of functions are not meaningful");
7206 elt_type = integer_type_node;
7209 t = make_node (ARRAY_TYPE);
7210 TREE_TYPE (t) = elt_type;
7211 TYPE_DOMAIN (t) = index_type;
7212 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7213 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7214 layout_type (t);
7216 if (shared)
7218 hashval_t hash = type_hash_canon_hash (t);
7219 t = type_hash_canon (hash, t);
7222 if (TYPE_CANONICAL (t) == t && set_canonical)
7224 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7225 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7226 || in_lto_p)
7227 SET_TYPE_STRUCTURAL_EQUALITY (t);
7228 else if (TYPE_CANONICAL (elt_type) != elt_type
7229 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7230 TYPE_CANONICAL (t)
7231 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7232 index_type
7233 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7234 typeless_storage, shared, set_canonical);
7237 return t;
7240 /* Wrapper around build_array_type_1 with SHARED set to true. */
7242 tree
7243 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7245 return
7246 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7249 /* Wrapper around build_array_type_1 with SHARED set to false. */
7251 tree
7252 build_nonshared_array_type (tree elt_type, tree index_type)
7254 return build_array_type_1 (elt_type, index_type, false, false, true);
7257 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7258 sizetype. */
7260 tree
7261 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7263 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
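/* For example (exposition only): build_array_type_nelts (char_type_node, 16)
   yields the type char[16], whose domain is the index type [0, 15].  */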
7266 /* Recursively examines the array elements of TYPE, until a non-array
7267 element type is found. */
7269 tree
7270 strip_array_types (tree type)
7272 while (TREE_CODE (type) == ARRAY_TYPE)
7273 type = TREE_TYPE (type);
7275 return type;
7278 /* Computes the canonical argument types from the argument type list
7279 ARGTYPES.
7281 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7282 on entry to this function, or if any of the ARGTYPES are
7283 structural.
7285 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7286 true on entry to this function, or if any of the ARGTYPES are
7287 non-canonical.
7289 Returns a canonical argument list, which may be ARGTYPES when the
7290 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7291 true) or would not differ from ARGTYPES. */
7293 static tree
7294 maybe_canonicalize_argtypes (tree argtypes,
7295 bool *any_structural_p,
7296 bool *any_noncanonical_p)
7298 tree arg;
7299 bool any_noncanonical_argtypes_p = false;
7301 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7303 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7304 /* Fail gracefully by stating that the type is structural. */
7305 *any_structural_p = true;
7306 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7307 *any_structural_p = true;
7308 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7309 || TREE_PURPOSE (arg))
7310 /* If the argument has a default argument, we consider it
7311 non-canonical even though the type itself is canonical.
7312 That way, different variants of function and method types
7313 with default arguments will all point to the variant with
7314 no defaults as their canonical type. */
7315 any_noncanonical_argtypes_p = true;
7318 if (*any_structural_p)
7319 return argtypes;
7321 if (any_noncanonical_argtypes_p)
7323 /* Build the canonical list of argument types. */
7324 tree canon_argtypes = NULL_TREE;
7325 bool is_void = false;
7327 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7329 if (arg == void_list_node)
7330 is_void = true;
7331 else
7332 canon_argtypes = tree_cons (NULL_TREE,
7333 TYPE_CANONICAL (TREE_VALUE (arg)),
7334 canon_argtypes);
7337 canon_argtypes = nreverse (canon_argtypes);
7338 if (is_void)
7339 canon_argtypes = chainon (canon_argtypes, void_list_node);
7341 /* There is a non-canonical type. */
7342 *any_noncanonical_p = true;
7343 return canon_argtypes;
7346 /* The canonical argument types are the same as ARGTYPES. */
7347 return argtypes;
7350 /* Construct, lay out and return
7351 the type of functions returning type VALUE_TYPE
7352 given arguments of types ARG_TYPES.
7353 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7354 are data type nodes for the arguments of the function.
7355 If such a type has already been constructed, reuse it. */
7357 tree
7358 build_function_type (tree value_type, tree arg_types)
7360 tree t;
7361 inchash::hash hstate;
7362 bool any_structural_p, any_noncanonical_p;
7363 tree canon_argtypes;
7365 gcc_assert (arg_types != error_mark_node);
7367 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7369 error ("function return type cannot be function");
7370 value_type = integer_type_node;
7373 /* Make a node of the sort we want. */
7374 t = make_node (FUNCTION_TYPE);
7375 TREE_TYPE (t) = value_type;
7376 TYPE_ARG_TYPES (t) = arg_types;
7378 /* If we already have such a type, use the old one. */
7379 hashval_t hash = type_hash_canon_hash (t);
7380 t = type_hash_canon (hash, t);
7382 /* Set up the canonical type. */
7383 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7384 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7385 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7386 &any_structural_p,
7387 &any_noncanonical_p);
7388 if (any_structural_p)
7389 SET_TYPE_STRUCTURAL_EQUALITY (t);
7390 else if (any_noncanonical_p)
7391 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7392 canon_argtypes);
7394 if (!COMPLETE_TYPE_P (t))
7395 layout_type (t);
7396 return t;
7399 /* Build a function type. The RETURN_TYPE is the type returned by the
7400 function. If VAARGS is set, no void_type_node is appended to the
7401 list. ARGP must always be terminated by a NULL_TREE. */
7403 static tree
7404 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7406 tree t, args, last;
7408 t = va_arg (argp, tree);
7409 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7410 args = tree_cons (NULL_TREE, t, args);
7412 if (vaargs)
7414 last = args;
7415 if (args != NULL_TREE)
7416 args = nreverse (args);
7417 gcc_assert (last != void_list_node);
7419 else if (args == NULL_TREE)
7420 args = void_list_node;
7421 else
7423 last = args;
7424 args = nreverse (args);
7425 TREE_CHAIN (last) = void_list_node;
7427 args = build_function_type (return_type, args);
7429 return args;
7432 /* Build a function type. The RETURN_TYPE is the type returned by the
7433 function. If additional arguments are provided, they are
7434 additional argument types. The list of argument types must always
7435 be terminated by NULL_TREE. */
7437 tree
7438 build_function_type_list (tree return_type, ...)
7440 tree args;
7441 va_list p;
7443 va_start (p, return_type);
7444 args = build_function_type_list_1 (false, return_type, p);
7445 va_end (p);
7446 return args;
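/* Illustrative sketch (added for exposition; not part of the original
   sources): the type of "int f (void *, size_t)" built with the helper
   above; the argument list is closed with NULL_TREE and void_list_node
   is appended internally to mark the function as non-variadic.  The
   helper name is made up.  */

static tree
example_fn_type (void)
{
  return build_function_type_list (integer_type_node,
				   ptr_type_node,
				   size_type_node,
				   NULL_TREE);
}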
7449 /* Build a variable argument function type. The RETURN_TYPE is the
7450 type returned by the function. If additional arguments are provided,
7451 they are additional argument types. The list of argument types must
7452 always be terminated by NULL_TREE. */
7454 tree
7455 build_varargs_function_type_list (tree return_type, ...)
7457 tree args;
7458 va_list p;
7460 va_start (p, return_type);
7461 args = build_function_type_list_1 (true, return_type, p);
7462 va_end (p);
7464 return args;
7467 /* Build a function type. RETURN_TYPE is the type returned by the
7468 function; VAARGS indicates whether the function takes varargs. The
7469 function takes N named arguments, the types of which are provided in
7470 ARG_TYPES. */
7472 static tree
7473 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7474 tree *arg_types)
7476 int i;
7477 tree t = vaargs ? NULL_TREE : void_list_node;
7479 for (i = n - 1; i >= 0; i--)
7480 t = tree_cons (NULL_TREE, arg_types[i], t);
7482 return build_function_type (return_type, t);
7485 /* Build a function type. RETURN_TYPE is the type returned by the
7486 function. The function takes N named arguments, the types of which
7487 are provided in ARG_TYPES. */
7489 tree
7490 build_function_type_array (tree return_type, int n, tree *arg_types)
7492 return build_function_type_array_1 (false, return_type, n, arg_types);
7495 /* Build a variable argument function type. RETURN_TYPE is the type
7496 returned by the function. The function takes N named arguments, the
7497 types of which are provided in ARG_TYPES. */
7499 tree
7500 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7502 return build_function_type_array_1 (true, return_type, n, arg_types);
7505 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7506 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7507 for the method. An implicit additional parameter (of type
7508 pointer-to-BASETYPE) is added to the ARGTYPES. */
7510 tree
7511 build_method_type_directly (tree basetype,
7512 tree rettype,
7513 tree argtypes)
7515 tree t;
7516 tree ptype;
7517 bool any_structural_p, any_noncanonical_p;
7518 tree canon_argtypes;
7520 /* Make a node of the sort we want. */
7521 t = make_node (METHOD_TYPE);
7523 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7524 TREE_TYPE (t) = rettype;
7525 ptype = build_pointer_type (basetype);
7527 /* The actual arglist for this function includes a "hidden" argument
7528 which is "this". Put it into the list of argument types. */
7529 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7530 TYPE_ARG_TYPES (t) = argtypes;
7532 /* If we already have such a type, use the old one. */
7533 hashval_t hash = type_hash_canon_hash (t);
7534 t = type_hash_canon (hash, t);
7536 /* Set up the canonical type. */
7537 any_structural_p
7538 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7539 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7540 any_noncanonical_p
7541 = (TYPE_CANONICAL (basetype) != basetype
7542 || TYPE_CANONICAL (rettype) != rettype);
7543 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7544 &any_structural_p,
7545 &any_noncanonical_p);
7546 if (any_structural_p)
7547 SET_TYPE_STRUCTURAL_EQUALITY (t);
7548 else if (any_noncanonical_p)
7549 TYPE_CANONICAL (t)
7550 = build_method_type_directly (TYPE_CANONICAL (basetype),
7551 TYPE_CANONICAL (rettype),
7552 canon_argtypes);
7553 if (!COMPLETE_TYPE_P (t))
7554 layout_type (t);
7556 return t;
7559 /* Construct, lay out and return the type of methods belonging to class
7560 BASETYPE and whose arguments and values are described by TYPE.
7561 If that type exists already, reuse it.
7562 TYPE must be a FUNCTION_TYPE node. */
7564 tree
7565 build_method_type (tree basetype, tree type)
7567 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7569 return build_method_type_directly (basetype,
7570 TREE_TYPE (type),
7571 TYPE_ARG_TYPES (type));
7574 /* Construct, lay out and return the type of offsets to a value
7575 of type TYPE, within an object of type BASETYPE.
7576 If a suitable offset type exists already, reuse it. */
7578 tree
7579 build_offset_type (tree basetype, tree type)
7581 tree t;
7583 /* Make a node of the sort we want. */
7584 t = make_node (OFFSET_TYPE);
7586 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7587 TREE_TYPE (t) = type;
7589 /* If we already have such a type, use the old one. */
7590 hashval_t hash = type_hash_canon_hash (t);
7591 t = type_hash_canon (hash, t);
7593 if (!COMPLETE_TYPE_P (t))
7594 layout_type (t);
7596 if (TYPE_CANONICAL (t) == t)
7598 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7599 || TYPE_STRUCTURAL_EQUALITY_P (type))
7600 SET_TYPE_STRUCTURAL_EQUALITY (t);
7601 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
7602 || TYPE_CANONICAL (type) != type)
7603 TYPE_CANONICAL (t)
7604 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
7605 TYPE_CANONICAL (type));
7608 return t;
7611 /* Create a complex type whose components are COMPONENT_TYPE.
7613 If NAMED is true, the type is given a TYPE_NAME. We do not always
7614 do so because this creates a DECL node and thus makes the DECL_UIDs
7615 dependent on the type canonicalization hashtable, which is GC-ed,
7616 so the DECL_UIDs would not be stable wrt garbage collection. */
7618 tree
7619 build_complex_type (tree component_type, bool named)
7621 gcc_assert (INTEGRAL_TYPE_P (component_type)
7622 || SCALAR_FLOAT_TYPE_P (component_type)
7623 || FIXED_POINT_TYPE_P (component_type));
7625 /* Make a node of the sort we want. */
7626 tree probe = make_node (COMPLEX_TYPE);
7628 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
7630 /* If we already have such a type, use the old one. */
7631 hashval_t hash = type_hash_canon_hash (probe);
7632 tree t = type_hash_canon (hash, probe);
7634 if (t == probe)
7636 /* We created a new type. The hash insertion will have laid
7637 out the type. We need to check the canonicalization and
7638 maybe set the name. */
7639 gcc_checking_assert (COMPLETE_TYPE_P (t)
7640 && !TYPE_NAME (t)
7641 && TYPE_CANONICAL (t) == t);
7643 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
7644 SET_TYPE_STRUCTURAL_EQUALITY (t);
7645 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
7646 TYPE_CANONICAL (t)
7647 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
7649 /* We need to create a name, since complex is a fundamental type. */
7650 if (named)
7652 const char *name = NULL;
7654 if (TREE_TYPE (t) == char_type_node)
7655 name = "complex char";
7656 else if (TREE_TYPE (t) == signed_char_type_node)
7657 name = "complex signed char";
7658 else if (TREE_TYPE (t) == unsigned_char_type_node)
7659 name = "complex unsigned char";
7660 else if (TREE_TYPE (t) == short_integer_type_node)
7661 name = "complex short int";
7662 else if (TREE_TYPE (t) == short_unsigned_type_node)
7663 name = "complex short unsigned int";
7664 else if (TREE_TYPE (t) == integer_type_node)
7665 name = "complex int";
7666 else if (TREE_TYPE (t) == unsigned_type_node)
7667 name = "complex unsigned int";
7668 else if (TREE_TYPE (t) == long_integer_type_node)
7669 name = "complex long int";
7670 else if (TREE_TYPE (t) == long_unsigned_type_node)
7671 name = "complex long unsigned int";
7672 else if (TREE_TYPE (t) == long_long_integer_type_node)
7673 name = "complex long long int";
7674 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
7675 name = "complex long long unsigned int";
7677 if (name != NULL)
7678 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
7679 get_identifier (name), t);
7683 return build_qualified_type (t, TYPE_QUALS (component_type));
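/* Illustrative sketch (added for exposition; not part of the original
   sources): requesting a complex type over double; the element type of
   the result is the main variant of the component type.  The helper name
   is made up.  */

static void
example_complex_double (void)
{
  tree c = build_complex_type (double_type_node);
  gcc_assert (TREE_CODE (c) == COMPLEX_TYPE
	      && TREE_TYPE (c) == double_type_node);
}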
7686 /* If TYPE is a real or complex floating-point type and the target
7687 does not directly support arithmetic on TYPE then return the wider
7688 type to be used for arithmetic on TYPE. Otherwise, return
7689 NULL_TREE. */
7691 tree
7692 excess_precision_type (tree type)
7694 /* The target can give two different responses to the question of
7695 which excess precision mode it would like depending on whether we
7696 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7698 enum excess_precision_type requested_type
7699 = (flag_excess_precision == EXCESS_PRECISION_FAST
7700 ? EXCESS_PRECISION_TYPE_FAST
7701 : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
7702 ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));
7704 enum flt_eval_method target_flt_eval_method
7705 = targetm.c.excess_precision (requested_type);
7707 /* The target should not ask for unpredictable float evaluation (though
7708 it might advertise that implicitly the evaluation is unpredictable,
7709 but we don't care about that here, it will have been reported
7710 elsewhere). If it does ask for unpredictable evaluation, we have
7711 nothing to do here. */
7712 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7714 /* Nothing to do. The target has asked for all types we know about
7715 to be computed with their native precision and range. */
7716 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7717 return NULL_TREE;
7719 /* The target will promote this type in a target-dependent way, so excess
7720 precision ought to leave it alone. */
7721 if (targetm.promoted_type (type) != NULL_TREE)
7722 return NULL_TREE;
7724 machine_mode float16_type_mode = (float16_type_node
7725 ? TYPE_MODE (float16_type_node)
7726 : VOIDmode);
7727 machine_mode float_type_mode = TYPE_MODE (float_type_node);
7728 machine_mode double_type_mode = TYPE_MODE (double_type_node);
7730 switch (TREE_CODE (type))
7732 case REAL_TYPE:
7734 machine_mode type_mode = TYPE_MODE (type);
7735 switch (target_flt_eval_method)
7737 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7738 if (type_mode == float16_type_mode)
7739 return float_type_node;
7740 break;
7741 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7742 if (type_mode == float16_type_mode
7743 || type_mode == float_type_mode)
7744 return double_type_node;
7745 break;
7746 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7747 if (type_mode == float16_type_mode
7748 || type_mode == float_type_mode
7749 || type_mode == double_type_mode)
7750 return long_double_type_node;
7751 break;
7752 default:
7753 gcc_unreachable ();
7755 break;
7757 case COMPLEX_TYPE:
7759 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7760 return NULL_TREE;
7761 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7762 switch (target_flt_eval_method)
7764 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7765 if (type_mode == float16_type_mode)
7766 return complex_float_type_node;
7767 break;
7768 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7769 if (type_mode == float16_type_mode
7770 || type_mode == float_type_mode)
7771 return complex_double_type_node;
7772 break;
7773 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7774 if (type_mode == float16_type_mode
7775 || type_mode == float_type_mode
7776 || type_mode == double_type_mode)
7777 return complex_long_double_type_node;
7778 break;
7779 default:
7780 gcc_unreachable ();
7782 break;
7784 default:
7785 break;
7788 return NULL_TREE;
7791 /* Return OP, stripped of any conversions to wider types as much as is safe.
7792 Converting the value back to OP's type makes a value equivalent to OP.
7794 If FOR_TYPE is nonzero, we return a value which, if converted to
7795 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7797 OP must have integer, real or enumeral type. Pointers are not allowed!
7799 There are some cases where the obvious value we could return
7800 would regenerate to OP if converted to OP's type,
7801 but would not extend like OP to wider types.
7802 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7803 For example, if OP is (unsigned short)(signed char)-1,
7804 we avoid returning (signed char)-1 if FOR_TYPE is int,
7805 even though extending that to an unsigned short would regenerate OP,
7806 since the result of extending (signed char)-1 to (int)
7807 is different from (int) OP. */
7809 tree
7810 get_unwidened (tree op, tree for_type)
7812 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7813 tree type = TREE_TYPE (op);
7814 unsigned final_prec
7815 = TYPE_PRECISION (for_type != 0 ? for_type : type);
7816 int uns
7817 = (for_type != 0 && for_type != type
7818 && final_prec > TYPE_PRECISION (type)
7819 && TYPE_UNSIGNED (type));
7820 tree win = op;
7822 while (CONVERT_EXPR_P (op))
7824 int bitschange;
7826 /* TYPE_PRECISION on vector types has different meaning
7827 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7828 so avoid them here. */
7829 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
7830 break;
7832 bitschange = TYPE_PRECISION (TREE_TYPE (op))
7833 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
7835 /* Truncations are many-one so cannot be removed,
7836 unless we are later going to truncate down even further. */
7837 if (bitschange < 0
7838 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
7839 break;
7841 /* See what's inside this conversion. If we decide to strip it,
7842 we will set WIN. */
7843 op = TREE_OPERAND (op, 0);
7845 /* If we have not stripped any zero-extensions (uns is 0),
7846 we can strip any kind of extension.
7847 If we have previously stripped a zero-extension,
7848 only zero-extensions can safely be stripped.
7849 Any extension can be stripped if the bits it would produce
7850 are all going to be discarded later by truncating to FOR_TYPE. */
7852 if (bitschange > 0)
7854 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
7855 win = op;
7856 /* TYPE_UNSIGNED says whether this is a zero-extension.
7857 Let's avoid computing it if it does not affect WIN
7858 and if UNS will not be needed again. */
7859 if ((uns
7860 || CONVERT_EXPR_P (op))
7861 && TYPE_UNSIGNED (TREE_TYPE (op)))
7863 uns = 1;
7864 win = op;
7869 /* If we finally reach a constant see if it fits in sth smaller and
7870 in that case convert it. */
7871 if (TREE_CODE (win) == INTEGER_CST)
7873 tree wtype = TREE_TYPE (win);
7874 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
7875 if (for_type)
7876 prec = MAX (prec, final_prec);
7877 if (prec < TYPE_PRECISION (wtype))
7879 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
7880 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
7881 win = fold_convert (t, win);
7885 return win;
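/* For example (exposition only): if OP is (int) (unsigned short) x and x
   has type unsigned char, both widenings are zero extensions, so
   get_unwidened returns x itself; converting x back to int regenerates
   the original value.  */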
7888 /* Return OP or a simpler expression for a narrower value
7889 which can be sign-extended or zero-extended to give back OP.
7890 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
7891 or 0 if the value should be sign-extended. */
7893 tree
7894 get_narrower (tree op, int *unsignedp_ptr)
7896 int uns = 0;
7897 int first = 1;
7898 tree win = op;
7899 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
7901 if (TREE_CODE (op) == COMPOUND_EXPR)
7904 op = TREE_OPERAND (op, 1);
7905 while (TREE_CODE (op) == COMPOUND_EXPR);
7906 tree ret = get_narrower (op, unsignedp_ptr);
7907 if (ret == op)
7908 return win;
7909 auto_vec <tree, 16> v;
7910 unsigned int i;
7911 for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
7912 op = TREE_OPERAND (op, 1))
7913 v.safe_push (op);
7914 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
7915 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
7916 TREE_TYPE (ret), TREE_OPERAND (op, 0),
7917 ret);
7918 return ret;
7920 while (TREE_CODE (op) == NOP_EXPR)
7922 int bitschange
7923 = (TYPE_PRECISION (TREE_TYPE (op))
7924 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
7926 /* Truncations are many-one so cannot be removed. */
7927 if (bitschange < 0)
7928 break;
7930 /* See what's inside this conversion. If we decide to strip it,
7931 we will set WIN. */
7933 if (bitschange > 0)
7935 op = TREE_OPERAND (op, 0);
7936 /* An extension: the outermost one can be stripped,
7937 but remember whether it is zero or sign extension. */
7938 if (first)
7939 uns = TYPE_UNSIGNED (TREE_TYPE (op));
7940 /* Otherwise, if a sign extension has been stripped,
7941 only sign extensions can now be stripped;
7942 if a zero extension has been stripped, only zero-extensions. */
7943 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
7944 break;
7945 first = 0;
7947 else /* bitschange == 0 */
7949 /* A change in nominal type can always be stripped, but we must
7950 preserve the unsignedness. */
7951 if (first)
7952 uns = TYPE_UNSIGNED (TREE_TYPE (op));
7953 first = 0;
7954 op = TREE_OPERAND (op, 0);
7955 /* Keep trying to narrow, but don't assign op to win if it
7956 would turn an integral type into something else. */
7957 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
7958 continue;
7961 win = op;
7964 if (TREE_CODE (op) == COMPONENT_REF
7965 /* Since type_for_size always gives an integer type. */
7966 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
7967 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
7968 /* Ensure field is laid out already. */
7969 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
7970 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
7972 unsigned HOST_WIDE_INT innerprec
7973 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
7974 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
7975 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
7976 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
7978 /* We can get this structure field in a narrower type that fits it,
7979 but the resulting extension to its nominal type (a fullword type)
7980 must satisfy the same conditions as for other extensions.
7982 Do this only for fields that are aligned (not bit-fields),
7983 because when bit-field insns will be used there is no
7984 advantage in doing this. */
7986 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
7987 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
7988 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
7989 && type != 0)
7991 if (first)
7992 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
7993 win = fold_convert (type, op);
7997 *unsignedp_ptr = uns;
7998 return win;
8001 /* Return true if integer constant C has a value that is permissible
8002 for TYPE, an integral type. */
8004 bool
8005 int_fits_type_p (const_tree c, const_tree type)
8007 tree type_low_bound, type_high_bound;
8008 bool ok_for_low_bound, ok_for_high_bound;
8009 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8011 /* Non-standard boolean types can have arbitrary precision but various
8012 transformations assume that they can only take values 0 and +/-1. */
8013 if (TREE_CODE (type) == BOOLEAN_TYPE)
8014 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8016 retry:
8017 type_low_bound = TYPE_MIN_VALUE (type);
8018 type_high_bound = TYPE_MAX_VALUE (type);
8020 /* If at least one bound of the type is a constant integer, we can check
8021 ourselves and maybe make a decision. If no such decision is possible, but
8022 this type is a subtype, try checking against that. Otherwise, use
8023 fits_to_tree_p, which checks against the precision.
8025 Compute the status for each possibly constant bound, and return if we see
8026 one does not match. Use ok_for_xxx_bound for this purpose, setting it to
8027 true when the constant is known to satisfy that bound, and to false when
8028 that bound could not be checked; a constant known *not* to fit returns false. */
8030 /* Check if c >= type_low_bound. */
8031 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8033 if (tree_int_cst_lt (c, type_low_bound))
8034 return false;
8035 ok_for_low_bound = true;
8037 else
8038 ok_for_low_bound = false;
8040 /* Check if c <= type_high_bound. */
8041 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8043 if (tree_int_cst_lt (type_high_bound, c))
8044 return false;
8045 ok_for_high_bound = true;
8047 else
8048 ok_for_high_bound = false;
8050 /* If the constant fits both bounds, the result is known. */
8051 if (ok_for_low_bound && ok_for_high_bound)
8052 return true;
8054 /* Perform some generic filtering which may allow making a decision
8055 even if the bounds are not constant. First, negative integers
8056 never fit in unsigned types. */
8057 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8058 return false;
8060 /* Second, narrower types always fit in wider ones. */
8061 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8062 return true;
8064 /* Third, unsigned integers with top bit set never fit signed types. */
8065 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8067 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8068 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8070 /* When a tree_cst is converted to a wide-int, the precision
8071 is taken from the type. However, if the precision of the
8072 mode underneath the type is smaller than that, it is
8073 possible that the value will not fit. The test below
8074 fails if any bit is set between the sign bit of the
8075 underlying mode and the top bit of the type. */
8076 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8077 return false;
8079 else if (wi::neg_p (wi::to_wide (c)))
8080 return false;
8083 /* If we haven't been able to decide at this point, there is nothing more we
8084 can check ourselves here. Look at the base type if we have one and it
8085 has the same precision. */
8086 if (TREE_CODE (type) == INTEGER_TYPE
8087 && TREE_TYPE (type) != 0
8088 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8090 type = TREE_TYPE (type);
8091 goto retry;
8094 /* Or to fits_to_tree_p, if nothing else. */
8095 return wi::fits_to_tree_p (wi::to_wide (c), type);
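/* Illustrative sketch (editorial addition, not part of the original source):
   a typical caller narrows an INTEGER_CST only when int_fits_type_p says the
   value is representable.  The helper and parameter names are hypothetical.  */
#if 0
static tree
example_narrow_int_cst (tree cst, tree narrow_type)
{
  /* fold_convert cannot lose value bits here because the fit was checked.  */
  if (TREE_CODE (cst) == INTEGER_CST && int_fits_type_p (cst, narrow_type))
    return fold_convert (narrow_type, cst);
  return cst;
}
#endif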
8098 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8099 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8100 represented (assuming two's-complement arithmetic) within the bit
8101 precision of the type are returned instead. */
8103 void
8104 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8106 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8107 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8108 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8109 else
8111 if (TYPE_UNSIGNED (type))
8112 mpz_set_ui (min, 0);
8113 else
8115 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8116 wi::to_mpz (mn, min, SIGNED);
8120 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8121 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8122 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8123 else
8125 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8126 wi::to_mpz (mn, max, TYPE_SIGN (type));
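/* Illustrative sketch (editorial addition, not part of the original source):
   get_type_static_bounds writes into caller-initialized GMP integers, so the
   usual pattern is mpz_init / use / mpz_clear.  The helper name is
   hypothetical.  */
#if 0
static bool
example_type_can_be_negative_p (const_tree type)
{
  mpz_t lo, hi;
  mpz_init (lo);
  mpz_init (hi);
  get_type_static_bounds (type, lo, hi);
  bool res = mpz_sgn (lo) < 0;
  mpz_clear (lo);
  mpz_clear (hi);
  return res;
}
#endif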
8130 /* Return true if VAR is an automatic variable. */
8132 bool
8133 auto_var_p (const_tree var)
8135 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8136 || TREE_CODE (var) == PARM_DECL)
8137 && ! TREE_STATIC (var))
8138 || TREE_CODE (var) == RESULT_DECL);
8141 /* Return true if VAR is an automatic variable defined in function FN. */
8143 bool
8144 auto_var_in_fn_p (const_tree var, const_tree fn)
8146 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8147 && (auto_var_p (var)
8148 || TREE_CODE (var) == LABEL_DECL));
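/* Illustrative sketch (editorial addition): within a pass, the usual second
   argument is current_function_decl, asking whether DECL is local to the
   function being compiled.  The helper name is hypothetical.  */
#if 0
static bool
example_local_in_current_fn_p (tree decl)
{
  return auto_var_in_fn_p (decl, current_function_decl);
}
#endif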
8151 /* Subprogram of following function. Called by walk_tree.
8153 Return *TP if it is an automatic variable or parameter of the
8154 function passed in as DATA. */
8156 static tree
8157 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8159 tree fn = (tree) data;
8161 if (TYPE_P (*tp))
8162 *walk_subtrees = 0;
8164 else if (DECL_P (*tp)
8165 && auto_var_in_fn_p (*tp, fn))
8166 return *tp;
8168 return NULL_TREE;
8171 /* Returns true if T is, contains, or refers to a type with variable
8172 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8173 arguments, but not the return type. If FN is nonzero, only return
8174 true if a modifier of the type or position of FN is a variable or
8175 parameter inside FN.
8177 This concept is more general than that of C99 'variably modified types':
8178 in C99, a struct type is never variably modified because a VLA may not
8179 appear as a structure member. However, in GNU C, code like:
8181 struct S { int i[f()]; };
8183 is valid, and other languages may define similar constructs. */
8185 bool
8186 variably_modified_type_p (tree type, tree fn)
8188 tree t;
8190 /* Test if T is either variable (if FN is zero) or an expression containing
8191 a variable in FN. If TYPE isn't gimplified, return true also if
8192 gimplify_one_sizepos would gimplify the expression into a local
8193 variable. */
8194 #define RETURN_TRUE_IF_VAR(T) \
8195 do { tree _t = (T); \
8196 if (_t != NULL_TREE \
8197 && _t != error_mark_node \
8198 && !CONSTANT_CLASS_P (_t) \
8199 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8200 && (!fn \
8201 || (!TYPE_SIZES_GIMPLIFIED (type) \
8202 && (TREE_CODE (_t) != VAR_DECL \
8203 && !CONTAINS_PLACEHOLDER_P (_t))) \
8204 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8205 return true; } while (0)
8207 if (type == error_mark_node)
8208 return false;
8210 /* If TYPE itself has variable size, it is variably modified. */
8211 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8212 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8214 switch (TREE_CODE (type))
8216 case POINTER_TYPE:
8217 case REFERENCE_TYPE:
8218 case VECTOR_TYPE:
8219 /* Ada can have pointer types referring to themselves indirectly. */
8220 if (TREE_VISITED (type))
8221 return false;
8222 TREE_VISITED (type) = true;
8223 if (variably_modified_type_p (TREE_TYPE (type), fn))
8225 TREE_VISITED (type) = false;
8226 return true;
8228 TREE_VISITED (type) = false;
8229 break;
8231 case FUNCTION_TYPE:
8232 case METHOD_TYPE:
8233 /* If TYPE is a function type, it is variably modified if the
8234 return type is variably modified. */
8235 if (variably_modified_type_p (TREE_TYPE (type), fn))
8236 return true;
8237 break;
8239 case INTEGER_TYPE:
8240 case REAL_TYPE:
8241 case FIXED_POINT_TYPE:
8242 case ENUMERAL_TYPE:
8243 case BOOLEAN_TYPE:
8244 /* Scalar types are variably modified if their end points
8245 aren't constant. */
8246 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8247 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8248 break;
8250 case RECORD_TYPE:
8251 case UNION_TYPE:
8252 case QUAL_UNION_TYPE:
8253 /* We can't see if any of the fields are variably-modified by the
8254 definition we normally use, since that would produce infinite
8255 recursion via pointers. */
8256 /* This is variably modified if some field's type is. */
8257 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8258 if (TREE_CODE (t) == FIELD_DECL)
8260 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8261 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8262 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8264 /* If the type is a qualified union, then the DECL_QUALIFIER
8265 of fields can also be an expression containing a variable. */
8266 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8267 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8269 /* If the field is a qualified union, then it's only a container
8270 for what's inside so we look into it. That's necessary in LTO
8271 mode because the sizes of the field tested above have been set
8272 to PLACEHOLDER_EXPRs by free_lang_data. */
8273 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
8274 && variably_modified_type_p (TREE_TYPE (t), fn))
8275 return true;
8277 break;
8279 case ARRAY_TYPE:
8280 /* Do not call ourselves to avoid infinite recursion. This is
8281 variably modified if the element type is. */
8282 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8283 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8284 break;
8286 default:
8287 break;
8290 /* The current language may have other cases to check, but in general,
8291 all other types are not variably modified. */
8292 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8294 #undef RETURN_TRUE_IF_VAR
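/* Illustrative sketch (editorial addition): a front end typically asks this
   question about a declaration's type to decide whether its size must be
   computed at run time; passing NULL_TREE for FN accepts any variable
   modifier rather than only those local to a particular function.  The
   helper name is hypothetical.  */
#if 0
static bool
example_needs_runtime_size_p (tree decl)
{
  return variably_modified_type_p (TREE_TYPE (decl), NULL_TREE);
}
#endif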
8297 /* Given a DECL or TYPE, return the scope in which it was declared, or
8298 NULL_TREE if there is no containing scope. */
8300 tree
8301 get_containing_scope (const_tree t)
8303 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8306 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8308 const_tree
8309 get_ultimate_context (const_tree decl)
8311 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8313 if (TREE_CODE (decl) == BLOCK)
8314 decl = BLOCK_SUPERCONTEXT (decl);
8315 else
8316 decl = get_containing_scope (decl);
8318 return decl;
8321 /* Return the innermost context enclosing DECL that is
8322 a FUNCTION_DECL, or zero if none. */
8324 tree
8325 decl_function_context (const_tree decl)
8327 tree context;
8329 if (TREE_CODE (decl) == ERROR_MARK)
8330 return 0;
8332 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8333 where we look up the function at runtime. Such functions always take
8334 a first argument of type 'pointer to real context'.
8336 C++ should really be fixed to use DECL_CONTEXT for the real context,
8337 and use something else for the "virtual context". */
8338 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8339 context
8340 = TYPE_MAIN_VARIANT
8341 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8342 else
8343 context = DECL_CONTEXT (decl);
8345 while (context && TREE_CODE (context) != FUNCTION_DECL)
8347 if (TREE_CODE (context) == BLOCK)
8348 context = BLOCK_SUPERCONTEXT (context);
8349 else
8350 context = get_containing_scope (context);
8353 return context;
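/* Illustrative sketch (editorial addition): a nonnull result means the
   declaration lives inside some function, so this is the usual test for a
   nested function.  The helper name is hypothetical.  */
#if 0
static bool
example_nested_function_p (tree fndecl)
{
  return decl_function_context (fndecl) != NULL_TREE;
}
#endif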
8356 /* Return the innermost context enclosing DECL that is
8357 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8358 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8360 tree
8361 decl_type_context (const_tree decl)
8363 tree context = DECL_CONTEXT (decl);
8365 while (context)
8366 switch (TREE_CODE (context))
8368 case NAMESPACE_DECL:
8369 case TRANSLATION_UNIT_DECL:
8370 return NULL_TREE;
8372 case RECORD_TYPE:
8373 case UNION_TYPE:
8374 case QUAL_UNION_TYPE:
8375 return context;
8377 case TYPE_DECL:
8378 case FUNCTION_DECL:
8379 context = DECL_CONTEXT (context);
8380 break;
8382 case BLOCK:
8383 context = BLOCK_SUPERCONTEXT (context);
8384 break;
8386 default:
8387 gcc_unreachable ();
8390 return NULL_TREE;
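/* Illustrative sketch (editorial addition): decl_type_context answers whether
   a declaration is a member of some aggregate, e.g. a FIELD_DECL of a struct
   or a C++ member function.  The helper name is hypothetical.  */
#if 0
static bool
example_aggregate_member_p (tree decl)
{
  return decl_type_context (decl) != NULL_TREE;
}
#endif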
8393 /* CALL is a CALL_EXPR. Return the declaration for the function
8394 called, or NULL_TREE if the called function cannot be
8395 determined. */
8397 tree
8398 get_callee_fndecl (const_tree call)
8400 tree addr;
8402 if (call == error_mark_node)
8403 return error_mark_node;
8405 /* It's invalid to call this function with anything but a
8406 CALL_EXPR. */
8407 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8409 /* The first operand to the CALL is the address of the function
8410 called. */
8411 addr = CALL_EXPR_FN (call);
8413 /* If there is no function, return early. */
8414 if (addr == NULL_TREE)
8415 return NULL_TREE;
8417 STRIP_NOPS (addr);
8419 /* If this is a readonly function pointer, extract its initial value. */
8420 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8421 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8422 && DECL_INITIAL (addr))
8423 addr = DECL_INITIAL (addr);
8425 /* If the address is just `&f' for some function `f', then we know
8426 that `f' is being called. */
8427 if (TREE_CODE (addr) == ADDR_EXPR
8428 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8429 return TREE_OPERAND (addr, 0);
8431 /* We couldn't figure out what was being called. */
8432 return NULL_TREE;
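/* Illustrative sketch (editorial addition): callers usually fetch the callee
   and then test for a specific built-in; the null check matters because
   indirect calls yield NULL_TREE.  The helper name is hypothetical.  */
#if 0
static bool
example_call_is_memcpy_p (tree call)
{
  tree fndecl = get_callee_fndecl (call);
  return fndecl && fndecl_built_in_p (fndecl, BUILT_IN_MEMCPY);
}
#endif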
8435 /* Return true when CALL's arguments and return value match those of FNDECL,
8436 the decl of a built-in function. */
8438 static bool
8439 tree_builtin_call_types_compatible_p (const_tree call, tree fndecl)
8441 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
8443 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8444 if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
8445 fndecl = decl;
8447 bool gimple_form = (cfun && (cfun->curr_properties & PROP_gimple)) != 0;
8448 if (gimple_form
8449 ? !useless_type_conversion_p (TREE_TYPE (call),
8450 TREE_TYPE (TREE_TYPE (fndecl)))
8451 : (TYPE_MAIN_VARIANT (TREE_TYPE (call))
8452 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl)))))
8453 return false;
8455 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8456 unsigned nargs = call_expr_nargs (call);
8457 for (unsigned i = 0; i < nargs; ++i, targs = TREE_CHAIN (targs))
8459 /* Variadic args follow. */
8460 if (!targs)
8461 return true;
8462 tree arg = CALL_EXPR_ARG (call, i);
8463 tree type = TREE_VALUE (targs);
8464 if (gimple_form
8465 ? !useless_type_conversion_p (type, TREE_TYPE (arg))
8466 : TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (arg)))
8468 /* For pointer arguments be more forgiving, e.g. due to
8469 FILE * vs. fileptr_type_node, or say char * vs. const char *
8470 differences etc. */
8471 if (!gimple_form
8472 && POINTER_TYPE_P (type)
8473 && POINTER_TYPE_P (TREE_TYPE (arg))
8474 && tree_nop_conversion_p (type, TREE_TYPE (arg)))
8475 continue;
8476 /* char/short integral arguments are promoted to int
8477 by several frontends if targetm.calls.promote_prototypes
8478 is true. Allow such promotion too. */
8479 if (INTEGRAL_TYPE_P (type)
8480 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
8481 && INTEGRAL_TYPE_P (TREE_TYPE (arg))
8482 && !TYPE_UNSIGNED (TREE_TYPE (arg))
8483 && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
8484 && (gimple_form
8485 ? useless_type_conversion_p (integer_type_node,
8486 TREE_TYPE (arg))
8487 : tree_nop_conversion_p (integer_type_node,
8488 TREE_TYPE (arg))))
8489 continue;
8490 return false;
8493 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
8494 return false;
8495 return true;
8498 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8499 return the associated function code, otherwise return CFN_LAST. */
8501 combined_fn
8502 get_call_combined_fn (const_tree call)
8504 /* It's invalid to call this function with anything but a CALL_EXPR. */
8505 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8507 if (!CALL_EXPR_FN (call))
8508 return as_combined_fn (CALL_EXPR_IFN (call));
8510 tree fndecl = get_callee_fndecl (call);
8511 if (fndecl
8512 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
8513 && tree_builtin_call_types_compatible_p (call, fndecl))
8514 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8516 return CFN_LAST;
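/* Illustrative sketch (editorial addition): combined_fn lets one switch cover
   both the library built-in and its internal-function twin; CFN_LAST is the
   "nothing recognized" answer.  The helper name is hypothetical, and a real
   caller would also handle the float/long double variants.  */
#if 0
static bool
example_call_is_sqrt_p (tree call)
{
  switch (get_call_combined_fn (call))
    {
    case CFN_SQRT:
    case CFN_BUILT_IN_SQRT:
      return true;
    default:
      return false;
    }
}
#endif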
8519 /* Comparator of indices based on tree_node_counts. */
8521 static int
8522 tree_nodes_cmp (const void *p1, const void *p2)
8524 const unsigned *n1 = (const unsigned *)p1;
8525 const unsigned *n2 = (const unsigned *)p2;
8527 return tree_node_counts[*n1] - tree_node_counts[*n2];
8530 /* Comparator of indices based on tree_code_counts. */
8532 static int
8533 tree_codes_cmp (const void *p1, const void *p2)
8535 const unsigned *n1 = (const unsigned *)p1;
8536 const unsigned *n2 = (const unsigned *)p2;
8538 return tree_code_counts[*n1] - tree_code_counts[*n2];
8541 #define TREE_MEM_USAGE_SPACES 40
8543 /* Print debugging information about tree nodes generated during the compile,
8544 and any language-specific information. */
8546 void
8547 dump_tree_statistics (void)
8549 if (GATHER_STATISTICS)
8551 uint64_t total_nodes, total_bytes;
8552 fprintf (stderr, "\nKind Nodes Bytes\n");
8553 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8554 total_nodes = total_bytes = 0;
8557 auto_vec<unsigned> indices (all_kinds);
8558 for (unsigned i = 0; i < all_kinds; i++)
8559 indices.quick_push (i);
8560 indices.qsort (tree_nodes_cmp);
8562 for (unsigned i = 0; i < (int) all_kinds; i++)
8564 unsigned j = indices[i];
8565 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8566 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8567 SIZE_AMOUNT (tree_node_sizes[j]));
8568 total_nodes += tree_node_counts[j];
8569 total_bytes += tree_node_sizes[j];
8571 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8572 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8573 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8574 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8578 fprintf (stderr, "Code Nodes\n");
8579 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8581 auto_vec<unsigned> indices (MAX_TREE_CODES);
8582 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8583 indices.quick_push (i);
8584 indices.qsort (tree_codes_cmp);
8586 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8588 unsigned j = indices[i];
8589 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8590 get_tree_code_name ((enum tree_code) j),
8591 SIZE_AMOUNT (tree_code_counts[j]));
8593 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8594 fprintf (stderr, "\n");
8595 ssanames_print_statistics ();
8596 fprintf (stderr, "\n");
8597 phinodes_print_statistics ();
8598 fprintf (stderr, "\n");
8601 else
8602 fprintf (stderr, "(No per-node statistics)\n");
8604 print_type_hash_statistics ();
8605 print_debug_expr_statistics ();
8606 print_value_expr_statistics ();
8607 lang_hooks.print_statistics ();
8610 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8612 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8614 unsigned
8615 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8617 /* This relies on the raw feedback's top 4 bits being zero. */
8618 #define FEEDBACK(X) ((X) * 0x04c11db7)
8619 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8620 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8621 static const unsigned syndromes[16] =
8623 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8624 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8625 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8626 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8628 #undef FEEDBACK
8629 #undef SYNDROME
8631 value <<= (32 - bytes * 8);
8632 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8634 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8636 chksum = (chksum << 4) ^ feedback;
8639 return chksum;
8642 /* Generate a crc32 of a string. */
8644 unsigned
8645 crc32_string (unsigned chksum, const char *string)
8648 chksum = crc32_byte (chksum, *string);
8649 while (*string++);
8650 return chksum;
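/* Illustrative sketch (editorial addition): the crc32 helpers chain by feeding
   the previous checksum back in, e.g. to hash several strings into one value.
   The helper name is hypothetical.  */
#if 0
static unsigned
example_crc_of_two_strings (const char *a, const char *b)
{
  unsigned chksum = crc32_string (0, a);
  return crc32_string (chksum, b);
}
#endif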
8653 /* P is a string that will be used in a symbol. Mask out any characters
8654 that are not valid in that context. */
8656 void
8657 clean_symbol_name (char *p)
8659 for (; *p; p++)
8660 if (! (ISALNUM (*p)
8661 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8662 || *p == '$'
8663 #endif
8664 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8665 || *p == '.'
8666 #endif
8668 *p = '_';
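/* Illustrative sketch (editorial addition): clean_symbol_name mutates its
   argument in place, so callers work on a writable copy.  The helper name is
   hypothetical.  */
#if 0
static tree
example_identifier_from_filename (const char *file)
{
  char *buf = ASTRDUP (lbasename (file));
  clean_symbol_name (buf);
  return get_identifier (buf);
}
#endif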
8671 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8673 /* Create a unique anonymous identifier. The identifier is still a
8674 valid assembly label. */
8676 tree
8677 make_anon_name ()
8679 const char *fmt =
8680 #if !defined (NO_DOT_IN_LABEL)
8681 "."
8682 #elif !defined (NO_DOLLAR_IN_LABEL)
8683 "$"
8684 #else
8685 "_"
8686 #endif
8687 "_anon_%d";
8689 char buf[24];
8690 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8691 gcc_checking_assert (len < int (sizeof (buf)));
8693 tree id = get_identifier_with_length (buf, len);
8694 IDENTIFIER_ANON_P (id) = true;
8696 return id;
8699 /* Generate a name for a special-purpose function.
8700 The generated name may need to be unique across the whole link.
8701 Changes to this function may also require corresponding changes to
8702 xstrdup_mask_random.
8703 TYPE is some string to identify the purpose of this function to the
8704 linker or collect2; it must start with an uppercase letter,
8705 one of:
8706 I - for constructors
8707 D - for destructors
8708 N - for C++ anonymous namespaces
8709 F - for DWARF unwind frame information. */
8711 tree
8712 get_file_function_name (const char *type)
8714 char *buf;
8715 const char *p;
8716 char *q;
8718 /* If we already have a name we know to be unique, just use that. */
8719 if (first_global_object_name)
8720 p = q = ASTRDUP (first_global_object_name);
8721 /* If the target is handling the constructors/destructors, they
8722 will be local to this file and the name is only necessary for
8723 debugging purposes.
8724 We also assign sub_I and sub_D suffixes to constructors called from
8725 the global static constructors. These are always local. */
8726 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8727 || (startswith (type, "sub_")
8728 && (type[4] == 'I' || type[4] == 'D')))
8730 const char *file = main_input_filename;
8731 if (! file)
8732 file = LOCATION_FILE (input_location);
8733 /* Just use the file's basename, because the full pathname
8734 might be quite long. */
8735 p = q = ASTRDUP (lbasename (file));
8737 else
8739 /* Otherwise, the name must be unique across the entire link.
8740 We don't have anything that we know to be unique to this translation
8741 unit, so use what we do have and throw in some randomness. */
8742 unsigned len;
8743 const char *name = weak_global_object_name;
8744 const char *file = main_input_filename;
8746 if (! name)
8747 name = "";
8748 if (! file)
8749 file = LOCATION_FILE (input_location);
8751 len = strlen (file);
8752 q = (char *) alloca (9 + 19 + len + 1);
8753 memcpy (q, file, len + 1);
8755 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8756 crc32_string (0, name), get_random_seed (false));
8758 p = q;
8761 clean_symbol_name (q);
8762 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8763 + strlen (type));
8765 /* Set up the name of the file-level functions we may need.
8766 Use a global object (which is already required to be unique over
8767 the program) rather than the file name (which imposes extra
8768 constraints). */
8769 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8771 return get_identifier (buf);
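/* Illustrative note (editorial addition): with FILE_FUNCTION_FORMAT being
   "_GLOBAL__%s_%s", a call such as

     tree id = get_file_function_name ("I");

   produces an identifier of the form _GLOBAL__I_<cleaned unique name>, the
   kind of symbol used for a file-level static constructor function.  */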
8774 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8776 /* Complain that the tree code of NODE does not match the expected 0
8777 terminated list of trailing codes. The trailing code list can be
8778 empty, for a more vague error message. FILE, LINE, and FUNCTION
8779 are of the caller. */
8781 void
8782 tree_check_failed (const_tree node, const char *file,
8783 int line, const char *function, ...)
8785 va_list args;
8786 const char *buffer;
8787 unsigned length = 0;
8788 enum tree_code code;
8790 va_start (args, function);
8791 while ((code = (enum tree_code) va_arg (args, int)))
8792 length += 4 + strlen (get_tree_code_name (code));
8793 va_end (args);
8794 if (length)
8796 char *tmp;
8797 va_start (args, function);
8798 length += strlen ("expected ");
8799 buffer = tmp = (char *) alloca (length);
8800 length = 0;
8801 while ((code = (enum tree_code) va_arg (args, int)))
8803 const char *prefix = length ? " or " : "expected ";
8805 strcpy (tmp + length, prefix);
8806 length += strlen (prefix);
8807 strcpy (tmp + length, get_tree_code_name (code));
8808 length += strlen (get_tree_code_name (code));
8810 va_end (args);
8812 else
8813 buffer = "unexpected node";
8815 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8816 buffer, get_tree_code_name (TREE_CODE (node)),
8817 function, trim_filename (file), line);
8820 /* Complain that the tree code of NODE matches one of the codes in the 0
8821 terminated list of trailing codes, i.e. a code it was expected not to have.
8822 FILE, LINE, and FUNCTION are of the caller. */
8824 void
8825 tree_not_check_failed (const_tree node, const char *file,
8826 int line, const char *function, ...)
8828 va_list args;
8829 char *buffer;
8830 unsigned length = 0;
8831 enum tree_code code;
8833 va_start (args, function);
8834 while ((code = (enum tree_code) va_arg (args, int)))
8835 length += 4 + strlen (get_tree_code_name (code));
8836 va_end (args);
8837 va_start (args, function);
8838 buffer = (char *) alloca (length);
8839 length = 0;
8840 while ((code = (enum tree_code) va_arg (args, int)))
8842 if (length)
8844 strcpy (buffer + length, " or ");
8845 length += 4;
8847 strcpy (buffer + length, get_tree_code_name (code));
8848 length += strlen (get_tree_code_name (code));
8850 va_end (args);
8852 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8853 buffer, get_tree_code_name (TREE_CODE (node)),
8854 function, trim_filename (file), line);
8857 /* Similar to tree_check_failed, except that we check for a class of tree
8858 code, given in CL. */
8860 void
8861 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8862 const char *file, int line, const char *function)
8864 internal_error
8865 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8866 TREE_CODE_CLASS_STRING (cl),
8867 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8868 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8871 /* Similar to tree_check_failed, except that instead of specifying a
8872 dozen codes, use the knowledge that they're all sequential. */
8874 void
8875 tree_range_check_failed (const_tree node, const char *file, int line,
8876 const char *function, enum tree_code c1,
8877 enum tree_code c2)
8879 char *buffer;
8880 unsigned length = 0;
8881 unsigned int c;
8883 for (c = c1; c <= c2; ++c)
8884 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
8886 length += strlen ("expected ");
8887 buffer = (char *) alloca (length);
8888 length = 0;
8890 for (c = c1; c <= c2; ++c)
8892 const char *prefix = length ? " or " : "expected ";
8894 strcpy (buffer + length, prefix);
8895 length += strlen (prefix);
8896 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
8897 length += strlen (get_tree_code_name ((enum tree_code) c));
8900 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8901 buffer, get_tree_code_name (TREE_CODE (node)),
8902 function, trim_filename (file), line);
8906 /* Similar to tree_check_failed, except that we check that a tree does
8907 not belong to the specified class, given in CL. */
8909 void
8910 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
8911 const char *file, int line, const char *function)
8913 internal_error
8914 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8915 TREE_CODE_CLASS_STRING (cl),
8916 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8917 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8921 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
8923 void
8924 omp_clause_check_failed (const_tree node, const char *file, int line,
8925 const char *function, enum omp_clause_code code)
8927 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
8928 "in %s, at %s:%d",
8929 omp_clause_code_name[code],
8930 get_tree_code_name (TREE_CODE (node)),
8931 function, trim_filename (file), line);
8935 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
8937 void
8938 omp_clause_range_check_failed (const_tree node, const char *file, int line,
8939 const char *function, enum omp_clause_code c1,
8940 enum omp_clause_code c2)
8942 char *buffer;
8943 unsigned length = 0;
8944 unsigned int c;
8946 for (c = c1; c <= c2; ++c)
8947 length += 4 + strlen (omp_clause_code_name[c]);
8949 length += strlen ("expected ");
8950 buffer = (char *) alloca (length);
8951 length = 0;
8953 for (c = c1; c <= c2; ++c)
8955 const char *prefix = length ? " or " : "expected ";
8957 strcpy (buffer + length, prefix);
8958 length += strlen (prefix);
8959 strcpy (buffer + length, omp_clause_code_name[c]);
8960 length += strlen (omp_clause_code_name[c]);
8963 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8964 buffer, omp_clause_code_name[TREE_CODE (node)],
8965 function, trim_filename (file), line);
8969 #undef DEFTREESTRUCT
8970 #define DEFTREESTRUCT(VAL, NAME) NAME,
8972 static const char *ts_enum_names[] = {
8973 #include "treestruct.def"
8975 #undef DEFTREESTRUCT
8977 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
8979 /* Similar to tree_class_check_failed, except that we check for
8980 whether CODE contains the tree structure identified by EN. */
8982 void
8983 tree_contains_struct_check_failed (const_tree node,
8984 const enum tree_node_structure_enum en,
8985 const char *file, int line,
8986 const char *function)
8988 internal_error
8989 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
8990 TS_ENUM_NAME (en),
8991 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8995 /* Similar to above, except that the check is for the bounds of a TREE_INT_CST's
8996 (dynamically sized) vector of elements. */
8998 void
8999 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9000 const char *function)
9002 internal_error
9003 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9004 "at %s:%d",
9005 idx + 1, len, function, trim_filename (file), line);
9008 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9009 (dynamically sized) vector. */
9011 void
9012 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9013 const char *function)
9015 internal_error
9016 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9017 idx + 1, len, function, trim_filename (file), line);
9020 /* Similar to above, except that the check is for the bounds of the operand
9021 vector of an expression node EXP. */
9023 void
9024 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9025 int line, const char *function)
9027 enum tree_code code = TREE_CODE (exp);
9028 internal_error
9029 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9030 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9031 function, trim_filename (file), line);
9034 /* Similar to above, except that the check is for the number of
9035 operands of an OMP_CLAUSE node. */
9037 void
9038 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9039 int line, const char *function)
9041 internal_error
9042 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9043 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9044 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9045 trim_filename (file), line);
9047 #endif /* ENABLE_TREE_CHECKING */
9049 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9050 and mapped to the machine mode MODE. Initialize its fields and build
9051 the information necessary for debugging output. */
9053 static tree
9054 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9056 tree t;
9057 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9059 t = make_node (VECTOR_TYPE);
9060 TREE_TYPE (t) = mv_innertype;
9061 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9062 SET_TYPE_MODE (t, mode);
9064 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9065 SET_TYPE_STRUCTURAL_EQUALITY (t);
9066 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9067 || mode != VOIDmode)
9068 && !VECTOR_BOOLEAN_TYPE_P (t))
9069 TYPE_CANONICAL (t)
9070 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9072 layout_type (t);
9074 hashval_t hash = type_hash_canon_hash (t);
9075 t = type_hash_canon (hash, t);
9077 /* We have built a main variant, based on the main variant of the
9078 inner type. Use it to build the variant we return. */
9079 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9080 && TREE_TYPE (t) != innertype)
9081 return build_type_attribute_qual_variant (t,
9082 TYPE_ATTRIBUTES (innertype),
9083 TYPE_QUALS (innertype));
9085 return t;
9088 static tree
9089 make_or_reuse_type (unsigned size, int unsignedp)
9091 int i;
9093 if (size == INT_TYPE_SIZE)
9094 return unsignedp ? unsigned_type_node : integer_type_node;
9095 if (size == CHAR_TYPE_SIZE)
9096 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9097 if (size == SHORT_TYPE_SIZE)
9098 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9099 if (size == LONG_TYPE_SIZE)
9100 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9101 if (size == LONG_LONG_TYPE_SIZE)
9102 return (unsignedp ? long_long_unsigned_type_node
9103 : long_long_integer_type_node);
9105 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9106 if (size == int_n_data[i].bitsize
9107 && int_n_enabled_p[i])
9108 return (unsignedp ? int_n_trees[i].unsigned_type
9109 : int_n_trees[i].signed_type);
9111 if (unsignedp)
9112 return make_unsigned_type (size);
9113 else
9114 return make_signed_type (size);
9117 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9119 static tree
9120 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9122 if (satp)
9124 if (size == SHORT_FRACT_TYPE_SIZE)
9125 return unsignedp ? sat_unsigned_short_fract_type_node
9126 : sat_short_fract_type_node;
9127 if (size == FRACT_TYPE_SIZE)
9128 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9129 if (size == LONG_FRACT_TYPE_SIZE)
9130 return unsignedp ? sat_unsigned_long_fract_type_node
9131 : sat_long_fract_type_node;
9132 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9133 return unsignedp ? sat_unsigned_long_long_fract_type_node
9134 : sat_long_long_fract_type_node;
9136 else
9138 if (size == SHORT_FRACT_TYPE_SIZE)
9139 return unsignedp ? unsigned_short_fract_type_node
9140 : short_fract_type_node;
9141 if (size == FRACT_TYPE_SIZE)
9142 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9143 if (size == LONG_FRACT_TYPE_SIZE)
9144 return unsignedp ? unsigned_long_fract_type_node
9145 : long_fract_type_node;
9146 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9147 return unsignedp ? unsigned_long_long_fract_type_node
9148 : long_long_fract_type_node;
9151 return make_fract_type (size, unsignedp, satp);
9154 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9156 static tree
9157 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9159 if (satp)
9161 if (size == SHORT_ACCUM_TYPE_SIZE)
9162 return unsignedp ? sat_unsigned_short_accum_type_node
9163 : sat_short_accum_type_node;
9164 if (size == ACCUM_TYPE_SIZE)
9165 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9166 if (size == LONG_ACCUM_TYPE_SIZE)
9167 return unsignedp ? sat_unsigned_long_accum_type_node
9168 : sat_long_accum_type_node;
9169 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9170 return unsignedp ? sat_unsigned_long_long_accum_type_node
9171 : sat_long_long_accum_type_node;
9173 else
9175 if (size == SHORT_ACCUM_TYPE_SIZE)
9176 return unsignedp ? unsigned_short_accum_type_node
9177 : short_accum_type_node;
9178 if (size == ACCUM_TYPE_SIZE)
9179 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9180 if (size == LONG_ACCUM_TYPE_SIZE)
9181 return unsignedp ? unsigned_long_accum_type_node
9182 : long_accum_type_node;
9183 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9184 return unsignedp ? unsigned_long_long_accum_type_node
9185 : long_long_accum_type_node;
9188 return make_accum_type (size, unsignedp, satp);
9192 /* Create an atomic variant node for TYPE. This routine is called
9193 during initialization of data types to create the 5 basic atomic
9194 types. The generic build_variant_type function requires these to
9195 already be set up in order to function properly, so cannot be
9196 called from there. If ALIGN is non-zero, then ensure alignment is
9197 overridden to this value. */
9199 static tree
9200 build_atomic_base (tree type, unsigned int align)
9202 tree t;
9204 /* Make sure it's not already registered. */
9205 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9206 return t;
9208 t = build_variant_type_copy (type);
9209 set_type_quals (t, TYPE_QUAL_ATOMIC);
9211 if (align)
9212 SET_TYPE_ALIGN (t, align);
9214 return t;
9217 /* Information about the _FloatN and _FloatNx types. This must be in
9218 the same order as the corresponding TI_* enum values. */
9219 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9221 { 16, false },
9222 { 32, false },
9223 { 64, false },
9224 { 128, false },
9225 { 32, true },
9226 { 64, true },
9227 { 128, true },
9231 /* Create nodes for all integer types (and error_mark_node) using the sizes
9232 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9234 void
9235 build_common_tree_nodes (bool signed_char)
9237 int i;
9239 error_mark_node = make_node (ERROR_MARK);
9240 TREE_TYPE (error_mark_node) = error_mark_node;
9242 initialize_sizetypes ();
9244 /* Define both `signed char' and `unsigned char'. */
9245 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9246 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9247 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9248 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9250 /* Define `char', which is like either `signed char' or `unsigned char'
9251 but not the same as either. */
9252 char_type_node
9253 = (signed_char
9254 ? make_signed_type (CHAR_TYPE_SIZE)
9255 : make_unsigned_type (CHAR_TYPE_SIZE));
9256 TYPE_STRING_FLAG (char_type_node) = 1;
9258 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9259 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9260 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9261 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9262 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9263 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9264 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9265 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9267 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9269 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9270 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9272 if (int_n_enabled_p[i])
9274 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9275 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9279 /* Define a boolean type. This type only represents boolean values but
9280 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9281 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9282 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9283 TYPE_PRECISION (boolean_type_node) = 1;
9284 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9286 /* Define what type to use for size_t. */
9287 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9288 size_type_node = unsigned_type_node;
9289 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9290 size_type_node = long_unsigned_type_node;
9291 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9292 size_type_node = long_long_unsigned_type_node;
9293 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9294 size_type_node = short_unsigned_type_node;
9295 else
9297 int i;
9299 size_type_node = NULL_TREE;
9300 for (i = 0; i < NUM_INT_N_ENTS; i++)
9301 if (int_n_enabled_p[i])
9303 char name[50], altname[50];
9304 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9305 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9307 if (strcmp (name, SIZE_TYPE) == 0
9308 || strcmp (altname, SIZE_TYPE) == 0)
9310 size_type_node = int_n_trees[i].unsigned_type;
9313 if (size_type_node == NULL_TREE)
9314 gcc_unreachable ();
9317 /* Define what type to use for ptrdiff_t. */
9318 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9319 ptrdiff_type_node = integer_type_node;
9320 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9321 ptrdiff_type_node = long_integer_type_node;
9322 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9323 ptrdiff_type_node = long_long_integer_type_node;
9324 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9325 ptrdiff_type_node = short_integer_type_node;
9326 else
9328 ptrdiff_type_node = NULL_TREE;
9329 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9330 if (int_n_enabled_p[i])
9332 char name[50], altname[50];
9333 sprintf (name, "__int%d", int_n_data[i].bitsize);
9334 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9336 if (strcmp (name, PTRDIFF_TYPE) == 0
9337 || strcmp (altname, PTRDIFF_TYPE) == 0)
9338 ptrdiff_type_node = int_n_trees[i].signed_type;
9340 if (ptrdiff_type_node == NULL_TREE)
9341 gcc_unreachable ();
9344 /* Fill in the rest of the sized types. Reuse existing type nodes
9345 when possible. */
9346 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9347 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9348 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9349 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9350 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9352 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9353 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9354 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9355 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9356 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9358 /* Don't call build_qualified_type for atomics. That routine does
9359 special processing for atomics, and until they are initialized
9360 it's better not to make that call.
9362 Check to see if there is a target override for atomic types. */
9364 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9365 targetm.atomic_align_for_mode (QImode));
9366 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9367 targetm.atomic_align_for_mode (HImode));
9368 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9369 targetm.atomic_align_for_mode (SImode));
9370 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9371 targetm.atomic_align_for_mode (DImode));
9372 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9373 targetm.atomic_align_for_mode (TImode));
9375 access_public_node = get_identifier ("public");
9376 access_protected_node = get_identifier ("protected");
9377 access_private_node = get_identifier ("private");
9379 /* Define these next since types below may use them. */
9380 integer_zero_node = build_int_cst (integer_type_node, 0);
9381 integer_one_node = build_int_cst (integer_type_node, 1);
9382 integer_three_node = build_int_cst (integer_type_node, 3);
9383 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9385 size_zero_node = size_int (0);
9386 size_one_node = size_int (1);
9387 bitsize_zero_node = bitsize_int (0);
9388 bitsize_one_node = bitsize_int (1);
9389 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9391 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9392 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9394 void_type_node = make_node (VOID_TYPE);
9395 layout_type (void_type_node);
9397 /* We are not going to have real types in C with less than byte alignment,
9398 so we might as well not have any types that claim to have it. */
9399 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9400 TYPE_USER_ALIGN (void_type_node) = 0;
9402 void_node = make_node (VOID_CST);
9403 TREE_TYPE (void_node) = void_type_node;
9405 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9406 layout_type (TREE_TYPE (null_pointer_node));
9408 ptr_type_node = build_pointer_type (void_type_node);
9409 const_ptr_type_node
9410 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9411 for (unsigned i = 0;
9412 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
9413 ++i)
9414 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9416 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9418 float_type_node = make_node (REAL_TYPE);
9419 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9420 layout_type (float_type_node);
9422 double_type_node = make_node (REAL_TYPE);
9423 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9424 layout_type (double_type_node);
9426 long_double_type_node = make_node (REAL_TYPE);
9427 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9428 layout_type (long_double_type_node);
9430 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9432 int n = floatn_nx_types[i].n;
9433 bool extended = floatn_nx_types[i].extended;
9434 scalar_float_mode mode;
9435 if (!targetm.floatn_mode (n, extended).exists (&mode))
9436 continue;
9437 int precision = GET_MODE_PRECISION (mode);
9438 /* Work around the rs6000 KFmode having precision 113 not
9439 128. */
9440 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9441 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9442 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9443 if (!extended)
9444 gcc_assert (min_precision == n);
9445 if (precision < min_precision)
9446 precision = min_precision;
9447 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9448 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9449 layout_type (FLOATN_NX_TYPE_NODE (i));
9450 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9453 float_ptr_type_node = build_pointer_type (float_type_node);
9454 double_ptr_type_node = build_pointer_type (double_type_node);
9455 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9456 integer_ptr_type_node = build_pointer_type (integer_type_node);
9458 /* Fixed size integer types. */
9459 uint16_type_node = make_or_reuse_type (16, 1);
9460 uint32_type_node = make_or_reuse_type (32, 1);
9461 uint64_type_node = make_or_reuse_type (64, 1);
9462 if (targetm.scalar_mode_supported_p (TImode))
9463 uint128_type_node = make_or_reuse_type (128, 1);
9465 /* Decimal float types. */
9466 if (targetm.decimal_float_supported_p ())
9468 dfloat32_type_node = make_node (REAL_TYPE);
9469 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9470 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9471 layout_type (dfloat32_type_node);
9473 dfloat64_type_node = make_node (REAL_TYPE);
9474 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9475 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9476 layout_type (dfloat64_type_node);
9478 dfloat128_type_node = make_node (REAL_TYPE);
9479 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9480 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9481 layout_type (dfloat128_type_node);
9484 complex_integer_type_node = build_complex_type (integer_type_node, true);
9485 complex_float_type_node = build_complex_type (float_type_node, true);
9486 complex_double_type_node = build_complex_type (double_type_node, true);
9487 complex_long_double_type_node = build_complex_type (long_double_type_node,
9488 true);
9490 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9492 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9493 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9494 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9497 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9498 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9499 sat_ ## KIND ## _type_node = \
9500 make_sat_signed_ ## KIND ## _type (SIZE); \
9501 sat_unsigned_ ## KIND ## _type_node = \
9502 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9503 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9504 unsigned_ ## KIND ## _type_node = \
9505 make_unsigned_ ## KIND ## _type (SIZE);
9507 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9508 sat_ ## WIDTH ## KIND ## _type_node = \
9509 make_sat_signed_ ## KIND ## _type (SIZE); \
9510 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9511 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9512 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9513 unsigned_ ## WIDTH ## KIND ## _type_node = \
9514 make_unsigned_ ## KIND ## _type (SIZE);
9516 /* Make fixed-point type nodes based on four different widths. */
9517 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9518 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9519 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9520 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9521 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9523 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9524 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9525 NAME ## _type_node = \
9526 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9527 u ## NAME ## _type_node = \
9528 make_or_reuse_unsigned_ ## KIND ## _type \
9529 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9530 sat_ ## NAME ## _type_node = \
9531 make_or_reuse_sat_signed_ ## KIND ## _type \
9532 (GET_MODE_BITSIZE (MODE ## mode)); \
9533 sat_u ## NAME ## _type_node = \
9534 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9535 (GET_MODE_BITSIZE (U ## MODE ## mode));
9537 /* Fixed-point type and mode nodes. */
9538 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9539 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9540 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9541 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9542 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9543 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9544 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9545 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9546 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9547 MAKE_FIXED_MODE_NODE (accum, da, DA)
9548 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9551 tree t = targetm.build_builtin_va_list ();
9553 /* Many back-ends define record types without setting TYPE_NAME.
9554 If we copied the record type here, we'd keep the original
9555 record type without a name. This breaks name mangling. So,
9556 don't copy record types and let c_common_nodes_and_builtins()
9557 declare the type to be __builtin_va_list. */
9558 if (TREE_CODE (t) != RECORD_TYPE)
9559 t = build_variant_type_copy (t);
9561 va_list_type_node = t;
9564 /* SCEV analyzer global shared trees. */
9565 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9566 TREE_TYPE (chrec_dont_know) = void_type_node;
9567 chrec_known = make_node (SCEV_KNOWN);
9568 TREE_TYPE (chrec_known) = void_type_node;
9571 /* Modify DECL for given flags.
9572 TM_PURE attribute is set only on types, so the function will modify
9573 DECL's type when ECF_TM_PURE is used. */
9575 void
9576 set_call_expr_flags (tree decl, int flags)
9578 if (flags & ECF_NOTHROW)
9579 TREE_NOTHROW (decl) = 1;
9580 if (flags & ECF_CONST)
9581 TREE_READONLY (decl) = 1;
9582 if (flags & ECF_PURE)
9583 DECL_PURE_P (decl) = 1;
9584 if (flags & ECF_LOOPING_CONST_OR_PURE)
9585 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9586 if (flags & ECF_NOVOPS)
9587 DECL_IS_NOVOPS (decl) = 1;
9588 if (flags & ECF_NORETURN)
9589 TREE_THIS_VOLATILE (decl) = 1;
9590 if (flags & ECF_MALLOC)
9591 DECL_IS_MALLOC (decl) = 1;
9592 if (flags & ECF_RETURNS_TWICE)
9593 DECL_IS_RETURNS_TWICE (decl) = 1;
9594 if (flags & ECF_LEAF)
9595 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9596 NULL, DECL_ATTRIBUTES (decl));
9597 if (flags & ECF_COLD)
9598 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9599 NULL, DECL_ATTRIBUTES (decl));
9600 if (flags & ECF_RET1)
9601 DECL_ATTRIBUTES (decl)
9602 = tree_cons (get_identifier ("fn spec"),
9603 build_tree_list (NULL_TREE, build_string (2, "1 ")),
9604 DECL_ATTRIBUTES (decl));
9605 if ((flags & ECF_TM_PURE) && flag_tm)
9606 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9607 /* Looping const or pure is implied by noreturn.
9608 There is currently no way to declare looping const or looping pure alone. */
9609 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9610 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
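/* Illustrative note (editorial addition): local_define_builtin below is the
   main consumer, but the flag-to-attribute mapping can be driven directly,
   e.g. to mark some FUNCTION_DECL (here hypothetical) as nothrow, leaf and
   malloc-like:

     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF | ECF_MALLOC);  */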
9614 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9616 static void
9617 local_define_builtin (const char *name, tree type, enum built_in_function code,
9618 const char *library_name, int ecf_flags)
9620 tree decl;
9622 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9623 library_name, NULL_TREE);
9624 set_call_expr_flags (decl, ecf_flags);
9626 set_builtin_decl (code, decl, true);
9629 /* Call this function after instantiating all builtins that the language
9630 front end cares about. This will build the rest of the builtins
9631 and internal functions that are relied upon by the tree optimizers and
9632 the middle-end. */
9634 void
9635 build_common_builtin_nodes (void)
9637 tree tmp, ftype;
9638 int ecf_flags;
9640 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
9642 ftype = build_function_type_list (void_type_node,
9643 ptr_type_node,
9644 ptr_type_node,
9645 integer_type_node,
9646 NULL_TREE);
9647 local_define_builtin ("__builtin_clear_padding", ftype,
9648 BUILT_IN_CLEAR_PADDING,
9649 "__builtin_clear_padding",
9650 ECF_LEAF | ECF_NOTHROW);
9653 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9654 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9656 ftype = build_function_type (void_type_node, void_list_node);
9657 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9658 local_define_builtin ("__builtin_unreachable", ftype,
9659 BUILT_IN_UNREACHABLE,
9660 "__builtin_unreachable",
9661 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9662 | ECF_CONST | ECF_COLD);
9663 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9664 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9665 "abort",
9666 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9669 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9670 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9672 ftype = build_function_type_list (ptr_type_node,
9673 ptr_type_node, const_ptr_type_node,
9674 size_type_node, NULL_TREE);
9676 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9677 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9678 "memcpy", ECF_NOTHROW | ECF_LEAF);
9679 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9680 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9681 "memmove", ECF_NOTHROW | ECF_LEAF);
9684 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9686 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9687 const_ptr_type_node, size_type_node,
9688 NULL_TREE);
9689 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9690 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9693 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9695 ftype = build_function_type_list (ptr_type_node,
9696 ptr_type_node, integer_type_node,
9697 size_type_node, NULL_TREE);
9698 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9699 "memset", ECF_NOTHROW | ECF_LEAF);
9702 /* If we're checking the stack, `alloca' can throw. */
9703 const int alloca_flags
9704 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9706 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9708 ftype = build_function_type_list (ptr_type_node,
9709 size_type_node, NULL_TREE);
9710 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9711 "alloca", alloca_flags);
9714 ftype = build_function_type_list (ptr_type_node, size_type_node,
9715 size_type_node, NULL_TREE);
9716 local_define_builtin ("__builtin_alloca_with_align", ftype,
9717 BUILT_IN_ALLOCA_WITH_ALIGN,
9718 "__builtin_alloca_with_align",
9719 alloca_flags);
9721 ftype = build_function_type_list (ptr_type_node, size_type_node,
9722 size_type_node, size_type_node, NULL_TREE);
9723 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9724 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9725 "__builtin_alloca_with_align_and_max",
9726 alloca_flags);
9728 ftype = build_function_type_list (void_type_node,
9729 ptr_type_node, ptr_type_node,
9730 ptr_type_node, NULL_TREE);
9731 local_define_builtin ("__builtin_init_trampoline", ftype,
9732 BUILT_IN_INIT_TRAMPOLINE,
9733 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9734 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9735 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9736 "__builtin_init_heap_trampoline",
9737 ECF_NOTHROW | ECF_LEAF);
9738 local_define_builtin ("__builtin_init_descriptor", ftype,
9739 BUILT_IN_INIT_DESCRIPTOR,
9740 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9742 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9743 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9744 BUILT_IN_ADJUST_TRAMPOLINE,
9745 "__builtin_adjust_trampoline",
9746 ECF_CONST | ECF_NOTHROW);
9747 local_define_builtin ("__builtin_adjust_descriptor", ftype,
9748 BUILT_IN_ADJUST_DESCRIPTOR,
9749 "__builtin_adjust_descriptor",
9750 ECF_CONST | ECF_NOTHROW);
9752 ftype = build_function_type_list (void_type_node,
9753 ptr_type_node, ptr_type_node, NULL_TREE);
9754 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
9755 local_define_builtin ("__builtin___clear_cache", ftype,
9756 BUILT_IN_CLEAR_CACHE,
9757 "__clear_cache",
9758 ECF_NOTHROW);
9760 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9761 BUILT_IN_NONLOCAL_GOTO,
9762 "__builtin_nonlocal_goto",
9763 ECF_NORETURN | ECF_NOTHROW);
9765 ftype = build_function_type_list (void_type_node,
9766 ptr_type_node, ptr_type_node, NULL_TREE);
9767 local_define_builtin ("__builtin_setjmp_setup", ftype,
9768 BUILT_IN_SETJMP_SETUP,
9769 "__builtin_setjmp_setup", ECF_NOTHROW);
9771 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9772 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9773 BUILT_IN_SETJMP_RECEIVER,
9774 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9776 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9777 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9778 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9780 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9781 local_define_builtin ("__builtin_stack_restore", ftype,
9782 BUILT_IN_STACK_RESTORE,
9783 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9785 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9786 const_ptr_type_node, size_type_node,
9787 NULL_TREE);
9788 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
9789 "__builtin_memcmp_eq",
9790 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9792 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
9793 "__builtin_strncmp_eq",
9794 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9796 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
9797 "__builtin_strcmp_eq",
9798 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9800 /* If there's a possibility that we might use the ARM EABI, build the
9801 alternate __cxa_end_cleanup node used to resume from C++. */
9802 if (targetm.arm_eabi_unwinder)
9804 ftype = build_function_type_list (void_type_node, NULL_TREE);
9805 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9806 BUILT_IN_CXA_END_CLEANUP,
9807 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9810 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9811 local_define_builtin ("__builtin_unwind_resume", ftype,
9812 BUILT_IN_UNWIND_RESUME,
9813 ((targetm_common.except_unwind_info (&global_options)
9814 == UI_SJLJ)
9815 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9816 ECF_NORETURN);
9818 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9820 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9821 NULL_TREE);
9822 local_define_builtin ("__builtin_return_address", ftype,
9823 BUILT_IN_RETURN_ADDRESS,
9824 "__builtin_return_address",
9825 ECF_NOTHROW);
9828 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9829 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9831 ftype = build_function_type_list (void_type_node, ptr_type_node,
9832 ptr_type_node, NULL_TREE);
9833 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
9834 local_define_builtin ("__cyg_profile_func_enter", ftype,
9835 BUILT_IN_PROFILE_FUNC_ENTER,
9836 "__cyg_profile_func_enter", 0);
9837 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9838 local_define_builtin ("__cyg_profile_func_exit", ftype,
9839 BUILT_IN_PROFILE_FUNC_EXIT,
9840 "__cyg_profile_func_exit", 0);
9843 /* The exception object and filter values from the runtime. The argument
9844 must be zero before exception lowering, i.e. from the front end. After
9845 exception lowering, it will be the region number for the exception
9846 landing pad. These functions are PURE instead of CONST to prevent
9847 them from being hoisted past the exception edge that will initialize
9848 its value in the landing pad. */
9849 ftype = build_function_type_list (ptr_type_node,
9850 integer_type_node, NULL_TREE);
9851 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
9852 /* Only use TM_PURE if we have TM language support. */
9853 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
9854 ecf_flags |= ECF_TM_PURE;
9855 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
9856 "__builtin_eh_pointer", ecf_flags);
9858 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
9859 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
9860 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
9861 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9863 ftype = build_function_type_list (void_type_node,
9864 integer_type_node, integer_type_node,
9865 NULL_TREE);
9866 local_define_builtin ("__builtin_eh_copy_values", ftype,
9867 BUILT_IN_EH_COPY_VALUES,
9868 "__builtin_eh_copy_values", ECF_NOTHROW);
9870 /* Complex multiplication and division. These are handled as builtins
9871 rather than optabs because emit_library_call_value doesn't support
9872 complex. Further, we can do slightly better with folding these
9873 beasties if the real and imaginary parts of the arguments are separate. */
9875 int mode;
9877 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
9879 char mode_name_buf[4], *q;
9880 const char *p;
9881 enum built_in_function mcode, dcode;
9882 tree type, inner_type;
9883 const char *prefix = "__";
9885 if (targetm.libfunc_gnu_prefix)
9886 prefix = "__gnu_";
9888 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
9889 if (type == NULL)
9890 continue;
9891 inner_type = TREE_TYPE (type);
9893 ftype = build_function_type_list (type, inner_type, inner_type,
9894 inner_type, inner_type, NULL_TREE);
9896 mcode = ((enum built_in_function)
9897 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9898 dcode = ((enum built_in_function)
9899 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9901 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
9902 *q = TOLOWER (*p);
9903 *q = '\0';
9905 /* For -ftrapping-math these should throw from a former
9906 -fnon-call-exception stmt. */
9907 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
9908 NULL);
9909 local_define_builtin (built_in_names[mcode], ftype, mcode,
9910 built_in_names[mcode],
9911 ECF_CONST | ECF_LEAF);
9913 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
9914 NULL);
9915 local_define_builtin (built_in_names[dcode], ftype, dcode,
9916 built_in_names[dcode],
9917 ECF_CONST | ECF_LEAF);
9921 init_internal_fns ();
9924 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
9925 better way.
9927 If we requested a pointer to a vector, build up the pointers that
9928 we stripped off while looking for the inner type. Similarly for
9929 return values from functions.
9931 The argument TYPE is the top of the chain, and BOTTOM is the
9932 new type which we will point to. */
9934 tree
9935 reconstruct_complex_type (tree type, tree bottom)
9937 tree inner, outer;
9939 if (TREE_CODE (type) == POINTER_TYPE)
9941 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9942 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
9943 TYPE_REF_CAN_ALIAS_ALL (type));
9945 else if (TREE_CODE (type) == REFERENCE_TYPE)
9947 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9948 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
9949 TYPE_REF_CAN_ALIAS_ALL (type));
9951 else if (TREE_CODE (type) == ARRAY_TYPE)
9953 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9954 outer = build_array_type (inner, TYPE_DOMAIN (type));
9956 else if (TREE_CODE (type) == FUNCTION_TYPE)
9958 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9959 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
9961 else if (TREE_CODE (type) == METHOD_TYPE)
9963 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9964 /* The build_method_type_directly() routine prepends 'this' to the argument
9965 list, so we must compensate by removing it. */
9966 outer
9967 = build_method_type_directly
9968 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
9969 inner,
9970 TREE_CHAIN (TYPE_ARG_TYPES (type)));
9972 else if (TREE_CODE (type) == OFFSET_TYPE)
9974 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9975 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
9977 else
9978 return bottom;
9980 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
9981 TYPE_QUALS (type));
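/* A minimal usage sketch for the function above (VECTYPE is hypothetical
   and stands for some vector type already built by the caller, e.g. via
   build_vector_type):

     tree scalar_ptr = build_pointer_type (float_type_node);
     tree vec_ptr = reconstruct_complex_type (scalar_ptr, VECTYPE);

   SCALAR_PTR is "float *"; the call rebuilds the pointer layer on top of
   VECTYPE, yielding a pointer to VECTYPE with the original pointer's mode,
   qualifiers and attributes preserved.  */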
9984 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
9985 the inner type. */
9986 tree
9987 build_vector_type_for_mode (tree innertype, machine_mode mode)
9989 poly_int64 nunits;
9990 unsigned int bitsize;
9992 switch (GET_MODE_CLASS (mode))
9994 case MODE_VECTOR_BOOL:
9995 case MODE_VECTOR_INT:
9996 case MODE_VECTOR_FLOAT:
9997 case MODE_VECTOR_FRACT:
9998 case MODE_VECTOR_UFRACT:
9999 case MODE_VECTOR_ACCUM:
10000 case MODE_VECTOR_UACCUM:
10001 nunits = GET_MODE_NUNITS (mode);
10002 break;
10004 case MODE_INT:
10005 /* Check that there are no leftover bits. */
10006 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10007 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10008 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10009 break;
10011 default:
10012 gcc_unreachable ();
10015 return make_vector_type (innertype, nunits, mode);
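/* A minimal sketch of the two ways a vector type can be requested
   (V4SFmode is target-dependent and used here purely for illustration):

     tree v4sf = build_vector_type_for_mode (float_type_node, V4SFmode);
     tree v4si = build_vector_type (intSI_type_node, 4);

   The first form fixes the machine mode up front; the second passes
   VOIDmode to make_vector_type, which then picks a suitable mode.  */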
10018 /* Similarly, but takes the inner type and number of units, which must be
10019 a power of two. */
10021 tree
10022 build_vector_type (tree innertype, poly_int64 nunits)
10024 return make_vector_type (innertype, nunits, VOIDmode);
10027 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10029 tree
10030 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10032 gcc_assert (mask_mode != BLKmode);
10034 unsigned HOST_WIDE_INT esize;
10035 if (VECTOR_MODE_P (mask_mode))
10037 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
10038 esize = vector_element_size (vsize, nunits);
10040 else
10041 esize = 1;
10043 tree bool_type = build_nonstandard_boolean_type (esize);
10045 return make_vector_type (bool_type, nunits, mask_mode);
10048 /* Build a vector type that holds one boolean result for each element of
10049 vector type VECTYPE. The public interface for this operation is
10050 truth_type_for. */
10052 static tree
10053 build_truth_vector_type_for (tree vectype)
10055 machine_mode vector_mode = TYPE_MODE (vectype);
10056 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10058 machine_mode mask_mode;
10059 if (VECTOR_MODE_P (vector_mode)
10060 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10061 return build_truth_vector_type_for_mode (nunits, mask_mode);
10063 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10064 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10065 tree bool_type = build_nonstandard_boolean_type (esize);
10067 return make_vector_type (bool_type, nunits, VOIDmode);
10070 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10071 set. */
10073 tree
10074 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10076 tree t = make_vector_type (innertype, nunits, VOIDmode);
10077 tree cand;
10078 /* We always build the non-opaque variant before the opaque one,
10079 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10080 cand = TYPE_NEXT_VARIANT (t);
10081 if (cand
10082 && TYPE_VECTOR_OPAQUE (cand)
10083 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10084 return cand;
10085 /* Otherwise build a variant type and make sure to queue it after
10086 the non-opaque type. */
10087 cand = build_distinct_type_copy (t);
10088 TYPE_VECTOR_OPAQUE (cand) = true;
10089 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10090 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10091 TYPE_NEXT_VARIANT (t) = cand;
10092 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10093 return cand;
10096 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10098 static poly_wide_int
10099 vector_cst_int_elt (const_tree t, unsigned int i)
10101 /* First handle elements that are directly encoded. */
10102 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10103 if (i < encoded_nelts)
10104 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
10106 /* Identify the pattern that contains element I and work out the index of
10107 the last encoded element for that pattern. */
10108 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10109 unsigned int pattern = i % npatterns;
10110 unsigned int count = i / npatterns;
10111 unsigned int final_i = encoded_nelts - npatterns + pattern;
10113 /* If there are no steps, the final encoded value is the right one. */
10114 if (!VECTOR_CST_STEPPED_P (t))
10115 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10117 /* Otherwise work out the value from the last two encoded elements. */
10118 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10119 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10120 poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
10121 return wi::to_poly_wide (v2) + (count - 2) * diff;
10124 /* Return the value of element I of VECTOR_CST T. */
10126 tree
10127 vector_cst_elt (const_tree t, unsigned int i)
10129 /* First handle elements that are directly encoded. */
10130 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10131 if (i < encoded_nelts)
10132 return VECTOR_CST_ENCODED_ELT (t, i);
10134 /* If there are no steps, the final encoded value is the right one. */
10135 if (!VECTOR_CST_STEPPED_P (t))
10137 /* Identify the pattern that contains element I and work out the index of
10138 the last encoded element for that pattern. */
10139 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10140 unsigned int pattern = i % npatterns;
10141 unsigned int final_i = encoded_nelts - npatterns + pattern;
10142 return VECTOR_CST_ENCODED_ELT (t, final_i);
10145 /* Otherwise work out the value from the last two encoded elements. */
10146 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10147 vector_cst_int_elt (t, i));
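/* A worked example of the encoding used above (the values are purely
   illustrative): for the stepped constant { 1, 11, 2, 12, 3, 13, ... }
   NPATTERNS == 2 and NELTS_PER_PATTERN == 3, so the encoded elements are
   { 1, 11, 2, 12, 3, 13 }.  Element I == 8 belongs to pattern 8 % 2 == 0
   with COUNT == 8 / 2 == 4; the last two encoded elements of that pattern
   are V1 == 2 and V2 == 3, giving DIFF == 1 and a value of
   3 + (4 - 2) * 1 == 5, as expected for the fifth element of pattern 0.  */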
10150 /* Given an initializer INIT, return TRUE if INIT is zero or some
10151 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10152 null, set *NONZERO if and only if INIT is known not to be all
10153 zeros. A return value of false combined with *NONZERO being false
10154 means that INIT may but need not be all zeros. Other
10155 combinations indicate definitive answers. */
10157 bool
10158 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10160 bool dummy;
10161 if (!nonzero)
10162 nonzero = &dummy;
10164 /* Conservatively clear NONZERO and set it only if INIT is definitely
10165 not all zero. */
10166 *nonzero = false;
10168 STRIP_NOPS (init);
10170 unsigned HOST_WIDE_INT off = 0;
10172 switch (TREE_CODE (init))
10174 case INTEGER_CST:
10175 if (integer_zerop (init))
10176 return true;
10178 *nonzero = true;
10179 return false;
10181 case REAL_CST:
10182 /* ??? Note that this is not correct for C4X float formats. There,
10183 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10184 negative exponent. */
10185 if (real_zerop (init)
10186 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10187 return true;
10189 *nonzero = true;
10190 return false;
10192 case FIXED_CST:
10193 if (fixed_zerop (init))
10194 return true;
10196 *nonzero = true;
10197 return false;
10199 case COMPLEX_CST:
10200 if (integer_zerop (init)
10201 || (real_zerop (init)
10202 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10203 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
10204 return true;
10206 *nonzero = true;
10207 return false;
10209 case VECTOR_CST:
10210 if (VECTOR_CST_NPATTERNS (init) == 1
10211 && VECTOR_CST_DUPLICATE_P (init)
10212 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
10213 return true;
10215 *nonzero = true;
10216 return false;
10218 case CONSTRUCTOR:
10220 if (TREE_CLOBBER_P (init))
10221 return false;
10223 unsigned HOST_WIDE_INT idx;
10224 tree elt;
10226 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10227 if (!initializer_zerop (elt, nonzero))
10228 return false;
10230 return true;
10233 case MEM_REF:
10235 tree arg = TREE_OPERAND (init, 0);
10236 if (TREE_CODE (arg) != ADDR_EXPR)
10237 return false;
10238 tree offset = TREE_OPERAND (init, 1);
10239 if (TREE_CODE (offset) != INTEGER_CST
10240 || !tree_fits_uhwi_p (offset))
10241 return false;
10242 off = tree_to_uhwi (offset);
10243 if (INT_MAX < off)
10244 return false;
10245 arg = TREE_OPERAND (arg, 0);
10246 if (TREE_CODE (arg) != STRING_CST)
10247 return false;
10248 init = arg;
10250 /* Fall through. */
10252 case STRING_CST:
10254 gcc_assert (off <= INT_MAX);
10256 int i = off;
10257 int n = TREE_STRING_LENGTH (init);
10258 if (n <= i)
10259 return false;
10261 /* We need to loop through all elements to handle cases like
10262 "\0" and "\0foobar". */
10263 for (i = 0; i < n; ++i)
10264 if (TREE_STRING_POINTER (init)[i] != '\0')
10266 *nonzero = true;
10267 return false;
10270 return true;
10273 default:
10274 return false;
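/* A minimal usage sketch (INIT stands for any initializer tree already
   built by the caller):

     bool nonzero;
     if (initializer_zerop (INIT, &nonzero))
       ...   INIT is definitely all zeros
     else if (nonzero)
       ...   INIT definitely contains something nonzero
     else
       ...   unknown: INIT may or may not be all zeros
*/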
10278 /* Return true if EXPR is an initializer expression in which every element
10279 is a constant that is numerically equal to 0 or 1. The elements do not
10280 need to be equal to each other. */
10282 bool
10283 initializer_each_zero_or_onep (const_tree expr)
10285 STRIP_ANY_LOCATION_WRAPPER (expr);
10287 switch (TREE_CODE (expr))
10289 case INTEGER_CST:
10290 return integer_zerop (expr) || integer_onep (expr);
10292 case REAL_CST:
10293 return real_zerop (expr) || real_onep (expr);
10295 case VECTOR_CST:
10297 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10298 if (VECTOR_CST_STEPPED_P (expr)
10299 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10300 return false;
10302 for (unsigned int i = 0; i < nelts; ++i)
10304 tree elt = vector_cst_elt (expr, i);
10305 if (!initializer_each_zero_or_onep (elt))
10306 return false;
10309 return true;
10312 default:
10313 return false;
10317 /* Check whether vector VEC consists entirely of equal elements and
10318 whether the number of elements corresponds to the type of VEC.
10319 The function returns the first element of the vector
10320 or NULL_TREE if the vector is not uniform. */
10321 tree
10322 uniform_vector_p (const_tree vec)
10324 tree first, t;
10325 unsigned HOST_WIDE_INT i, nelts;
10327 if (vec == NULL_TREE)
10328 return NULL_TREE;
10330 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10332 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10333 return TREE_OPERAND (vec, 0);
10335 else if (TREE_CODE (vec) == VECTOR_CST)
10337 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10338 return VECTOR_CST_ENCODED_ELT (vec, 0);
10339 return NULL_TREE;
10342 else if (TREE_CODE (vec) == CONSTRUCTOR
10343 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10345 first = error_mark_node;
10347 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10349 if (i == 0)
10351 first = t;
10352 continue;
10354 if (!operand_equal_p (first, t, 0))
10355 return NULL_TREE;
10357 if (i != nelts)
10358 return NULL_TREE;
10360 if (TREE_CODE (first) == CONSTRUCTOR || TREE_CODE (first) == VECTOR_CST)
10361 return uniform_vector_p (first);
10362 return first;
10365 return NULL_TREE;
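/* A minimal usage sketch (VEC is a vector-typed tree the caller already
   has in hand, e.g. a VECTOR_CST or a CONSTRUCTOR):

     tree elt = uniform_vector_p (VEC);
     if (elt)
       ...   every element of VEC is equal to ELT
*/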
10368 /* If the argument is an INTEGER_CST, return it. If the argument is a vector
10369 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10370 return NULL_TREE.
10371 Look through location wrappers. */
10373 tree
10374 uniform_integer_cst_p (tree t)
10376 STRIP_ANY_LOCATION_WRAPPER (t);
10378 if (TREE_CODE (t) == INTEGER_CST)
10379 return t;
10381 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10383 t = uniform_vector_p (t);
10384 if (t && TREE_CODE (t) == INTEGER_CST)
10385 return t;
10388 return NULL_TREE;
10391 /* Check whether T is a constant or a constant vector in which every element
10392 E satisfies ~E + 1 == a power of 2; if so return ~E, otherwise NULL_TREE. */
10394 tree
10395 bitmask_inv_cst_vector_p (tree t)
10398 tree_code code = TREE_CODE (t);
10399 tree type = TREE_TYPE (t);
10401 if (!INTEGRAL_TYPE_P (type)
10402 && !VECTOR_INTEGER_TYPE_P (type))
10403 return NULL_TREE;
10405 unsigned HOST_WIDE_INT nelts = 1;
10406 tree cst;
10407 unsigned int idx = 0;
10408 bool uniform = uniform_integer_cst_p (t);
10409 tree newtype = unsigned_type_for (type);
10410 tree_vector_builder builder;
10411 if (code == INTEGER_CST)
10412 cst = t;
10413 else
10415 if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
10416 return NULL_TREE;
10418 cst = vector_cst_elt (t, 0);
10419 builder.new_vector (newtype, nelts, 1);
10422 tree ty = unsigned_type_for (TREE_TYPE (cst));
10426 if (idx > 0)
10427 cst = vector_cst_elt (t, idx);
10428 wide_int icst = wi::to_wide (cst);
10429 wide_int inv = wi::bit_not (icst);
10430 icst = wi::add (1, inv);
10431 if (wi::popcount (icst) != 1)
10432 return NULL_TREE;
10434 tree newcst = wide_int_to_tree (ty, inv);
10436 if (uniform)
10437 return build_uniform_cst (newtype, newcst);
10439 builder.quick_push (newcst);
10441 while (++idx < nelts);
10443 return builder.build ();
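/* A worked example for the check above, on a 32-bit element:
   E == 0xfffffff0, so ~E == 0x0000000f and ~E + 1 == 0x10, which has a
   single bit set; the function therefore returns 0xf (in the unsigned
   variant of the element type).  For E == 0xfffffff2, ~E + 1 == 0xe has
   more than one bit set and NULL_TREE is returned.  */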
10446 /* If VECTOR_CST T has a single nonzero element, return the index of that
10447 element, otherwise return -1. */
10450 single_nonzero_element (const_tree t)
10452 unsigned HOST_WIDE_INT nelts;
10453 unsigned int repeat_nelts;
10454 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
10455 repeat_nelts = nelts;
10456 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
10458 nelts = vector_cst_encoded_nelts (t);
10459 repeat_nelts = VECTOR_CST_NPATTERNS (t);
10461 else
10462 return -1;
10464 int res = -1;
10465 for (unsigned int i = 0; i < nelts; ++i)
10467 tree elt = vector_cst_elt (t, i);
10468 if (!integer_zerop (elt) && !real_zerop (elt))
10470 if (res >= 0 || i >= repeat_nelts)
10471 return -1;
10472 res = i;
10475 return res;
10478 /* Build an empty statement at location LOC. */
10480 tree
10481 build_empty_stmt (location_t loc)
10483 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10484 SET_EXPR_LOCATION (t, loc);
10485 return t;
10489 /* Build an OMP clause with code CODE. LOC is the location of the
10490 clause. */
10492 tree
10493 build_omp_clause (location_t loc, enum omp_clause_code code)
10495 tree t;
10496 int size, length;
10498 length = omp_clause_num_ops[code];
10499 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10501 record_node_allocation_statistics (OMP_CLAUSE, size);
10503 t = (tree) ggc_internal_alloc (size);
10504 memset (t, 0, size);
10505 TREE_SET_CODE (t, OMP_CLAUSE);
10506 OMP_CLAUSE_SET_CODE (t, code);
10507 OMP_CLAUSE_LOCATION (t) = loc;
10509 return t;
10512 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10513 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10514 Except for the CODE and operand count field, other storage for the
10515 object is initialized to zeros. */
10517 tree
10518 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10520 tree t;
10521 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10523 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10524 gcc_assert (len >= 1);
10526 record_node_allocation_statistics (code, length);
10528 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10530 TREE_SET_CODE (t, code);
10532 /* Can't use TREE_OPERAND to store the length because if checking is
10533 enabled, it will try to check the length before we store it. :-P */
10534 t->exp.operands[0] = build_int_cst (sizetype, len);
10536 return t;
10539 /* Helper function for build_call_* functions; build a CALL_EXPR with
10540 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10541 the argument slots. */
10543 static tree
10544 build_call_1 (tree return_type, tree fn, int nargs)
10546 tree t;
10548 t = build_vl_exp (CALL_EXPR, nargs + 3);
10549 TREE_TYPE (t) = return_type;
10550 CALL_EXPR_FN (t) = fn;
10551 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10553 return t;
10556 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10557 FN and a null static chain slot. NARGS is the number of call arguments
10558 which are specified as "..." arguments. */
10560 tree
10561 build_call_nary (tree return_type, tree fn, int nargs, ...)
10563 tree ret;
10564 va_list args;
10565 va_start (args, nargs);
10566 ret = build_call_valist (return_type, fn, nargs, args);
10567 va_end (args);
10568 return ret;
10571 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10572 FN and a null static chain slot. NARGS is the number of call arguments
10573 which are specified as a va_list ARGS. */
10575 tree
10576 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10578 tree t;
10579 int i;
10581 t = build_call_1 (return_type, fn, nargs);
10582 for (i = 0; i < nargs; i++)
10583 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10584 process_call_operands (t);
10585 return t;
10588 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10589 FN and a null static chain slot. NARGS is the number of call arguments
10590 which are specified as a tree array ARGS. */
10592 tree
10593 build_call_array_loc (location_t loc, tree return_type, tree fn,
10594 int nargs, const tree *args)
10596 tree t;
10597 int i;
10599 t = build_call_1 (return_type, fn, nargs);
10600 for (i = 0; i < nargs; i++)
10601 CALL_EXPR_ARG (t, i) = args[i];
10602 process_call_operands (t);
10603 SET_EXPR_LOCATION (t, loc);
10604 return t;
10607 /* Like build_call_array, but takes a vec. */
10609 tree
10610 build_call_vec (tree return_type, tree fn, const vec<tree, va_gc> *args)
10612 tree ret, t;
10613 unsigned int ix;
10615 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10616 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10617 CALL_EXPR_ARG (ret, ix) = t;
10618 process_call_operands (ret);
10619 return ret;
10622 /* Conveniently construct a function call expression. FNDECL names the
10623 function to be called and N arguments are passed in the array
10624 ARGARRAY. */
10626 tree
10627 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10629 tree fntype = TREE_TYPE (fndecl);
10630 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10632 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10635 /* Conveniently construct a function call expression. FNDECL names the
10636 function to be called and the arguments are passed in the vector
10637 VEC. */
10639 tree
10640 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10642 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10643 vec_safe_address (vec));
10647 /* Conveniently construct a function call expression. FNDECL names the
10648 function to be called, N is the number of arguments, and the "..."
10649 parameters are the argument expressions. */
10651 tree
10652 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10654 va_list ap;
10655 tree *argarray = XALLOCAVEC (tree, n);
10656 int i;
10658 va_start (ap, n);
10659 for (i = 0; i < n; i++)
10660 argarray[i] = va_arg (ap, tree);
10661 va_end (ap);
10662 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10665 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10666 varargs macros aren't supported by all bootstrap compilers. */
10668 tree
10669 build_call_expr (tree fndecl, int n, ...)
10671 va_list ap;
10672 tree *argarray = XALLOCAVEC (tree, n);
10673 int i;
10675 va_start (ap, n);
10676 for (i = 0; i < n; i++)
10677 argarray[i] = va_arg (ap, tree);
10678 va_end (ap);
10679 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
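/* A minimal usage sketch (assumes the memcpy builtin is available; DST,
   SRC and NBYTES stand for trees the caller already built):

     tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr (fn, 3, DST, SRC, NBYTES);

   The variadic arguments become CALL_EXPR_ARG 0..2 of the new node.  */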
10682 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10683 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10684 It will get gimplified later into an ordinary internal function. */
10686 tree
10687 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10688 tree type, int n, const tree *args)
10690 tree t = build_call_1 (type, NULL_TREE, n);
10691 for (int i = 0; i < n; ++i)
10692 CALL_EXPR_ARG (t, i) = args[i];
10693 SET_EXPR_LOCATION (t, loc);
10694 CALL_EXPR_IFN (t) = ifn;
10695 process_call_operands (t);
10696 return t;
10699 /* Build internal call expression. This is just like CALL_EXPR, except
10700 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10701 internal function. */
10703 tree
10704 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10705 tree type, int n, ...)
10707 va_list ap;
10708 tree *argarray = XALLOCAVEC (tree, n);
10709 int i;
10711 va_start (ap, n);
10712 for (i = 0; i < n; i++)
10713 argarray[i] = va_arg (ap, tree);
10714 va_end (ap);
10715 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10718 /* Return a function call to FN, if the target is guaranteed to support it,
10719 or null otherwise.
10721 N is the number of arguments, passed in the "...", and TYPE is the
10722 type of the return value. */
10724 tree
10725 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10726 int n, ...)
10728 va_list ap;
10729 tree *argarray = XALLOCAVEC (tree, n);
10730 int i;
10732 va_start (ap, n);
10733 for (i = 0; i < n; i++)
10734 argarray[i] = va_arg (ap, tree);
10735 va_end (ap);
10736 if (internal_fn_p (fn))
10738 internal_fn ifn = as_internal_fn (fn);
10739 if (direct_internal_fn_p (ifn))
10741 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10742 if (!direct_internal_fn_supported_p (ifn, types,
10743 OPTIMIZE_FOR_BOTH))
10744 return NULL_TREE;
10746 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10748 else
10750 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10751 if (!fndecl)
10752 return NULL_TREE;
10753 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10757 /* Return a function call to the appropriate builtin alloca variant.
10759 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10760 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10761 bound for SIZE in case it is not a fixed value. */
10763 tree
10764 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10766 if (max_size >= 0)
10768 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10769 return
10770 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10772 else if (align > 0)
10774 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10775 return build_call_expr (t, 2, size, size_int (align));
10777 else
10779 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10780 return build_call_expr (t, 1, size);
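/* A minimal usage sketch (SIZE stands for a size tree the caller already
   has):

     tree a1 = build_alloca_call_expr (SIZE, 0, -1);     plain alloca
     tree a2 = build_alloca_call_expr (SIZE, 16, -1);    16-byte aligned
     tree a3 = build_alloca_call_expr (SIZE, 16, 4096);  aligned, SIZE bounded by 4096
*/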
10784 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10785 if SIZE == -1) and return a tree node representing char* pointer to
10786 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10787 the STRING_CST value is the LEN bytes at STR (the representation
10788 of the string, which may be wide). Otherwise it's all zeros. */
10790 tree
10791 build_string_literal (unsigned len, const char *str /* = NULL */,
10792 tree eltype /* = char_type_node */,
10793 unsigned HOST_WIDE_INT size /* = -1 */)
10795 tree t = build_string (len, str);
10796 /* Set the maximum valid index based on the string length or SIZE. */
10797 unsigned HOST_WIDE_INT maxidx
10798 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
10800 tree index = build_index_type (size_int (maxidx));
10801 eltype = build_type_variant (eltype, 1, 0);
10802 tree type = build_array_type (eltype, index);
10803 TREE_TYPE (t) = type;
10804 TREE_CONSTANT (t) = 1;
10805 TREE_READONLY (t) = 1;
10806 TREE_STATIC (t) = 1;
10808 type = build_pointer_type (eltype);
10809 t = build1 (ADDR_EXPR, type,
10810 build4 (ARRAY_REF, eltype,
10811 t, integer_zero_node, NULL_TREE, NULL_TREE));
10812 return t;
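/* A minimal usage sketch: build the address of a constant "hi" string
   (length 3 so the terminating NUL is included) with the default element
   type and size:

     tree addr = build_string_literal (3, "hi");

   The result is an ADDR_EXPR of an ARRAY_REF into a read-only, static
   STRING_CST of type "const char[3]".  */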
10817 /* Return true if T (assumed to be a DECL) must be assigned a memory
10818 location. */
10820 bool
10821 needs_to_live_in_memory (const_tree t)
10823 return (TREE_ADDRESSABLE (t)
10824 || is_global_var (t)
10825 || (TREE_CODE (t) == RESULT_DECL
10826 && !DECL_BY_REFERENCE (t)
10827 && aggregate_value_p (t, current_function_decl)));
10830 /* Return value of a constant X and sign-extend it. */
10832 HOST_WIDE_INT
10833 int_cst_value (const_tree x)
10835 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10836 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10838 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10839 gcc_assert (cst_and_fits_in_hwi (x));
10841 if (bits < HOST_BITS_PER_WIDE_INT)
10843 bool negative = ((val >> (bits - 1)) & 1) != 0;
10844 if (negative)
10845 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
10846 else
10847 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
10850 return val;
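/* A worked example of the sign extension above: for an 8-bit type,
   BITS == 8, so a constant whose low bits are 0xff has its sign bit set
   and VAL is widened to HOST_WIDE_INT -1, while 0x7f stays 127.  */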
10853 /* If TYPE is an integral or pointer type, return an integer type with
10854 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10855 if TYPE is already an integer type of signedness UNSIGNEDP.
10856 If TYPE is a floating-point type, return an integer type with the same
10857 bitsize and with the signedness given by UNSIGNEDP; this is useful
10858 when doing bit-level operations on a floating-point value. */
10860 tree
10861 signed_or_unsigned_type_for (int unsignedp, tree type)
10863 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
10864 return type;
10866 if (TREE_CODE (type) == VECTOR_TYPE)
10868 tree inner = TREE_TYPE (type);
10869 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10870 if (!inner2)
10871 return NULL_TREE;
10872 if (inner == inner2)
10873 return type;
10874 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10877 if (TREE_CODE (type) == COMPLEX_TYPE)
10879 tree inner = TREE_TYPE (type);
10880 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10881 if (!inner2)
10882 return NULL_TREE;
10883 if (inner == inner2)
10884 return type;
10885 return build_complex_type (inner2);
10888 unsigned int bits;
10889 if (INTEGRAL_TYPE_P (type)
10890 || POINTER_TYPE_P (type)
10891 || TREE_CODE (type) == OFFSET_TYPE)
10892 bits = TYPE_PRECISION (type);
10893 else if (TREE_CODE (type) == REAL_TYPE)
10894 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
10895 else
10896 return NULL_TREE;
10898 return build_nonstandard_integer_type (bits, unsignedp);
10901 /* If TYPE is an integral or pointer type, return an integer type with
10902 the same precision which is unsigned, or itself if TYPE is already an
10903 unsigned integer type. If TYPE is a floating-point type, return an
10904 unsigned integer type with the same bitsize as TYPE. */
10906 tree
10907 unsigned_type_for (tree type)
10909 return signed_or_unsigned_type_for (1, type);
10912 /* If TYPE is an integral or pointer type, return an integer type with
10913 the same precision which is signed, or itself if TYPE is already a
10914 signed integer type. If TYPE is a floating-point type, return a
10915 signed integer type with the same bitsize as TYPE. */
10917 tree
10918 signed_type_for (tree type)
10920 return signed_or_unsigned_type_for (0, type);
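/* A minimal usage sketch: for a 32-bit signed integer type,
   unsigned_type_for returns an unsigned integer type of the same
   precision; for a vector or complex type the conversion is applied to
   the element type, e.g. (intSI_type_node is used purely for
   illustration):

     tree uvec = unsigned_type_for (build_vector_type (intSI_type_node, 4));

   yields a vector of four 32-bit unsigned elements.  */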
10923 /* - For VECTOR_TYPEs:
10924 - The truth type must be a VECTOR_BOOLEAN_TYPE.
10925 - The number of elements must match (known_eq).
10926 - targetm.vectorize.get_mask_mode exists and yields exactly
10927 the same mode as the truth type.
10928 - Otherwise, the truth type must be a BOOLEAN_TYPE
10929 or useless_type_conversion_p to BOOLEAN_TYPE. */
10930 bool
10931 is_truth_type_for (tree type, tree truth_type)
10933 machine_mode mask_mode = TYPE_MODE (truth_type);
10934 machine_mode vmode = TYPE_MODE (type);
10935 machine_mode tmask_mode;
10937 if (TREE_CODE (type) == VECTOR_TYPE)
10939 if (VECTOR_BOOLEAN_TYPE_P (truth_type)
10940 && known_eq (TYPE_VECTOR_SUBPARTS (type),
10941 TYPE_VECTOR_SUBPARTS (truth_type))
10942 && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
10943 && tmask_mode == mask_mode)
10944 return true;
10946 return false;
10949 return useless_type_conversion_p (boolean_type_node, truth_type);
10952 /* If TYPE is a vector type, return a boolean vector type with the
10953 same number of subparts. Otherwise return boolean_type_node. */
10955 tree
10956 truth_type_for (tree type)
10958 if (TREE_CODE (type) == VECTOR_TYPE)
10960 if (VECTOR_BOOLEAN_TYPE_P (type))
10961 return type;
10962 return build_truth_vector_type_for (type);
10964 else
10965 return boolean_type_node;
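/* A minimal usage sketch (V4SF is hypothetical and stands for a
   4-element float vector type built earlier, e.g. with
   build_vector_type):

     tree mask_type = truth_type_for (V4SF);

   On targets with a dedicated mask mode this is a vector boolean type in
   that mode; otherwise it is a vector of booleans whose element size
   matches the element size of V4SF.  Scalar types simply map to
   boolean_type_node.  */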
10968 /* Returns the largest value obtainable by casting something in INNER type to
10969 OUTER type. */
10971 tree
10972 upper_bound_in_type (tree outer, tree inner)
10974 unsigned int det = 0;
10975 unsigned oprec = TYPE_PRECISION (outer);
10976 unsigned iprec = TYPE_PRECISION (inner);
10977 unsigned prec;
10979 /* Compute a unique number for every combination. */
10980 det |= (oprec > iprec) ? 4 : 0;
10981 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10982 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10984 /* Determine the exponent to use. */
10985 switch (det)
10987 case 0:
10988 case 1:
10989 /* oprec <= iprec, outer: signed, inner: don't care. */
10990 prec = oprec - 1;
10991 break;
10992 case 2:
10993 case 3:
10994 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10995 prec = oprec;
10996 break;
10997 case 4:
10998 /* oprec > iprec, outer: signed, inner: signed. */
10999 prec = iprec - 1;
11000 break;
11001 case 5:
11002 /* oprec > iprec, outer: signed, inner: unsigned. */
11003 prec = iprec;
11004 break;
11005 case 6:
11006 /* oprec > iprec, outer: unsigned, inner: signed. */
11007 prec = oprec;
11008 break;
11009 case 7:
11010 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11011 prec = iprec;
11012 break;
11013 default:
11014 gcc_unreachable ();
11017 return wide_int_to_tree (outer,
11018 wi::mask (prec, false, TYPE_PRECISION (outer)));
11021 /* Returns the smallest value obtainable by casting something in INNER type to
11022 OUTER type. */
11024 tree
11025 lower_bound_in_type (tree outer, tree inner)
11027 unsigned oprec = TYPE_PRECISION (outer);
11028 unsigned iprec = TYPE_PRECISION (inner);
11030 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11031 and obtain 0. */
11032 if (TYPE_UNSIGNED (outer)
11033 /* If we are widening something of an unsigned type, OUTER type
11034 contains all values of INNER type. In particular, both INNER
11035 and OUTER types have zero in common. */
11036 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11037 return build_int_cst (outer, 0);
11038 else
11040 /* If we are widening a signed type to another signed type, we
11041 want to obtain -2^^(iprec-1). If we are keeping the
11042 precision or narrowing to a signed type, we want to obtain
11043 -2^(oprec-1). */
11044 unsigned prec = oprec > iprec ? iprec : oprec;
11045 return wide_int_to_tree (outer,
11046 wi::mask (prec - 1, true,
11047 TYPE_PRECISION (outer)));
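/* A worked example for the two bounds above: casting a 16-bit signed
   INNER value to a 32-bit unsigned OUTER type (DET == 6) can produce
   anything up to 0xffffffff (e.g. casting -1), so upper_bound_in_type
   returns the all-ones mask of 32 bits, while lower_bound_in_type
   returns 0 because OUTER is unsigned.  */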
11051 /* Return nonzero if two operands that are suitable for PHI nodes are
11052 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11053 SSA_NAME or invariant. Note that this is strictly an optimization.
11054 That is, callers of this function can directly call operand_equal_p
11055 and get the same result, only slower. */
11058 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11060 if (arg0 == arg1)
11061 return 1;
11062 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11063 return 0;
11064 return operand_equal_p (arg0, arg1, 0);
11067 /* Returns the number of zeros at the end of the binary representation of X. */
11069 tree
11070 num_ending_zeros (const_tree x)
11072 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11076 #define WALK_SUBTREE(NODE) \
11077 do \
11079 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11080 if (result) \
11081 return result; \
11083 while (0)
11085 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11086 to be walked whenever a type is seen in the tree. The rest of the operands
11087 and the return value are as for walk_tree. */
11089 static tree
11090 walk_type_fields (tree type, walk_tree_fn func, void *data,
11091 hash_set<tree> *pset, walk_tree_lh lh)
11093 tree result = NULL_TREE;
11095 switch (TREE_CODE (type))
11097 case POINTER_TYPE:
11098 case REFERENCE_TYPE:
11099 case VECTOR_TYPE:
11100 /* We have to worry about mutually recursive pointers. These can't
11101 be written in C. They can in Ada. It's pathological, but
11102 there's an ACATS test (c38102a) that checks it. Deal with this
11103 by checking if we're pointing to another pointer, that one
11104 points to another pointer, that one does too, and we have no htab.
11105 If so, get a hash table. We check three levels deep to avoid
11106 the cost of the hash table if we don't need one. */
11107 if (POINTER_TYPE_P (TREE_TYPE (type))
11108 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11109 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11110 && !pset)
11112 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11113 func, data);
11114 if (result)
11115 return result;
11117 break;
11120 /* fall through */
11122 case COMPLEX_TYPE:
11123 WALK_SUBTREE (TREE_TYPE (type));
11124 break;
11126 case METHOD_TYPE:
11127 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11129 /* Fall through. */
11131 case FUNCTION_TYPE:
11132 WALK_SUBTREE (TREE_TYPE (type));
11134 tree arg;
11136 /* We never want to walk into default arguments. */
11137 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11138 WALK_SUBTREE (TREE_VALUE (arg));
11140 break;
11142 case ARRAY_TYPE:
11143 /* Don't follow this node's type if it is a pointer, for fear that
11144 we'll have infinite recursion. If we have a PSET, then we
11145 need not fear. */
11146 if (pset
11147 || (!POINTER_TYPE_P (TREE_TYPE (type))
11148 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11149 WALK_SUBTREE (TREE_TYPE (type));
11150 WALK_SUBTREE (TYPE_DOMAIN (type));
11151 break;
11153 case OFFSET_TYPE:
11154 WALK_SUBTREE (TREE_TYPE (type));
11155 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11156 break;
11158 default:
11159 break;
11162 return NULL_TREE;
11165 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11166 called with the DATA and the address of each sub-tree. If FUNC returns a
11167 non-NULL value, the traversal is stopped, and the value returned by FUNC
11168 is returned. If PSET is non-NULL it is used to record the nodes visited,
11169 and to avoid visiting a node more than once. */
11171 tree
11172 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11173 hash_set<tree> *pset, walk_tree_lh lh)
11175 enum tree_code code;
11176 int walk_subtrees;
11177 tree result;
11179 #define WALK_SUBTREE_TAIL(NODE) \
11180 do \
11182 tp = & (NODE); \
11183 goto tail_recurse; \
11185 while (0)
11187 tail_recurse:
11188 /* Skip empty subtrees. */
11189 if (!*tp)
11190 return NULL_TREE;
11192 /* Don't walk the same tree twice, if the user has requested
11193 that we avoid doing so. */
11194 if (pset && pset->add (*tp))
11195 return NULL_TREE;
11197 /* Call the function. */
11198 walk_subtrees = 1;
11199 result = (*func) (tp, &walk_subtrees, data);
11201 /* If we found something, return it. */
11202 if (result)
11203 return result;
11205 code = TREE_CODE (*tp);
11207 /* Even if we didn't, FUNC may have decided that there was nothing
11208 interesting below this point in the tree. */
11209 if (!walk_subtrees)
11211 /* But we still need to check our siblings. */
11212 if (code == TREE_LIST)
11213 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11214 else if (code == OMP_CLAUSE)
11215 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11216 else
11217 return NULL_TREE;
11220 if (lh)
11222 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11223 if (result || !walk_subtrees)
11224 return result;
11227 switch (code)
11229 case ERROR_MARK:
11230 case IDENTIFIER_NODE:
11231 case INTEGER_CST:
11232 case REAL_CST:
11233 case FIXED_CST:
11234 case STRING_CST:
11235 case BLOCK:
11236 case PLACEHOLDER_EXPR:
11237 case SSA_NAME:
11238 case FIELD_DECL:
11239 case RESULT_DECL:
11240 /* None of these have subtrees other than those already walked
11241 above. */
11242 break;
11244 case TREE_LIST:
11245 WALK_SUBTREE (TREE_VALUE (*tp));
11246 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11248 case TREE_VEC:
11250 int len = TREE_VEC_LENGTH (*tp);
11252 if (len == 0)
11253 break;
11255 /* Walk all elements but the first. */
11256 while (--len)
11257 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11259 /* Now walk the first one as a tail call. */
11260 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11263 case VECTOR_CST:
11265 unsigned len = vector_cst_encoded_nelts (*tp);
11266 if (len == 0)
11267 break;
11268 /* Walk all elements but the first. */
11269 while (--len)
11270 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
11271 /* Now walk the first one as a tail call. */
11272 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
11275 case COMPLEX_CST:
11276 WALK_SUBTREE (TREE_REALPART (*tp));
11277 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11279 case CONSTRUCTOR:
11281 unsigned HOST_WIDE_INT idx;
11282 constructor_elt *ce;
11284 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11285 idx++)
11286 WALK_SUBTREE (ce->value);
11288 break;
11290 case SAVE_EXPR:
11291 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11293 case BIND_EXPR:
11295 tree decl;
11296 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11298 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11299 into declarations that are just mentioned, rather than
11300 declared; they don't really belong to this part of the tree.
11301 And, we can see cycles: the initializer for a declaration
11302 can refer to the declaration itself. */
11303 WALK_SUBTREE (DECL_INITIAL (decl));
11304 WALK_SUBTREE (DECL_SIZE (decl));
11305 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11307 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11310 case STATEMENT_LIST:
11312 tree_stmt_iterator i;
11313 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11314 WALK_SUBTREE (*tsi_stmt_ptr (i));
11316 break;
11318 case OMP_CLAUSE:
11320 int len = omp_clause_num_ops[OMP_CLAUSE_CODE (*tp)];
11321 for (int i = 0; i < len; i++)
11322 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11323 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11326 case TARGET_EXPR:
11328 int i, len;
11330 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11331 But, we only want to walk once. */
11332 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11333 for (i = 0; i < len; ++i)
11334 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11335 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11338 case DECL_EXPR:
11339 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11340 defining. We only want to walk into these fields of a type in this
11341 case and not in the general case of a mere reference to the type.
11343 The criterion is as follows: if the field can be an expression, it
11344 must be walked only here. This should be in keeping with the fields
11345 that are directly gimplified in gimplify_type_sizes in order for the
11346 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11347 variable-sized types.
11349 Note that DECLs get walked as part of processing the BIND_EXPR. */
11350 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11352 /* Call the function for the decl so e.g. copy_tree_body_r can
11353 replace it with the remapped one. */
11354 result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
11355 if (result || !walk_subtrees)
11356 return result;
11358 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11359 if (TREE_CODE (*type_p) == ERROR_MARK)
11360 return NULL_TREE;
11362 /* Call the function for the type. See if it returns anything or
11363 doesn't want us to continue. If we are to continue, walk both
11364 the normal fields and those for the declaration case. */
11365 result = (*func) (type_p, &walk_subtrees, data);
11366 if (result || !walk_subtrees)
11367 return result;
11369 /* But do not walk a pointed-to type since it may itself need to
11370 be walked in the declaration case if it isn't anonymous. */
11371 if (!POINTER_TYPE_P (*type_p))
11373 result = walk_type_fields (*type_p, func, data, pset, lh);
11374 if (result)
11375 return result;
11378 /* If this is a record type, also walk the fields. */
11379 if (RECORD_OR_UNION_TYPE_P (*type_p))
11381 tree field;
11383 for (field = TYPE_FIELDS (*type_p); field;
11384 field = DECL_CHAIN (field))
11386 /* We'd like to look at the type of the field, but we can
11387 easily get infinite recursion. So assume it's pointed
11388 to elsewhere in the tree. Also, ignore things that
11389 aren't fields. */
11390 if (TREE_CODE (field) != FIELD_DECL)
11391 continue;
11393 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11394 WALK_SUBTREE (DECL_SIZE (field));
11395 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11396 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11397 WALK_SUBTREE (DECL_QUALIFIER (field));
11401 /* Same for scalar types. */
11402 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11403 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11404 || TREE_CODE (*type_p) == INTEGER_TYPE
11405 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11406 || TREE_CODE (*type_p) == REAL_TYPE)
11408 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11409 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11412 WALK_SUBTREE (TYPE_SIZE (*type_p));
11413 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11415 /* FALLTHRU */
11417 default:
11418 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11420 int i, len;
11422 /* Walk over all the sub-trees of this operand. */
11423 len = TREE_OPERAND_LENGTH (*tp);
11425 /* Go through the subtrees. We need to do this in forward order so
11426 that the scope of a FOR_EXPR is handled properly. */
11427 if (len)
11429 for (i = 0; i < len - 1; ++i)
11430 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11431 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11434 /* If this is a type, walk the needed fields in the type. */
11435 else if (TYPE_P (*tp))
11436 return walk_type_fields (*tp, func, data, pset, lh);
11437 break;
11440 /* We didn't find what we were looking for. */
11441 return NULL_TREE;
11443 #undef WALK_SUBTREE_TAIL
11445 #undef WALK_SUBTREE
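/* A minimal sketch of a walk_tree callback.  The walk_tree and
   walk_tree_without_duplicates macros in tree.h wrap the functions above
   with a NULL PSET/LH; count_nodes_r and EXPR below are hypothetical:

     static tree
     count_nodes_r (tree *tp, int *walk_subtrees, void *data)
     {
       (*(unsigned *) data)++;
       if (TYPE_P (*tp))
         *walk_subtrees = 0;
       return NULL_TREE;
     }

     unsigned n = 0;
     walk_tree_without_duplicates (&EXPR, count_nodes_r, &n);

   Returning NULL_TREE keeps the walk going; clearing *WALK_SUBTREES stops
   the walk from descending below the current node (here, below types).  */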
11447 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11449 tree
11450 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11451 walk_tree_lh lh)
11453 tree result;
11455 hash_set<tree> pset;
11456 result = walk_tree_1 (tp, func, data, &pset, lh);
11457 return result;
11461 tree
11462 tree_block (tree t)
11464 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11466 if (IS_EXPR_CODE_CLASS (c))
11467 return LOCATION_BLOCK (t->exp.locus);
11468 gcc_unreachable ();
11469 return NULL;
11472 void
11473 tree_set_block (tree t, tree b)
11475 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11477 if (IS_EXPR_CODE_CLASS (c))
11479 t->exp.locus = set_block (t->exp.locus, b);
11481 else
11482 gcc_unreachable ();
11485 /* Create a nameless artificial label and put it in the current
11486 function context. The label has a location of LOC. Returns the
11487 newly created label. */
11489 tree
11490 create_artificial_label (location_t loc)
11492 tree lab = build_decl (loc,
11493 LABEL_DECL, NULL_TREE, void_type_node);
11495 DECL_ARTIFICIAL (lab) = 1;
11496 DECL_IGNORED_P (lab) = 1;
11497 DECL_CONTEXT (lab) = current_function_decl;
11498 return lab;
11501 /* Given a tree, try to return a useful variable name that we can use
11502 to prefix a temporary that is being assigned the value of the tree.
11503 I.e. given <temp> = &A, return A. */
11505 const char *
11506 get_name (tree t)
11508 tree stripped_decl;
11510 stripped_decl = t;
11511 STRIP_NOPS (stripped_decl);
11512 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11513 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11514 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11516 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11517 if (!name)
11518 return NULL;
11519 return IDENTIFIER_POINTER (name);
11521 else
11523 switch (TREE_CODE (stripped_decl))
11525 case ADDR_EXPR:
11526 return get_name (TREE_OPERAND (stripped_decl, 0));
11527 default:
11528 return NULL;
11533 /* Return true if FNTYPE has a variable argument list. */
11535 bool
11536 stdarg_p (const_tree fntype)
11538 function_args_iterator args_iter;
11539 tree n = NULL_TREE, t;
11541 if (!fntype)
11542 return false;
11544 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11546 n = t;
11549 return n != NULL_TREE && n != void_type_node;
11552 /* Return true if FNTYPE has a prototype. */
11554 bool
11555 prototype_p (const_tree fntype)
11557 tree t;
11559 gcc_assert (fntype != NULL_TREE);
11561 t = TYPE_ARG_TYPES (fntype);
11562 return (t != NULL_TREE);
11565 /* If BLOCK is inlined from an __attribute__((__artificial__))
11566 routine, return a pointer to the location from which it has been
11567 called. */
11568 location_t *
11569 block_nonartificial_location (tree block)
11571 location_t *ret = NULL;
11573 while (block && TREE_CODE (block) == BLOCK
11574 && BLOCK_ABSTRACT_ORIGIN (block))
11576 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11577 if (TREE_CODE (ao) == FUNCTION_DECL)
11579 /* If AO is an artificial inline, point RET to the
11580 call site locus at which it has been inlined and continue
11581 the loop, in case AO's caller is also an artificial
11582 inline. */
11583 if (DECL_DECLARED_INLINE_P (ao)
11584 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11585 ret = &BLOCK_SOURCE_LOCATION (block);
11586 else
11587 break;
11589 else if (TREE_CODE (ao) != BLOCK)
11590 break;
11592 block = BLOCK_SUPERCONTEXT (block);
11594 return ret;
11598 /* If EXP is inlined from an __attribute__((__artificial__))
11599 function, return the location of the original call expression. */
11601 location_t
11602 tree_nonartificial_location (tree exp)
11604 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11606 if (loc)
11607 return *loc;
11608 else
11609 return EXPR_LOCATION (exp);
11612 /* Return the location into which EXP has been inlined. Analogous
11613 to tree_nonartificial_location() above but not limited to artificial
11614 functions declared inline. If SYSTEM_HEADER is true, return
11615 the macro expansion point of the location if it's in a system header. */
11617 location_t
11618 tree_inlined_location (tree exp, bool system_header /* = true */)
11620 location_t loc = UNKNOWN_LOCATION;
11622 tree block = TREE_BLOCK (exp);
11624 while (block && TREE_CODE (block) == BLOCK
11625 && BLOCK_ABSTRACT_ORIGIN (block))
11627 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11628 if (TREE_CODE (ao) == FUNCTION_DECL)
11629 loc = BLOCK_SOURCE_LOCATION (block);
11630 else if (TREE_CODE (ao) != BLOCK)
11631 break;
11633 block = BLOCK_SUPERCONTEXT (block);
11636 if (loc == UNKNOWN_LOCATION)
11638 loc = EXPR_LOCATION (exp);
11639 if (system_header)
11640 /* Only consider macro expansion when the block traversal failed
11641 to find a location. Otherwise it's not relevant. */
11642 return expansion_point_location_if_in_system_header (loc);
11645 return loc;
11648 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11649 nodes. */
11651 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11653 hashval_t
11654 cl_option_hasher::hash (tree x)
11656 const_tree const t = x;
11658 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11659 return cl_optimization_hash (TREE_OPTIMIZATION (t));
11660 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11661 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11662 else
11663 gcc_unreachable ();
11666 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11667 TARGET_OPTION tree node) is the same as that given by *Y, which is a
11668 node of the same kind. */
11670 bool
11671 cl_option_hasher::equal (tree x, tree y)
11673 const_tree const xt = x;
11674 const_tree const yt = y;
11676 if (TREE_CODE (xt) != TREE_CODE (yt))
11677 return 0;
11679 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11680 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11681 TREE_OPTIMIZATION (yt));
11682 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11683 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11684 TREE_TARGET_OPTION (yt));
11685 else
11686 gcc_unreachable ();
11689 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11691 tree
11692 build_optimization_node (struct gcc_options *opts,
11693 struct gcc_options *opts_set)
11695 tree t;
11697 /* Use the cache of optimization nodes. */
11699 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11700 opts, opts_set);
11702 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11703 t = *slot;
11704 if (!t)
11706 /* Insert this one into the hash table. */
11707 t = cl_optimization_node;
11708 *slot = t;
11710 /* Make a new node for next time round. */
11711 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11714 return t;
11717 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11719 tree
11720 build_target_option_node (struct gcc_options *opts,
11721 struct gcc_options *opts_set)
11723 tree t;
11725 /* Use the cache of optimization nodes. */
11727 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11728 opts, opts_set);
11730 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11731 t = *slot;
11732 if (!t)
11734 /* Insert this one into the hash table. */
11735 t = cl_target_option_node;
11736 *slot = t;
11738 /* Make a new node for next time round. */
11739 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11742 return t;
11745 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11746 so that they aren't saved during PCH writing. */
11748 void
11749 prepare_target_option_nodes_for_pch (void)
11751 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11752 for (; iter != cl_option_hash_table->end (); ++iter)
11753 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11754 TREE_TARGET_GLOBALS (*iter) = NULL;
11757 /* Determine the "ultimate origin" of a block. */
11759 tree
11760 block_ultimate_origin (const_tree block)
11762 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11764 if (origin == NULL_TREE)
11765 return NULL_TREE;
11766 else
11768 gcc_checking_assert ((DECL_P (origin)
11769 && DECL_ORIGIN (origin) == origin)
11770 || BLOCK_ORIGIN (origin) == origin);
11771 return origin;
11775 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11776 no instruction. */
11778 bool
11779 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11781 /* Do not strip casts into or out of differing address spaces. */
11782 if (POINTER_TYPE_P (outer_type)
11783 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11785 if (!POINTER_TYPE_P (inner_type)
11786 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11787 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11788 return false;
11790 else if (POINTER_TYPE_P (inner_type)
11791 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11793 /* We already know that outer_type is not a pointer with
11794 a non-generic address space. */
11795 return false;
11798 /* Use precision rather than machine mode when we can, which gives
11799 the correct answer even for submode (bit-field) types. */
11800 if ((INTEGRAL_TYPE_P (outer_type)
11801 || POINTER_TYPE_P (outer_type)
11802 || TREE_CODE (outer_type) == OFFSET_TYPE)
11803 && (INTEGRAL_TYPE_P (inner_type)
11804 || POINTER_TYPE_P (inner_type)
11805 || TREE_CODE (inner_type) == OFFSET_TYPE))
11806 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11808 /* Otherwise fall back on comparing machine modes (e.g. for
11809 aggregate types, floats). */
11810 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11813 /* Return true iff conversion in EXP generates no instruction. Mark
11814 it inline so that we fully inline into the stripping functions even
11815 though we have two uses of this function. */
11817 static inline bool
11818 tree_nop_conversion (const_tree exp)
11820 tree outer_type, inner_type;
11822 if (location_wrapper_p (exp))
11823 return true;
11824 if (!CONVERT_EXPR_P (exp)
11825 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11826 return false;
11828 outer_type = TREE_TYPE (exp);
11829 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11830 if (!inner_type || inner_type == error_mark_node)
11831 return false;
11833 return tree_nop_conversion_p (outer_type, inner_type);
11836 /* Return true iff conversion in EXP generates no instruction. Don't
11837 consider conversions changing the signedness. */
11839 static bool
11840 tree_sign_nop_conversion (const_tree exp)
11842 tree outer_type, inner_type;
11844 if (!tree_nop_conversion (exp))
11845 return false;
11847 outer_type = TREE_TYPE (exp);
11848 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11850 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11851 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11854 /* Strip conversions from EXP according to tree_nop_conversion and
11855 return the resulting expression. */
11857 tree
11858 tree_strip_nop_conversions (tree exp)
11860 while (tree_nop_conversion (exp))
11861 exp = TREE_OPERAND (exp, 0);
11862 return exp;
11865 /* Strip conversions from EXP according to tree_sign_nop_conversion
11866 and return the resulting expression. */
11868 tree
11869 tree_strip_sign_nop_conversions (tree exp)
11871 while (tree_sign_nop_conversion (exp))
11872 exp = TREE_OPERAND (exp, 0);
11873 return exp;
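/* Illustrative sketch (hypothetical caller, not compiled): EXPR is some
   expression whose useless outer casts should be ignored.  The second form
   keeps signedness- or pointerness-changing casts in place.  */
#if 0
  tree bare = tree_strip_nop_conversions (expr);
  tree bare_same_sign = tree_strip_sign_nop_conversions (expr);
#endif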
11876 /* Strip any floating-point extensions from EXP and return the resulting
 expression. */
11877 tree
11878 strip_float_extensions (tree exp)
11880 tree sub, expt, subt;
11882 /* For a floating-point constant, look up the narrowest type that can hold
11883 it properly and handle it like (type)(narrowest_type)constant.
11884 This way we can optimize for instance a=a*2.0 where "a" is float
11885 but 2.0 is a double constant. */
11886 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11888 REAL_VALUE_TYPE orig;
11889 tree type = NULL;
11891 orig = TREE_REAL_CST (exp);
11892 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11893 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11894 type = float_type_node;
11895 else if (TYPE_PRECISION (TREE_TYPE (exp))
11896 > TYPE_PRECISION (double_type_node)
11897 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11898 type = double_type_node;
11899 if (type)
11900 return build_real_truncate (type, orig);
11903 if (!CONVERT_EXPR_P (exp))
11904 return exp;
11906 sub = TREE_OPERAND (exp, 0);
11907 subt = TREE_TYPE (sub);
11908 expt = TREE_TYPE (exp);
11910 if (!FLOAT_TYPE_P (subt))
11911 return exp;
11913 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11914 return exp;
11916 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11917 return exp;
11919 return strip_float_extensions (sub);
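/* Illustrative sketch (hypothetical caller, not compiled): OP is an operand
   of a floating-point operation; if stripping extensions narrows it, the
   operation could potentially be carried out in the narrower type.  */
#if 0
  tree narrow = strip_float_extensions (op);
  if (TYPE_PRECISION (TREE_TYPE (narrow)) < TYPE_PRECISION (TREE_TYPE (op)))
    /* OP is merely a widened value of TREE_TYPE (narrow).  */;
#endif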
11922 /* Strip out all handled components that produce invariant
11923 offsets. */
11925 const_tree
11926 strip_invariant_refs (const_tree op)
11928 while (handled_component_p (op))
11930 switch (TREE_CODE (op))
11932 case ARRAY_REF:
11933 case ARRAY_RANGE_REF:
11934 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11935 || TREE_OPERAND (op, 2) != NULL_TREE
11936 || TREE_OPERAND (op, 3) != NULL_TREE)
11937 return NULL;
11938 break;
11940 case COMPONENT_REF:
11941 if (TREE_OPERAND (op, 2) != NULL_TREE)
11942 return NULL;
11943 break;
11945 default:;
11947 op = TREE_OPERAND (op, 0);
11950 return op;
11953 static GTY(()) tree gcc_eh_personality_decl;
11955 /* Return the GCC personality function decl. */
11957 tree
11958 lhd_gcc_personality (void)
11960 if (!gcc_eh_personality_decl)
11961 gcc_eh_personality_decl = build_personality_function ("gcc");
11962 return gcc_eh_personality_decl;
11965 /* TARGET is a call target of GIMPLE call statement
11966 (obtained by gimple_call_fn). Return true if it is
11967 an OBJ_TYPE_REF representing a virtual call to a C++ method.
11968 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
11969 through a cast where middle-end devirtualization machinery
11970 can't apply.) FOR_DUMP_P is true when being called from
11971 the dump routines. */
11973 bool
11974 virtual_method_call_p (const_tree target, bool for_dump_p)
11976 if (TREE_CODE (target) != OBJ_TYPE_REF)
11977 return false;
11978 tree t = TREE_TYPE (target);
11979 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
11980 t = TREE_TYPE (t);
11981 if (TREE_CODE (t) == FUNCTION_TYPE)
11982 return false;
11983 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
11984 /* If we do not have a BINFO associated, it means that the type was built
11985 without devirtualization enabled. Do not consider this a virtual
11986 call. */
11987 if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
11988 return false;
11989 return true;
11992 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
11994 static tree
11995 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
11997 unsigned int i;
11998 tree base_binfo, b;
12000 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12001 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12002 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12003 return base_binfo;
12004 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12005 return b;
12006 return NULL;
12009 /* Try to find a base info of BINFO that would have its field decl at offset
12010 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12011 found, return it, otherwise return NULL_TREE. */
12013 tree
12014 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12016 tree type = BINFO_TYPE (binfo);
12018 while (true)
12020 HOST_WIDE_INT pos, size;
12021 tree fld;
12022 int i;
12024 if (types_same_for_odr (type, expected_type))
12025 return binfo;
12026 if (maybe_lt (offset, 0))
12027 return NULL_TREE;
12029 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12031 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12032 continue;
12034 pos = int_bit_position (fld);
12035 size = tree_to_uhwi (DECL_SIZE (fld));
12036 if (known_in_range_p (offset, pos, size))
12037 break;
12039 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12040 return NULL_TREE;
12042 /* Offset 0 indicates the primary base, whose vtable contents are
12043 represented in the binfo for the derived class. */
12044 else if (maybe_ne (offset, 0))
12046 tree found_binfo = NULL, base_binfo;
12047 /* Offsets in BINFO are in bytes relative to the whole structure
12048 while POS is in bits relative to the containing field. */
12049 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12050 / BITS_PER_UNIT);
12052 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12053 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12054 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12056 found_binfo = base_binfo;
12057 break;
12059 if (found_binfo)
12060 binfo = found_binfo;
12061 else
12062 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12063 binfo_offset);
12066 type = TREE_TYPE (fld);
12067 offset -= pos;
12071 /* Returns true if X is a typedef decl. */
12073 bool
12074 is_typedef_decl (const_tree x)
12076 return (x && TREE_CODE (x) == TYPE_DECL
12077 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12080 /* Returns true iff TYPE is a type variant created for a typedef. */
12082 bool
12083 typedef_variant_p (const_tree type)
12085 return is_typedef_decl (TYPE_NAME (type));
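/* Illustrative sketch (hypothetical caller, not compiled): diagnostics often
   want the type a typedef refers to; for a typedef variant it can be read
   off the TYPE_DECL recorded in TYPE_NAME.  */
#if 0
  if (typedef_variant_p (type))
    type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
#endif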
12088 /* PR 84195: Replace control characters in "unescaped" with their
12089 escaped equivalents. Allow newlines if -fmessage-length has
12090 been set to a non-zero value. This is done here, rather than
12091 where the attribute is recorded, as the message length can
12092 change between these two locations. */
12094 void
12095 escaped_string::escape (const char *unescaped)
12097 char *escaped;
12098 size_t i, new_i, len;
12100 if (m_owned)
12101 free (m_str);
12103 m_str = const_cast<char *> (unescaped);
12104 m_owned = false;
12106 if (unescaped == NULL || *unescaped == 0)
12107 return;
12109 len = strlen (unescaped);
12110 escaped = NULL;
12111 new_i = 0;
12113 for (i = 0; i < len; i++)
12115 char c = unescaped[i];
12117 if (!ISCNTRL (c))
12119 if (escaped)
12120 escaped[new_i++] = c;
12121 continue;
12124 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
12126 if (escaped == NULL)
12128 /* We only allocate space for a new string if we
12129 actually encounter a control character that
12130 needs replacing. */
12131 escaped = (char *) xmalloc (len * 2 + 1);
12132 strncpy (escaped, unescaped, i);
12133 new_i = i;
12136 escaped[new_i++] = '\\';
12138 switch (c)
12140 case '\a': escaped[new_i++] = 'a'; break;
12141 case '\b': escaped[new_i++] = 'b'; break;
12142 case '\f': escaped[new_i++] = 'f'; break;
12143 case '\n': escaped[new_i++] = 'n'; break;
12144 case '\r': escaped[new_i++] = 'r'; break;
12145 case '\t': escaped[new_i++] = 't'; break;
12146 case '\v': escaped[new_i++] = 'v'; break;
12147 default: escaped[new_i++] = '?'; break;
12150 else if (escaped)
12151 escaped[new_i++] = c;
12154 if (escaped)
12156 escaped[new_i] = 0;
12157 m_str = escaped;
12158 m_owned = true;
12162 /* Warn about a use of an identifier which was marked deprecated. Returns
12163 whether a warning was given. */
12165 bool
12166 warn_deprecated_use (tree node, tree attr)
12168 escaped_string msg;
12170 if (node == 0 || !warn_deprecated_decl)
12171 return false;
12173 if (!attr)
12175 if (DECL_P (node))
12176 attr = DECL_ATTRIBUTES (node);
12177 else if (TYPE_P (node))
12179 tree decl = TYPE_STUB_DECL (node);
12180 if (decl)
12181 attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12182 else if ((decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node)))
12183 != NULL_TREE)
12185 node = TREE_TYPE (decl);
12186 attr = TYPE_ATTRIBUTES (node);
12191 if (attr)
12192 attr = lookup_attribute ("deprecated", attr);
12194 if (attr)
12195 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12197 bool w = false;
12198 if (DECL_P (node))
12200 auto_diagnostic_group d;
12201 if (msg)
12202 w = warning (OPT_Wdeprecated_declarations,
12203 "%qD is deprecated: %s", node, (const char *) msg);
12204 else
12205 w = warning (OPT_Wdeprecated_declarations,
12206 "%qD is deprecated", node);
12207 if (w)
12208 inform (DECL_SOURCE_LOCATION (node), "declared here");
12210 else if (TYPE_P (node))
12212 tree what = NULL_TREE;
12213 tree decl = TYPE_STUB_DECL (node);
12215 if (TYPE_NAME (node))
12217 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12218 what = TYPE_NAME (node);
12219 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12220 && DECL_NAME (TYPE_NAME (node)))
12221 what = DECL_NAME (TYPE_NAME (node));
12224 auto_diagnostic_group d;
12225 if (what)
12227 if (msg)
12228 w = warning (OPT_Wdeprecated_declarations,
12229 "%qE is deprecated: %s", what, (const char *) msg);
12230 else
12231 w = warning (OPT_Wdeprecated_declarations,
12232 "%qE is deprecated", what);
12234 else
12236 if (msg)
12237 w = warning (OPT_Wdeprecated_declarations,
12238 "type is deprecated: %s", (const char *) msg);
12239 else
12240 w = warning (OPT_Wdeprecated_declarations,
12241 "type is deprecated");
12244 if (w && decl)
12245 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12248 return w;
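/* Illustrative sketch (hypothetical caller, not compiled): front ends
   typically call this when a decl carrying the "deprecated" attribute is
   used; passing NULL_TREE for ATTR lets the function look the attribute up
   itself.  */
#if 0
  if (TREE_DEPRECATED (decl))
    warn_deprecated_use (decl, NULL_TREE);
#endif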
12251 /* Error out with an identifier which was marked 'unavailable'. */
12252 void
12253 error_unavailable_use (tree node, tree attr)
12255 escaped_string msg;
12257 if (node == 0)
12258 return;
12260 if (!attr)
12262 if (DECL_P (node))
12263 attr = DECL_ATTRIBUTES (node);
12264 else if (TYPE_P (node))
12266 tree decl = TYPE_STUB_DECL (node);
12267 if (decl)
12268 attr = lookup_attribute ("unavailable",
12269 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12273 if (attr)
12274 attr = lookup_attribute ("unavailable", attr);
12276 if (attr)
12277 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12279 if (DECL_P (node))
12281 auto_diagnostic_group d;
12282 if (msg)
12283 error ("%qD is unavailable: %s", node, (const char *) msg);
12284 else
12285 error ("%qD is unavailable", node);
12286 inform (DECL_SOURCE_LOCATION (node), "declared here");
12288 else if (TYPE_P (node))
12290 tree what = NULL_TREE;
12291 tree decl = TYPE_STUB_DECL (node);
12293 if (TYPE_NAME (node))
12295 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12296 what = TYPE_NAME (node);
12297 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12298 && DECL_NAME (TYPE_NAME (node)))
12299 what = DECL_NAME (TYPE_NAME (node));
12302 auto_diagnostic_group d;
12303 if (what)
12305 if (msg)
12306 error ("%qE is unavailable: %s", what, (const char *) msg);
12307 else
12308 error ("%qE is unavailable", what);
12310 else
12312 if (msg)
12313 error ("type is unavailable: %s", (const char *) msg);
12314 else
12315 error ("type is unavailable");
12318 if (decl)
12319 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12323 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12324 somewhere in it. */
12326 bool
12327 contains_bitfld_component_ref_p (const_tree ref)
12329 while (handled_component_p (ref))
12331 if (TREE_CODE (ref) == COMPONENT_REF
12332 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12333 return true;
12334 ref = TREE_OPERAND (ref, 0);
12337 return false;
12340 /* Try to determine whether a TRY_CATCH expression can fall through.
12341 This is a subroutine of block_may_fallthru. */
12343 static bool
12344 try_catch_may_fallthru (const_tree stmt)
12346 tree_stmt_iterator i;
12348 /* If the TRY block can fall through, the whole TRY_CATCH can
12349 fall through. */
12350 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12351 return true;
12353 i = tsi_start (TREE_OPERAND (stmt, 1));
12354 switch (TREE_CODE (tsi_stmt (i)))
12356 case CATCH_EXPR:
12357 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12358 catch expression and a body. The whole TRY_CATCH may fall
12359 through iff any of the catch bodies falls through. */
12360 for (; !tsi_end_p (i); tsi_next (&i))
12362 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12363 return true;
12365 return false;
12367 case EH_FILTER_EXPR:
12368 /* The exception filter expression only matters if there is an
12369 exception. If the exception does not match EH_FILTER_TYPES,
12370 we will execute EH_FILTER_FAILURE, and we will fall through
12371 if that falls through. If the exception does match
12372 EH_FILTER_TYPES, the stack unwinder will continue up the
12373 stack, so we will not fall through. We don't know whether we
12374 will throw an exception which matches EH_FILTER_TYPES or not,
12375 so we just ignore EH_FILTER_TYPES and assume that we might
12376 throw an exception which doesn't match. */
12377 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12379 default:
12380 /* This case represents statements to be executed when an
12381 exception occurs. Those statements are implicitly followed
12382 by a RESX statement to resume execution after the exception.
12383 So in this case the TRY_CATCH never falls through. */
12384 return false;
12388 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12389 need not be 100% accurate; simply be conservative and return true if we
12390 don't know. This is used only to avoid stupidly generating extra code.
12391 If we're wrong, we'll just delete the extra code later. */
12393 bool
12394 block_may_fallthru (const_tree block)
12396 /* This CONST_CAST is okay because expr_last returns its argument
12397 unmodified and we assign it to a const_tree. */
12398 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12400 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12402 case GOTO_EXPR:
12403 case RETURN_EXPR:
12404 /* Easy cases. If the last statement of the block implies
12405 control transfer, then we can't fall through. */
12406 return false;
12408 case SWITCH_EXPR:
12409 /* If there is a default: label or case labels cover all possible
12410 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12411 to some case label in all cases and all we care is whether the
12412 SWITCH_BODY falls through. */
12413 if (SWITCH_ALL_CASES_P (stmt))
12414 return block_may_fallthru (SWITCH_BODY (stmt));
12415 return true;
12417 case COND_EXPR:
12418 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12419 return true;
12420 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12422 case BIND_EXPR:
12423 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12425 case TRY_CATCH_EXPR:
12426 return try_catch_may_fallthru (stmt);
12428 case TRY_FINALLY_EXPR:
12429 /* The finally clause is always executed after the try clause,
12430 so if it does not fall through, then the try-finally will not
12431 fall through. Otherwise, if the try clause does not fall
12432 through, then when the finally clause falls through it will
12433 resume execution wherever the try clause was going. So the
12434 whole try-finally will only fall through if both the try
12435 clause and the finally clause fall through. */
12436 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12437 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12439 case EH_ELSE_EXPR:
12440 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12442 case MODIFY_EXPR:
12443 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12444 stmt = TREE_OPERAND (stmt, 1);
12445 else
12446 return true;
12447 /* FALLTHRU */
12449 case CALL_EXPR:
12450 /* Functions that do not return do not fall through. */
12451 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12453 case CLEANUP_POINT_EXPR:
12454 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12456 case TARGET_EXPR:
12457 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12459 case ERROR_MARK:
12460 return true;
12462 default:
12463 return lang_hooks.block_may_fallthru (stmt);
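/* Illustrative sketch (hypothetical caller, not compiled): a front end might
   use block_may_fallthru to decide whether an implicit return has to be
   appended to a function body; append_implicit_return is a made-up helper.  */
#if 0
  if (block_may_fallthru (DECL_SAVED_TREE (fndecl)))
    append_implicit_return (fndecl);
#endif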
12467 /* True if we are using EH to handle cleanups. */
12468 static bool using_eh_for_cleanups_flag = false;
12470 /* This routine is called from front ends to indicate eh should be used for
12471 cleanups. */
12472 void
12473 using_eh_for_cleanups (void)
12475 using_eh_for_cleanups_flag = true;
12478 /* Query whether EH is used for cleanups. */
12479 bool
12480 using_eh_for_cleanups_p (void)
12482 return using_eh_for_cleanups_flag;
12485 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12486 const char *
12487 get_tree_code_name (enum tree_code code)
12489 const char *invalid = "<invalid tree code>";
12491 /* The tree_code enum promotes to signed, but we could be getting
12492 invalid values, so force an unsigned comparison. */
12493 if (unsigned (code) >= MAX_TREE_CODES)
12495 if ((unsigned)code == 0xa5a5)
12496 return "ggc_freed";
12497 return invalid;
12500 return tree_code_name[code];
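/* Illustrative sketch (hypothetical caller, not compiled): useful in ad-hoc
   debugging output, since it stays safe even for invalid or freed codes.  */
#if 0
  fprintf (stderr, "code of t: %s\n", get_tree_code_name (TREE_CODE (t)));
#endif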
12503 /* Drops the TREE_OVERFLOW flag from T. */
12505 tree
12506 drop_tree_overflow (tree t)
12508 gcc_checking_assert (TREE_OVERFLOW (t));
12510 /* For tree codes with a sharing machinery re-build the result. */
12511 if (poly_int_tree_p (t))
12512 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12514 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12515 and canonicalize the result. */
12516 if (TREE_CODE (t) == VECTOR_CST)
12518 tree_vector_builder builder;
12519 builder.new_unary_operation (TREE_TYPE (t), t, true);
12520 unsigned int count = builder.encoded_nelts ();
12521 for (unsigned int i = 0; i < count; ++i)
12523 tree elt = VECTOR_CST_ELT (t, i);
12524 if (TREE_OVERFLOW (elt))
12525 elt = drop_tree_overflow (elt);
12526 builder.quick_push (elt);
12528 return builder.build ();
12531 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12532 and drop the flag. */
12533 t = copy_node (t);
12534 TREE_OVERFLOW (t) = 0;
12536 /* For constants that contain nested constants, drop the flag
12537 from those as well. */
12538 if (TREE_CODE (t) == COMPLEX_CST)
12540 if (TREE_OVERFLOW (TREE_REALPART (t)))
12541 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12542 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12543 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12546 return t;
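/* Illustrative sketch (hypothetical caller, not compiled): the usual pattern
   is to clear the flag from a folded constant before it is used somewhere
   that must not see TREE_OVERFLOW.  */
#if 0
  if (TREE_OVERFLOW_P (cst))
    cst = drop_tree_overflow (cst);
#endif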
12549 /* Given a memory reference expression T, return its base address.
12550 The base address of a memory reference expression is the main
12551 object being referenced. For instance, the base address for
12552 'array[i].fld[j]' is 'array'. You can think of this as stripping
12553 away the offset part from a memory address.
12555 This function calls handled_component_p to strip away all the inner
12556 parts of the memory reference until it reaches the base object. */
12558 tree
12559 get_base_address (tree t)
12561 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12562 t = TREE_OPERAND (t, 0);
12563 while (handled_component_p (t))
12564 t = TREE_OPERAND (t, 0);
12566 if ((TREE_CODE (t) == MEM_REF
12567 || TREE_CODE (t) == TARGET_MEM_REF)
12568 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12569 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12571 return t;
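/* Illustrative sketch (hypothetical caller, not compiled): a pass that wants
   to know whether reference REF ultimately accesses a declared read-only
   object can inspect the base address.  */
#if 0
  tree base = get_base_address (ref);
  if (base && DECL_P (base) && TREE_READONLY (base))
    /* REF reads from a read-only declaration.  */;
#endif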
12574 /* Return a tree of sizetype representing the size, in bytes, of the element
12575 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12577 tree
12578 array_ref_element_size (tree exp)
12580 tree aligned_size = TREE_OPERAND (exp, 3);
12581 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12582 location_t loc = EXPR_LOCATION (exp);
12584 /* If a size was specified in the ARRAY_REF, it's the size measured
12585 in alignment units of the element type. So multiply by that value. */
12586 if (aligned_size)
12588 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12589 sizetype from another type of the same width and signedness. */
12590 if (TREE_TYPE (aligned_size) != sizetype)
12591 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12592 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12593 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12596 /* Otherwise, take the size from that of the element type. Substitute
12597 any PLACEHOLDER_EXPR that we have. */
12598 else
12599 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12602 /* Return a tree representing the lower bound of the array mentioned in
12603 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12605 tree
12606 array_ref_low_bound (tree exp)
12608 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12610 /* If a lower bound is specified in EXP, use it. */
12611 if (TREE_OPERAND (exp, 2))
12612 return TREE_OPERAND (exp, 2);
12614 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12615 substituting for a PLACEHOLDER_EXPR as needed. */
12616 if (domain_type && TYPE_MIN_VALUE (domain_type))
12617 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12619 /* Otherwise, return a zero of the appropriate type. */
12620 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12621 return (idxtype == error_mark_node
12622 ? integer_zero_node : build_int_cst (idxtype, 0));
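/* Illustrative sketch (hypothetical caller, not compiled): together these
   helpers give the byte offset of an ARRAY_REF element, roughly in the way
   get_inner_reference computes offsets.  EXP is assumed to be an ARRAY_REF.  */
#if 0
  tree index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
  tree low = fold_convert (sizetype, array_ref_low_bound (exp));
  tree byte_off = size_binop (MULT_EXPR,
			      size_binop (MINUS_EXPR, index, low),
			      array_ref_element_size (exp));
#endif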
12625 /* Return a tree representing the upper bound of the array mentioned in
12626 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12628 tree
12629 array_ref_up_bound (tree exp)
12631 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12633 /* If there is a domain type and it has an upper bound, use it, substituting
12634 for a PLACEHOLDER_EXPR as needed. */
12635 if (domain_type && TYPE_MAX_VALUE (domain_type))
12636 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12638 /* Otherwise fail. */
12639 return NULL_TREE;
12642 /* Returns true if REF is an array reference, component reference,
12643 or memory reference to an array at the end of a structure.
12644 If this is the case, the array may be allocated larger
12645 than its upper bound implies. */
12647 bool
12648 array_at_struct_end_p (tree ref)
12650 tree atype;
12652 if (TREE_CODE (ref) == ARRAY_REF
12653 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12655 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12656 ref = TREE_OPERAND (ref, 0);
12658 else if (TREE_CODE (ref) == COMPONENT_REF
12659 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12660 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12661 else if (TREE_CODE (ref) == MEM_REF)
12663 tree arg = TREE_OPERAND (ref, 0);
12664 if (TREE_CODE (arg) == ADDR_EXPR)
12665 arg = TREE_OPERAND (arg, 0);
12666 tree argtype = TREE_TYPE (arg);
12667 if (TREE_CODE (argtype) == RECORD_TYPE)
12669 if (tree fld = last_field (argtype))
12671 atype = TREE_TYPE (fld);
12672 if (TREE_CODE (atype) != ARRAY_TYPE)
12673 return false;
12674 if (VAR_P (arg) && DECL_SIZE (fld))
12675 return false;
12677 else
12678 return false;
12680 else
12681 return false;
12683 else
12684 return false;
12686 if (TREE_CODE (ref) == STRING_CST)
12687 return false;
12689 tree ref_to_array = ref;
12690 while (handled_component_p (ref))
12692 /* If the reference chain contains a component reference to a
12693 non-union type and there follows another field the reference
12694 is not at the end of a structure. */
12695 if (TREE_CODE (ref) == COMPONENT_REF)
12697 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12699 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12700 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12701 nextf = DECL_CHAIN (nextf);
12702 if (nextf)
12703 return false;
12706 /* If we have a multi-dimensional array we do not consider
12707 a non-innermost dimension as flex array if the whole
12708 multi-dimensional array is at struct end.
12709 Same for an array of aggregates with a trailing array
12710 member. */
12711 else if (TREE_CODE (ref) == ARRAY_REF)
12712 return false;
12713 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12715 /* If we view an underlying object as sth else then what we
12716 gathered up to now is what we have to rely on. */
12717 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12718 break;
12719 else
12720 gcc_unreachable ();
12722 ref = TREE_OPERAND (ref, 0);
12725 /* The array now is at struct end. Treat flexible arrays as
12726 always subject to extension, even into just padding constrained by
12727 an underlying decl. */
12728 if (! TYPE_SIZE (atype)
12729 || ! TYPE_DOMAIN (atype)
12730 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12731 return true;
12733 /* If the reference is based on a declared entity, the size of the array
12734 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
12735 ref = get_base_address (ref);
12736 if (ref
12737 && DECL_P (ref)
12738 && !(flag_unconstrained_commons
12739 && VAR_P (ref) && DECL_COMMON (ref))
12740 && DECL_SIZE_UNIT (ref)
12741 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12743 /* Check whether the array domain covers all of the available
12744 padding. */
12745 poly_int64 offset;
12746 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12747 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12748 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12749 return true;
12750 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12751 return true;
12753 /* If at least one extra element fits it is a flexarray. */
12754 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12755 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
12756 + 2)
12757 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
12758 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
12759 return true;
12761 return false;
12764 return true;
12767 /* Return a tree representing the offset, in bytes, of the field referenced
12768 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12770 tree
12771 component_ref_field_offset (tree exp)
12773 tree aligned_offset = TREE_OPERAND (exp, 2);
12774 tree field = TREE_OPERAND (exp, 1);
12775 location_t loc = EXPR_LOCATION (exp);
12777 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12778 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12779 value. */
12780 if (aligned_offset)
12782 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12783 sizetype from another type of the same width and signedness. */
12784 if (TREE_TYPE (aligned_offset) != sizetype)
12785 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12786 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12787 size_int (DECL_OFFSET_ALIGN (field)
12788 / BITS_PER_UNIT));
12791 /* Otherwise, take the offset from that of the field. Substitute
12792 any PLACEHOLDER_EXPR that we have. */
12793 else
12794 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12797 /* Given the initializer INIT, return the initializer for the field
12798 DECL if it exists, otherwise null. Used to obtain the initializer
12799 for a flexible array member and determine its size. */
12801 static tree
12802 get_initializer_for (tree init, tree decl)
12804 STRIP_NOPS (init);
12806 tree fld, fld_init;
12807 unsigned HOST_WIDE_INT i;
12808 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
12810 if (decl == fld)
12811 return fld_init;
12813 if (TREE_CODE (fld) == CONSTRUCTOR)
12815 fld_init = get_initializer_for (fld_init, decl);
12816 if (fld_init)
12817 return fld_init;
12821 return NULL_TREE;
12824 /* Determines the size of the member referenced by the COMPONENT_REF
12825 REF, using its initializer expression if necessary in order to
12826 determine the size of an initialized flexible array member.
12827 If non-null, set *SAM when REF refers to an interior zero-length
12828 array or a trailing one-element array.
12829 Returns the size as sizetype (which might be zero for an object
12830 with an uninitialized flexible array member) or null if the size
12831 cannot be determined. */
12833 tree
12834 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
12836 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
12838 special_array_member sambuf;
12839 if (!sam)
12840 sam = &sambuf;
12841 *sam = special_array_member::none;
12843 /* The object/argument referenced by the COMPONENT_REF and its type. */
12844 tree arg = TREE_OPERAND (ref, 0);
12845 tree argtype = TREE_TYPE (arg);
12846 /* The referenced member. */
12847 tree member = TREE_OPERAND (ref, 1);
12849 tree memsize = DECL_SIZE_UNIT (member);
12850 if (memsize)
12852 tree memtype = TREE_TYPE (member);
12853 if (TREE_CODE (memtype) != ARRAY_TYPE)
12854 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
12855 to the type of a class with a virtual base which doesn't
12856 reflect the size of the virtual's members (see pr97595).
12857 If that's the case fail for now and implement something
12858 more robust in the future. */
12859 return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
12860 ? memsize : NULL_TREE);
12862 bool trailing = array_at_struct_end_p (ref);
12863 bool zero_length = integer_zerop (memsize);
12864 if (!trailing && !zero_length)
12865 /* MEMBER is either an interior array or an array with
12866 more than one element. */
12867 return memsize;
12869 if (zero_length)
12871 if (trailing)
12872 *sam = special_array_member::trail_0;
12873 else
12875 *sam = special_array_member::int_0;
12876 memsize = NULL_TREE;
12880 if (!zero_length)
12881 if (tree dom = TYPE_DOMAIN (memtype))
12882 if (tree min = TYPE_MIN_VALUE (dom))
12883 if (tree max = TYPE_MAX_VALUE (dom))
12884 if (TREE_CODE (min) == INTEGER_CST
12885 && TREE_CODE (max) == INTEGER_CST)
12887 offset_int minidx = wi::to_offset (min);
12888 offset_int maxidx = wi::to_offset (max);
12889 offset_int neltsm1 = maxidx - minidx;
12890 if (neltsm1 > 0)
12891 /* MEMBER is an array with more than one element. */
12892 return memsize;
12894 if (neltsm1 == 0)
12895 *sam = special_array_member::trail_1;
12898 /* For a reference to a zero- or one-element array member of a union
12899 use the size of the union instead of the size of the member. */
12900 if (TREE_CODE (argtype) == UNION_TYPE)
12901 memsize = TYPE_SIZE_UNIT (argtype);
12904 /* MEMBER is either a bona fide flexible array member, or a zero-length
12905 array member, or an array of length one treated as such. */
12907 /* If the reference is to a declared object and the member a true
12908 flexible array, try to determine its size from its initializer. */
12909 poly_int64 baseoff = 0;
12910 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
12911 if (!base || !VAR_P (base))
12913 if (*sam != special_array_member::int_0)
12914 return NULL_TREE;
12916 if (TREE_CODE (arg) != COMPONENT_REF)
12917 return NULL_TREE;
12919 base = arg;
12920 while (TREE_CODE (base) == COMPONENT_REF)
12921 base = TREE_OPERAND (base, 0);
12922 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
12925 /* BASE is the declared object of which MEMBER is either a member
12926 or that is cast to ARGTYPE (e.g., a char buffer used to store
12927 an ARGTYPE object). */
12928 tree basetype = TREE_TYPE (base);
12930 /* Determine the base type of the referenced object. If it's
12931 the same as ARGTYPE and MEMBER has a known size, return it. */
12932 tree bt = basetype;
12933 if (*sam != special_array_member::int_0)
12934 while (TREE_CODE (bt) == ARRAY_TYPE)
12935 bt = TREE_TYPE (bt);
12936 bool typematch = useless_type_conversion_p (argtype, bt);
12937 if (memsize && typematch)
12938 return memsize;
12940 memsize = NULL_TREE;
12942 if (typematch)
12943 /* MEMBER is a true flexible array member. Compute its size from
12944 the initializer of the BASE object if it has one. */
12945 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
12946 if (init != error_mark_node)
12948 init = get_initializer_for (init, member);
12949 if (init)
12951 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
12952 if (tree refsize = TYPE_SIZE_UNIT (argtype))
12954 /* Use the larger of the initializer size and the tail
12955 padding in the enclosing struct. */
12956 poly_int64 rsz = tree_to_poly_int64 (refsize);
12957 rsz -= baseoff;
12958 if (known_lt (tree_to_poly_int64 (memsize), rsz))
12959 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
12962 baseoff = 0;
12966 if (!memsize)
12968 if (typematch)
12970 if (DECL_P (base)
12971 && DECL_EXTERNAL (base)
12972 && bt == basetype
12973 && *sam != special_array_member::int_0)
12974 /* The size of a flexible array member of an extern struct
12975 with no initializer cannot be determined (it's defined
12976 in another translation unit and can have an initializer
12977 with an arbitrary number of elements). */
12978 return NULL_TREE;
12980 /* Use the size of the base struct or, for interior zero-length
12981 arrays, the size of the enclosing type. */
12982 memsize = TYPE_SIZE_UNIT (bt);
12984 else if (DECL_P (base))
12985 /* Use the size of the BASE object (possibly an array of some
12986 other type such as char used to store the struct). */
12987 memsize = DECL_SIZE_UNIT (base);
12988 else
12989 return NULL_TREE;
12992 /* If the flexible array member has a known size use the greater
12993 of it and the tail padding in the enclosing struct.
12994 Otherwise, when the size of the flexible array member is unknown
12995 and the referenced object is not a struct, use the size of its
12996 type when known. This detects sizes of array buffers when cast
12997 to struct types with flexible array members. */
12998 if (memsize)
13000 if (!tree_fits_poly_int64_p (memsize))
13001 return NULL_TREE;
13002 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13003 if (known_lt (baseoff, memsz64))
13005 memsz64 -= baseoff;
13006 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13008 return size_zero_node;
13011 /* Return "don't know" for an external non-array object since its
13012 flexible array member can be initialized to have any number of
13013 elements. Otherwise, return zero because the flexible array
13014 member has no elements. */
13015 return (DECL_P (base)
13016 && DECL_EXTERNAL (base)
13017 && (!typematch
13018 || TREE_CODE (basetype) != ARRAY_TYPE)
13019 ? NULL_TREE : size_zero_node);
13022 /* Return the machine mode of T. For vectors, returns the mode of the
13023 inner type. The main use case is to feed the result to HONOR_NANS,
13024 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13026 machine_mode
13027 element_mode (const_tree t)
13029 if (!TYPE_P (t))
13030 t = TREE_TYPE (t);
13031 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13032 t = TREE_TYPE (t);
13033 return TYPE_MODE (t);
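/* Illustrative sketch (hypothetical caller, not compiled): the typical
   consumer is a NaN or signed-zero check that must also work for vector and
   complex types.  */
#if 0
  if (HONOR_NANS (element_mode (type)))
    /* Cannot fold x != x to false here.  */;
#endif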
13036 /* Vector types need to re-check the target flags each time we report
13037 the machine mode. We need to do this because attribute target can
13038 change the result of vector_mode_supported_p and have_regs_of_mode
13039 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13040 change on a per-function basis. */
13041 /* ??? Possibly a better solution is to run through all the types
13042 referenced by a function and re-compute the TYPE_MODE once, rather
13043 than make the TYPE_MODE macro call a function. */
13045 machine_mode
13046 vector_type_mode (const_tree t)
13048 machine_mode mode;
13050 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13052 mode = t->type_common.mode;
13053 if (VECTOR_MODE_P (mode)
13054 && (!targetm.vector_mode_supported_p (mode)
13055 || !have_regs_of_mode[mode]))
13057 scalar_int_mode innermode;
13059 /* For integers, try mapping it to a same-sized scalar mode. */
13060 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13062 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13063 * GET_MODE_BITSIZE (innermode));
13064 scalar_int_mode mode;
13065 if (int_mode_for_size (size, 0).exists (&mode)
13066 && have_regs_of_mode[mode])
13067 return mode;
13070 return BLKmode;
13073 return mode;
13076 /* Return the size in bits of each element of vector type TYPE. */
13078 unsigned int
13079 vector_element_bits (const_tree type)
13081 gcc_checking_assert (VECTOR_TYPE_P (type));
13082 if (VECTOR_BOOLEAN_TYPE_P (type))
13083 return TYPE_PRECISION (TREE_TYPE (type));
13084 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
13087 /* Calculate the size in bits of each element of vector type TYPE
13088 and return the result as a tree of type bitsizetype. */
13090 tree
13091 vector_element_bits_tree (const_tree type)
13093 gcc_checking_assert (VECTOR_TYPE_P (type));
13094 if (VECTOR_BOOLEAN_TYPE_P (type))
13095 return bitsize_int (vector_element_bits (type));
13096 return TYPE_SIZE (TREE_TYPE (type));
13099 /* Verify that basic properties of T match TV and thus T can be a variant of
13100 TV. TV should be the more specified variant (i.e. the main variant). */
13102 static bool
13103 verify_type_variant (const_tree t, tree tv)
13105 /* Type variants can differ by:
13107 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13108 ENCODE_QUAL_ADDR_SPACE.
13109 - the main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P;
13110 in this case some values may not be set in the variant types
13111 (see TYPE_COMPLETE_P checks).
13112 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13113 - by TYPE_NAME and attributes (i.e. when a variant originates from a typedef)
13114 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13115 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13116 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13117 this is necessary to make it possible to merge types from different TUs
13118 - arrays, pointers and references may have TREE_TYPE that is a variant
13119 of TREE_TYPE of their main variants.
13120 - aggregates may have a new TYPE_FIELDS list that lists variants of
13121 the main variant TYPE_FIELDS.
13122 - vector types may differ by TYPE_VECTOR_OPAQUE
13125 /* Convenience macro for matching individual fields. */
13126 #define verify_variant_match(flag) \
13127 do { \
13128 if (flag (tv) != flag (t)) \
13130 error ("type variant differs by %s", #flag); \
13131 debug_tree (tv); \
13132 return false; \
13134 } while (false)
13136 /* tree_base checks. */
13138 verify_variant_match (TREE_CODE);
13139 /* FIXME: Ada builds non-artificial variants of artificial types. */
13140 #if 0
13141 if (TYPE_ARTIFICIAL (tv))
13142 verify_variant_match (TYPE_ARTIFICIAL);
13143 #endif
13144 if (POINTER_TYPE_P (tv))
13145 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13146 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13147 verify_variant_match (TYPE_UNSIGNED);
13148 verify_variant_match (TYPE_PACKED);
13149 if (TREE_CODE (t) == REFERENCE_TYPE)
13150 verify_variant_match (TYPE_REF_IS_RVALUE);
13151 if (AGGREGATE_TYPE_P (t))
13152 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13153 else
13154 verify_variant_match (TYPE_SATURATING);
13155 /* FIXME: This check triggers during the libstdc++ build. */
13156 #if 0
13157 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
13158 verify_variant_match (TYPE_FINAL_P);
13159 #endif
13161 /* tree_type_common checks. */
13163 if (COMPLETE_TYPE_P (t))
13165 verify_variant_match (TYPE_MODE);
13166 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13167 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13168 verify_variant_match (TYPE_SIZE);
13169 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13170 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13171 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13173 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13174 TYPE_SIZE_UNIT (tv), 0));
13175 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13176 debug_tree (tv);
13177 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13178 debug_tree (TYPE_SIZE_UNIT (tv));
13179 error ("type%'s %<TYPE_SIZE_UNIT%>");
13180 debug_tree (TYPE_SIZE_UNIT (t));
13181 return false;
13183 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13185 verify_variant_match (TYPE_PRECISION);
13186 if (RECORD_OR_UNION_TYPE_P (t))
13187 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13188 else if (TREE_CODE (t) == ARRAY_TYPE)
13189 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13190 /* During LTO we merge variant lists from different translation units
13191 that may differ by TYPE_CONTEXT, which in turn may point
13192 to TRANSLATION_UNIT_DECL.
13193 Ada also builds variants of types with different TYPE_CONTEXT. */
13194 #if 0
13195 if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13196 verify_variant_match (TYPE_CONTEXT);
13197 #endif
13198 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13199 verify_variant_match (TYPE_STRING_FLAG);
13200 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13201 verify_variant_match (TYPE_CXX_ODR_P);
13202 if (TYPE_ALIAS_SET_KNOWN_P (t))
13204 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13205 debug_tree (tv);
13206 return false;
13209 /* tree_type_non_common checks. */
13211 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13212 and dangles the pointer from time to time. */
13213 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13214 && (in_lto_p || !TYPE_VFIELD (tv)
13215 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13217 error ("type variant has different %<TYPE_VFIELD%>");
13218 debug_tree (tv);
13219 return false;
13221 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13222 || TREE_CODE (t) == INTEGER_TYPE
13223 || TREE_CODE (t) == BOOLEAN_TYPE
13224 || TREE_CODE (t) == REAL_TYPE
13225 || TREE_CODE (t) == FIXED_POINT_TYPE)
13227 verify_variant_match (TYPE_MAX_VALUE);
13228 verify_variant_match (TYPE_MIN_VALUE);
13230 if (TREE_CODE (t) == METHOD_TYPE)
13231 verify_variant_match (TYPE_METHOD_BASETYPE);
13232 if (TREE_CODE (t) == OFFSET_TYPE)
13233 verify_variant_match (TYPE_OFFSET_BASETYPE);
13234 if (TREE_CODE (t) == ARRAY_TYPE)
13235 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13236 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13237 or even type's main variant. This is needed to make bootstrap pass
13238 and the bug seems new in GCC 5.
13239 C++ FE should be updated to make this consistent and we should check
13240 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13241 is a match with main variant.
13243 Also disable the check for Java for now because of a parser hack that builds
13244 first a dummy BINFO and then sometimes replaces it with the real BINFO in some
13245 of the copies. */
13246 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13247 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13248 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13249 Since there is no cheap way to tell a C++ type from a Java type w/o LTO, do checking
13250 at LTO time only. */
13251 && (in_lto_p && odr_type_p (t)))
13253 error ("type variant has different %<TYPE_BINFO%>");
13254 debug_tree (tv);
13255 error ("type variant%'s %<TYPE_BINFO%>");
13256 debug_tree (TYPE_BINFO (tv));
13257 error ("type%'s %<TYPE_BINFO%>");
13258 debug_tree (TYPE_BINFO (t));
13259 return false;
13262 /* Check various uses of TYPE_VALUES_RAW. */
13263 if (TREE_CODE (t) == ENUMERAL_TYPE
13264 && TYPE_VALUES (t))
13265 verify_variant_match (TYPE_VALUES);
13266 else if (TREE_CODE (t) == ARRAY_TYPE)
13267 verify_variant_match (TYPE_DOMAIN);
13268 /* Permit incomplete variants of a complete type. While FEs may complete
13269 all variants, this does not happen for C++ templates in all cases. */
13270 else if (RECORD_OR_UNION_TYPE_P (t)
13271 && COMPLETE_TYPE_P (t)
13272 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13274 tree f1, f2;
13276 /* Fortran builds qualified variants as new records with items of
13277 qualified type. Verify that they look the same. */
13278 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13279 f1 && f2;
13280 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13281 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13282 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13283 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13284 /* FIXME: gfc_nonrestricted_type builds all types as variants
13285 with the exception of pointer types. It deeply copies the type
13286 which means that we may end up with a variant type
13287 referring to a non-variant pointer. We may change it to
13288 produce types as variants, too, like
13289 objc_get_protocol_qualified_type does. */
13290 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13291 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13292 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13293 break;
13294 if (f1 || f2)
13296 error ("type variant has different %<TYPE_FIELDS%>");
13297 debug_tree (tv);
13298 error ("first mismatch is field");
13299 debug_tree (f1);
13300 error ("and field");
13301 debug_tree (f2);
13302 return false;
13305 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13306 verify_variant_match (TYPE_ARG_TYPES);
13307 /* For C++ the qualified variant of an array type is really an array type
13308 of the qualified TREE_TYPE.
13309 Objective-C builds variants of pointer types where the pointed-to type is a variant,
13310 too, in objc_get_protocol_qualified_type. */
13311 if (TREE_TYPE (t) != TREE_TYPE (tv)
13312 && ((TREE_CODE (t) != ARRAY_TYPE
13313 && !POINTER_TYPE_P (t))
13314 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13315 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13317 error ("type variant has different %<TREE_TYPE%>");
13318 debug_tree (tv);
13319 error ("type variant%'s %<TREE_TYPE%>");
13320 debug_tree (TREE_TYPE (tv));
13321 error ("type%'s %<TREE_TYPE%>");
13322 debug_tree (TREE_TYPE (t));
13323 return false;
13325 if (type_with_alias_set_p (t)
13326 && !gimple_canonical_types_compatible_p (t, tv, false))
13328 error ("type is not compatible with its variant");
13329 debug_tree (tv);
13330 error ("type variant%'s %<TREE_TYPE%>");
13331 debug_tree (TREE_TYPE (tv));
13332 error ("type%'s %<TREE_TYPE%>");
13333 debug_tree (TREE_TYPE (t));
13334 return false;
13336 return true;
13337 #undef verify_variant_match
13341 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13342 the middle-end types_compatible_p function. It needs to avoid
13343 claiming types are different for types that should be treated
13344 the same with respect to TBAA. Canonical types are also used
13345 for IL consistency checks via the useless_type_conversion_p
13346 predicate which does not handle all type kinds itself but falls
13347 back to pointer-comparison of TYPE_CANONICAL for aggregates
13348 for example. */
13350 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13351 type calculation because we need to allow inter-operability between signed
13352 and unsigned variants. */
13354 bool
13355 type_with_interoperable_signedness (const_tree type)
13357 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13358 signed char and unsigned char. Similarly the Fortran FE builds
13359 C_SIZE_T as a signed type, while C defines it unsigned. */
13361 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13362 == INTEGER_TYPE
13363 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13364 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13367 /* Return true iff T1 and T2 are structurally identical as far as
13368 TBAA is concerned.
13369 This function is used both by lto.cc canonical type merging and by the
13370 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13371 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13372 only for LTO because only in these cases does TYPE_CANONICAL equivalence
13373 correspond to the one defined by gimple_canonical_types_compatible_p. */
13375 bool
13376 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13377 bool trust_type_canonical)
13379 /* Type variants should be the same as the main variant. When not doing sanity
13380 checking to verify this fact, go to main variants and save some work. */
13381 if (trust_type_canonical)
13383 t1 = TYPE_MAIN_VARIANT (t1);
13384 t2 = TYPE_MAIN_VARIANT (t2);
13387 /* Check first for the obvious case of pointer identity. */
13388 if (t1 == t2)
13389 return true;
13391 /* Check that we have two types to compare. */
13392 if (t1 == NULL_TREE || t2 == NULL_TREE)
13393 return false;
13395 /* We consider complete types always compatible with incomplete types.
13396 This does not make sense for canonical type calculation and thus we
13397 need to ensure that we are never called on it.
13399 FIXME: For more correctness the function probably should have three modes
13400 1) a mode assuming that types are complete, matching their structure
13401 2) mode allowing incomplete types but producing equivalence classes
13402 and thus ignoring all info from complete types
13403 3) mode allowing incomplete types to match complete but checking
13404 compatibility between complete types.
13406 1 and 2 can be used for canonical type calculation. 3 is the real
13407 definition of type compatibility that can be used e.g. for warnings during
13408 declaration merging. */
13410 gcc_assert (!trust_type_canonical
13411 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13413 /* If the types have been previously registered and found equal
13414 they still are. */
13416 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13417 && trust_type_canonical)
13419 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13420 they are always NULL, but they are set to non-NULL for types
13421 constructed by build_pointer_type and variants. In this case the
13422 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13423 all pointers are considered equal). Be sure to not return false
13424 negatives. */
13425 gcc_checking_assert (canonical_type_used_p (t1)
13426 && canonical_type_used_p (t2));
13427 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13430 /* For types where we do ODR based TBAA the canonical type is always
13431 set correctly, so we know that types are different if their
13432 canonical types do not match. */
13433 if (trust_type_canonical
13434 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13435 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13436 return false;
13438 /* Can't be the same type if the types don't have the same code. */
13439 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13440 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13441 return false;
13443 /* Qualifiers do not matter for canonical type comparison purposes. */
13445 /* Void types and nullptr types are always the same. */
13446 if (TREE_CODE (t1) == VOID_TYPE
13447 || TREE_CODE (t1) == NULLPTR_TYPE)
13448 return true;
13450 /* Can't be the same type if they have different modes. */
13451 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13452 return false;
13454 /* Non-aggregate types can be handled cheaply. */
13455 if (INTEGRAL_TYPE_P (t1)
13456 || SCALAR_FLOAT_TYPE_P (t1)
13457 || FIXED_POINT_TYPE_P (t1)
13458 || TREE_CODE (t1) == VECTOR_TYPE
13459 || TREE_CODE (t1) == COMPLEX_TYPE
13460 || TREE_CODE (t1) == OFFSET_TYPE
13461 || POINTER_TYPE_P (t1))
13463 /* Can't be the same type if they have different precision. */
13464 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13465 return false;
13467 /* In some cases the signed and unsigned types are required to be
13468 inter-operable. */
13469 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13470 && !type_with_interoperable_signedness (t1))
13471 return false;
13473 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13474 interoperable with "signed char". Unless all frontends are revisited
13475 to agree on these types, we must ignore the flag completely. */
13477 /* The Fortran standard defines the C_PTR type, which is compatible with every
13478 C pointer. For this reason we need to glob all pointers into one.
13479 Still, pointers in different address spaces are not compatible. */
13480 if (POINTER_TYPE_P (t1))
13482 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13483 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13484 return false;
13487 /* Tail-recurse to components. */
13488 if (TREE_CODE (t1) == VECTOR_TYPE
13489 || TREE_CODE (t1) == COMPLEX_TYPE)
13490 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13491 TREE_TYPE (t2),
13492 trust_type_canonical);
13494 return true;
13497 /* Do type-specific comparisons. */
13498 switch (TREE_CODE (t1))
13500 case ARRAY_TYPE:
13501 /* Array types are the same if the element types are the same and
13502 the number of elements is the same. */
13503 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13504 trust_type_canonical)
13505 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13506 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13507 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13508 return false;
13509 else
13511 tree i1 = TYPE_DOMAIN (t1);
13512 tree i2 = TYPE_DOMAIN (t2);
13514 /* For an incomplete external array, the type domain can be
13515 NULL_TREE. Check this condition also. */
13516 if (i1 == NULL_TREE && i2 == NULL_TREE)
13517 return true;
13518 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13519 return false;
13520 else
13522 tree min1 = TYPE_MIN_VALUE (i1);
13523 tree min2 = TYPE_MIN_VALUE (i2);
13524 tree max1 = TYPE_MAX_VALUE (i1);
13525 tree max2 = TYPE_MAX_VALUE (i2);
13527 /* The minimum/maximum values have to be the same. */
13528 if ((min1 == min2
13529 || (min1 && min2
13530 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13531 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13532 || operand_equal_p (min1, min2, 0))))
13533 && (max1 == max2
13534 || (max1 && max2
13535 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13536 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13537 || operand_equal_p (max1, max2, 0)))))
13538 return true;
13539 else
13540 return false;
13544 case METHOD_TYPE:
13545 case FUNCTION_TYPE:
13546 /* Function types are the same if the return type and argument types
13547 are the same. */
13548 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13549 trust_type_canonical))
13550 return false;
13552 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13553 return true;
13554 else
13556 tree parms1, parms2;
13558 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13559 parms1 && parms2;
13560 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13562 if (!gimple_canonical_types_compatible_p
13563 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13564 trust_type_canonical))
13565 return false;
13568 if (parms1 || parms2)
13569 return false;
13571 return true;
13574 case RECORD_TYPE:
13575 case UNION_TYPE:
13576 case QUAL_UNION_TYPE:
13578 tree f1, f2;
13580 /* Don't try to compare variants of an incomplete type, before
13581 TYPE_FIELDS has been copied around. */
13582 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13583 return true;
13586 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13587 return false;
13589 /* For aggregate types, all the fields must be the same. */
13590 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13591 f1 || f2;
13592 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13594 /* Skip non-fields and zero-sized fields. */
13595 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13596 || (DECL_SIZE (f1)
13597 && integer_zerop (DECL_SIZE (f1)))))
13598 f1 = TREE_CHAIN (f1);
13599 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13600 || (DECL_SIZE (f2)
13601 && integer_zerop (DECL_SIZE (f2)))))
13602 f2 = TREE_CHAIN (f2);
13603 if (!f1 || !f2)
13604 break;
13605 /* The fields must have the same offset, addressability and type. */
13606 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13607 || !gimple_compare_field_offset (f1, f2)
13608 || !gimple_canonical_types_compatible_p
13609 (TREE_TYPE (f1), TREE_TYPE (f2),
13610 trust_type_canonical))
13611 return false;
13614 /* If one aggregate has more fields than the other, they
13615 are not the same. */
13616 if (f1 || f2)
13617 return false;
13619 return true;
13622 default:
13623 /* Consider all types with language specific trees in them mutually
13624 compatible. This is executed only from verify_type and false
13625 positives can be tolerated. */
13626 gcc_assert (!in_lto_p);
13627 return true;
13631 /* Verify type T. */
13633 void
13634 verify_type (const_tree t)
13636 bool error_found = false;
13637 tree mv = TYPE_MAIN_VARIANT (t);
13638 if (!mv)
13640 error ("main variant is not defined");
13641 error_found = true;
13643 else if (mv != TYPE_MAIN_VARIANT (mv))
13645 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13646 debug_tree (mv);
13647 error_found = true;
13649 else if (t != mv && !verify_type_variant (t, mv))
13650 error_found = true;
13652 tree ct = TYPE_CANONICAL (t);
13653 if (!ct)
13655 else if (TYPE_CANONICAL (ct) != ct)
13657 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13658 debug_tree (ct);
13659 error_found = true;
13661 /* Method and function types cannot be used to address memory and thus
13662 TYPE_CANONICAL really matters only for determining useless conversions.
13664 FIXME: The C++ FE produces declarations of builtin functions that are not
13665 compatible with main variants. */
13666 else if (TREE_CODE (t) == FUNCTION_TYPE)
13668 else if (t != ct
13669 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13670 with variably sized arrays because their sizes were possibly
13671 gimplified to different variables. */
13672 && !variably_modified_type_p (ct, NULL)
13673 && !gimple_canonical_types_compatible_p (t, ct, false)
13674 && COMPLETE_TYPE_P (t))
13676 error ("%<TYPE_CANONICAL%> is not compatible");
13677 debug_tree (ct);
13678 error_found = true;
13681 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13682 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13684 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
13685 debug_tree (ct);
13686 error_found = true;
13688 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13690 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
13691 debug_tree (ct);
13692 debug_tree (TYPE_MAIN_VARIANT (ct));
13693 error_found = true;
13697 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13698 if (RECORD_OR_UNION_TYPE_P (t))
13700 /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13701 and dangles the pointer from time to time. */
13702 if (TYPE_VFIELD (t)
13703 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13704 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13706 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
13707 debug_tree (TYPE_VFIELD (t));
13708 error_found = true;
13711 else if (TREE_CODE (t) == POINTER_TYPE)
13713 if (TYPE_NEXT_PTR_TO (t)
13714 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13716 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
13717 debug_tree (TYPE_NEXT_PTR_TO (t));
13718 error_found = true;
13721 else if (TREE_CODE (t) == REFERENCE_TYPE)
13723 if (TYPE_NEXT_REF_TO (t)
13724 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13726 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
13727 debug_tree (TYPE_NEXT_REF_TO (t));
13728 error_found = true;
13731 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13732 || TREE_CODE (t) == FIXED_POINT_TYPE)
13734 /* FIXME: The following check should pass:
13735 useless_type_conversion_p (const_cast <tree> (t),
13736 TREE_TYPE (TYPE_MIN_VALUE (t)))
13737 but does not for C sizetypes in LTO. */
13740 /* Check various uses of TYPE_MAXVAL_RAW. */
13741 if (RECORD_OR_UNION_TYPE_P (t))
13743 if (!TYPE_BINFO (t))
13745 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13747 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
13748 debug_tree (TYPE_BINFO (t));
13749 error_found = true;
13751 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
13753 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
13754 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13755 error_found = true;
13758 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13760 if (TYPE_METHOD_BASETYPE (t)
13761 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13762 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13764 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
13765 debug_tree (TYPE_METHOD_BASETYPE (t));
13766 error_found = true;
13769 else if (TREE_CODE (t) == OFFSET_TYPE)
13771 if (TYPE_OFFSET_BASETYPE (t)
13772 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13773 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13775 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
13776 debug_tree (TYPE_OFFSET_BASETYPE (t));
13777 error_found = true;
13780 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13781 || TREE_CODE (t) == FIXED_POINT_TYPE)
13783 /* FIXME: The following check should pass:
13784 useless_type_conversion_p (const_cast <tree> (t),
13785 TREE_TYPE (TYPE_MAX_VALUE (t)))
13786 but does not for C sizetypes in LTO. */
13788 else if (TREE_CODE (t) == ARRAY_TYPE)
13790 if (TYPE_ARRAY_MAX_SIZE (t)
13791 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13793 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
13794 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13795 error_found = true;
13798 else if (TYPE_MAX_VALUE_RAW (t))
13800 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
13801 debug_tree (TYPE_MAX_VALUE_RAW (t));
13802 error_found = true;
13805 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13807 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
13808 debug_tree (TYPE_LANG_SLOT_1 (t));
13809 error_found = true;
13812 /* Check various uses of TYPE_VALUES_RAW. */
13813 if (TREE_CODE (t) == ENUMERAL_TYPE)
13814 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13816 tree value = TREE_VALUE (l);
13817 tree name = TREE_PURPOSE (l);
13819 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE uses a
13820 CONST_DECL of ENUMERAL_TYPE. */
13821 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13823 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
13824 debug_tree (value);
13825 debug_tree (name);
13826 error_found = true;
13828 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13829 && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
13830 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13832 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
13833 "to the enum");
13834 debug_tree (value);
13835 debug_tree (name);
13836 error_found = true;
13838 if (TREE_CODE (name) != IDENTIFIER_NODE)
13840 error ("enum value name is not %<IDENTIFIER_NODE%>");
13841 debug_tree (value);
13842 debug_tree (name);
13843 error_found = true;
13846 else if (TREE_CODE (t) == ARRAY_TYPE)
13848 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13850 error ("array %<TYPE_DOMAIN%> is not integer type");
13851 debug_tree (TYPE_DOMAIN (t));
13852 error_found = true;
13855 else if (RECORD_OR_UNION_TYPE_P (t))
13857 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13859 error ("%<TYPE_FIELDS%> defined in incomplete type");
13860 error_found = true;
13862 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13864 /* TODO: verify properties of decls. */
13865 if (TREE_CODE (fld) == FIELD_DECL)
13867 else if (TREE_CODE (fld) == TYPE_DECL)
13869 else if (TREE_CODE (fld) == CONST_DECL)
13871 else if (VAR_P (fld))
13873 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13875 else if (TREE_CODE (fld) == USING_DECL)
13877 else if (TREE_CODE (fld) == FUNCTION_DECL)
13879 else
13881 error ("wrong tree in %<TYPE_FIELDS%> list");
13882 debug_tree (fld);
13883 error_found = true;
13887 else if (TREE_CODE (t) == INTEGER_TYPE
13888 || TREE_CODE (t) == BOOLEAN_TYPE
13889 || TREE_CODE (t) == OFFSET_TYPE
13890 || TREE_CODE (t) == REFERENCE_TYPE
13891 || TREE_CODE (t) == NULLPTR_TYPE
13892 || TREE_CODE (t) == POINTER_TYPE)
13894 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13896 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
13897 "is %p",
13898 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13899 error_found = true;
13901 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13903 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
13904 debug_tree (TYPE_CACHED_VALUES (t));
13905 error_found = true;
13907 /* Verify just enough of the cache to ensure that no one copied it to a new
13908 type. All copying should go through copy_node, which should clear it. */
13909 else if (TYPE_CACHED_VALUES_P (t))
13911 int i;
13912 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13913 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13914 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13916 error ("wrong %<TYPE_CACHED_VALUES%> entry");
13917 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13918 error_found = true;
13919 break;
13923 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13924 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13926 /* C++ FE uses TREE_PURPOSE to store initial values. */
13927 if (TREE_PURPOSE (l) && in_lto_p)
13929 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
13930 debug_tree (l);
13931 error_found = true;
13933 if (!TYPE_P (TREE_VALUE (l)))
13935 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
13936 debug_tree (l);
13937 error_found = true;
13940 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13942 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
13943 debug_tree (TYPE_VALUES_RAW (t));
13944 error_found = true;
13946 if (TREE_CODE (t) != INTEGER_TYPE
13947 && TREE_CODE (t) != BOOLEAN_TYPE
13948 && TREE_CODE (t) != OFFSET_TYPE
13949 && TREE_CODE (t) != REFERENCE_TYPE
13950 && TREE_CODE (t) != NULLPTR_TYPE
13951 && TREE_CODE (t) != POINTER_TYPE
13952 && TYPE_CACHED_VALUES_P (t))
13954 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
13955 error_found = true;
13958 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13959 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
13960 of a type. */
13961 if (TREE_CODE (t) == METHOD_TYPE
13962 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13964 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
13965 error_found = true;
13968 if (error_found)
13970 debug_tree (const_cast <tree> (t));
13971 internal_error ("%qs failed", __func__);
13976 /* Return 1 if ARG interpreted as signed in its precision is known to be
13977 always non-negative, 2 if ARG is known to be always negative, or 3 if
13978 ARG may be either. */
13980 int
13981 get_range_pos_neg (tree arg)
13983 if (arg == error_mark_node)
13984 return 3;
13986 int prec = TYPE_PRECISION (TREE_TYPE (arg));
13987 int cnt = 0;
13988 if (TREE_CODE (arg) == INTEGER_CST)
13990 wide_int w = wi::sext (wi::to_wide (arg), prec);
13991 if (wi::neg_p (w))
13992 return 2;
13993 else
13994 return 1;
13996 while (CONVERT_EXPR_P (arg)
13997 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
13998 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14000 arg = TREE_OPERAND (arg, 0);
14001 /* A narrower value zero-extended into a wider type
14002 will always result in a positive value. */
14003 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14004 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14005 return 1;
14006 prec = TYPE_PRECISION (TREE_TYPE (arg));
14007 if (++cnt > 30)
14008 return 3;
14011 if (TREE_CODE (arg) != SSA_NAME)
14012 return 3;
14013 value_range r;
14014 while (!get_global_range_query ()->range_of_expr (r, arg) || r.kind () != VR_RANGE)
14016 gimple *g = SSA_NAME_DEF_STMT (arg);
14017 if (is_gimple_assign (g)
14018 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14020 tree t = gimple_assign_rhs1 (g);
14021 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14022 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14024 if (TYPE_UNSIGNED (TREE_TYPE (t))
14025 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14026 return 1;
14027 prec = TYPE_PRECISION (TREE_TYPE (t));
14028 arg = t;
14029 if (++cnt > 30)
14030 return 3;
14031 continue;
14034 return 3;
14036 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14038 /* For unsigned values, the "positive" range comes
14039 below the "negative" range. */
14040 if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14041 return 1;
14042 if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14043 return 2;
14045 else
14047 if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14048 return 1;
14049 if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14050 return 2;
14052 return 3;
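/* A minimal illustration of the intended results (the variable names below
   are hypothetical):

     unsigned char c = ...;
     int i = c;   // zero-extension: get_range_pos_neg on i's SSA name should be 1
     int j;       // no range information available: the result falls back to 3

   and for a negative constant such as build_int_cst (integer_type_node, -5)
   the result is 2.  */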
14058 /* Return true if ARG is marked with the nonnull attribute in the
14059 current function signature. */
14061 bool
14062 nonnull_arg_p (const_tree arg)
14064 tree t, attrs, fntype;
14065 unsigned HOST_WIDE_INT arg_num;
14067 gcc_assert (TREE_CODE (arg) == PARM_DECL
14068 && (POINTER_TYPE_P (TREE_TYPE (arg))
14069 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14071 /* The static chain decl is always non null. */
14072 if (arg == cfun->static_chain_decl)
14073 return true;
14075 /* THIS argument of method is always non-NULL. */
14076 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14077 && arg == DECL_ARGUMENTS (cfun->decl)
14078 && flag_delete_null_pointer_checks)
14079 return true;
14081 /* Values passed by reference are always non-NULL. */
14082 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14083 && flag_delete_null_pointer_checks)
14084 return true;
14086 fntype = TREE_TYPE (cfun->decl);
14087 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14089 attrs = lookup_attribute ("nonnull", attrs);
14091 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14092 if (attrs == NULL_TREE)
14093 return false;
14095 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14096 if (TREE_VALUE (attrs) == NULL_TREE)
14097 return true;
14099 /* Get the position number for ARG in the function signature. */
14100 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14102 t = DECL_CHAIN (t), arg_num++)
14104 if (t == arg)
14105 break;
14108 gcc_assert (t == arg);
14110 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14111 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14113 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14114 return true;
14118 return false;
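/* A minimal sketch of the expected behaviour (the declaration is
   hypothetical): when compiling the body of a function declared as

     void f (void *p, void *q) __attribute__ ((nonnull (1)));

   nonnull_arg_p should return true for the PARM_DECL of P, which is listed
   in the attribute, and false for Q.  */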
14121 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14122 information. */
14124 location_t
14125 set_block (location_t loc, tree block)
14127 location_t pure_loc = get_pure_location (loc);
14128 source_range src_range = get_range_from_loc (line_table, loc);
14129 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14132 location_t
14133 set_source_range (tree expr, location_t start, location_t finish)
14135 source_range src_range;
14136 src_range.m_start = start;
14137 src_range.m_finish = finish;
14138 return set_source_range (expr, src_range);
14141 location_t
14142 set_source_range (tree expr, source_range src_range)
14144 if (!EXPR_P (expr))
14145 return UNKNOWN_LOCATION;
14147 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14148 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14149 pure_loc,
14150 src_range,
14151 NULL);
14152 SET_EXPR_LOCATION (expr, adhoc);
14153 return adhoc;
14156 /* Return EXPR, potentially wrapped in a node carrying location LOC,
14157 if !CAN_HAVE_LOCATION_P (expr).
14159 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14160 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14162 Wrapper nodes can be identified using location_wrapper_p. */
14164 tree
14165 maybe_wrap_with_location (tree expr, location_t loc)
14167 if (expr == NULL)
14168 return NULL;
14169 if (loc == UNKNOWN_LOCATION)
14170 return expr;
14171 if (CAN_HAVE_LOCATION_P (expr))
14172 return expr;
14173 /* We should only be adding wrappers for constants and for decls,
14174 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14175 gcc_assert (CONSTANT_CLASS_P (expr)
14176 || DECL_P (expr)
14177 || EXCEPTIONAL_CLASS_P (expr));
14179 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14180 any impact of the wrapper nodes. */
14181 if (EXCEPTIONAL_CLASS_P (expr))
14182 return expr;
14184 /* Compiler-generated temporary variables don't need a wrapper. */
14185 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14186 return expr;
14188 /* If any auto_suppress_location_wrappers are active, don't create
14189 wrappers. */
14190 if (suppress_location_wrappers > 0)
14191 return expr;
14193 tree_code code
14194 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14195 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14196 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14197 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14198 /* Mark this node as being a wrapper. */
14199 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14200 return wrapper;
14203 int suppress_location_wrappers;
14205 /* Return the name of combined function FN, for debugging purposes. */
14207 const char *
14208 combined_fn_name (combined_fn fn)
14210 if (builtin_fn_p (fn))
14212 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14213 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14215 else
14216 return internal_fn_name (as_internal_fn (fn));
14219 /* Return a bitmap with a bit set corresponding to each argument in
14220 a function call type FNTYPE declared with attribute nonnull,
14221 or null if none of the function's arguments are nonnull. The caller
14222 must free the bitmap. */
14224 bitmap
14225 get_nonnull_args (const_tree fntype)
14227 if (fntype == NULL_TREE)
14228 return NULL;
14230 bitmap argmap = NULL;
14231 if (TREE_CODE (fntype) == METHOD_TYPE)
14233 /* The this pointer in C++ non-static member functions is
14234 implicitly nonnull whether or not it's declared as such. */
14235 argmap = BITMAP_ALLOC (NULL);
14236 bitmap_set_bit (argmap, 0);
14239 tree attrs = TYPE_ATTRIBUTES (fntype);
14240 if (!attrs)
14241 return argmap;
14243 /* A function declaration can specify multiple attribute nonnull,
14244 each with zero or more arguments. The loop below creates a bitmap
14245 representing a union of all the arguments. An empty (but non-null)
14246 bitmap means that all arguments have been declared nonnull. */
14247 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14249 attrs = lookup_attribute ("nonnull", attrs);
14250 if (!attrs)
14251 break;
14253 if (!argmap)
14254 argmap = BITMAP_ALLOC (NULL);
14256 if (!TREE_VALUE (attrs))
14258 /* Clear the bitmap in case a previous attribute nonnull
14259 set it and this one overrides it for all arguments. */
14260 bitmap_clear (argmap);
14261 return argmap;
14264 /* Iterate over the indices of the arguments declared nonnull
14265 and set a bit for each. */
14266 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14268 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14269 bitmap_set_bit (argmap, val);
14273 return argmap;
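/* For example, given a declaration along the lines of

     void g (void *, void *, void *) __attribute__ ((nonnull (1, 3)));

   the returned bitmap should have bits 0 and 2 set (positions are converted
   to zero-based indices); the caller must release it with BITMAP_FREE.  */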
14276 /* Returns true if TYPE is a type where it and all of its subobjects
14277 (recursively) are of structure, union, or array type. */
14279 bool
14280 is_empty_type (const_tree type)
14282 if (RECORD_OR_UNION_TYPE_P (type))
14284 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14285 if (TREE_CODE (field) == FIELD_DECL
14286 && !DECL_PADDING_P (field)
14287 && !is_empty_type (TREE_TYPE (field)))
14288 return false;
14289 return true;
14291 else if (TREE_CODE (type) == ARRAY_TYPE)
14292 return (integer_minus_onep (array_type_nelts (type))
14293 || TYPE_DOMAIN (type) == NULL_TREE
14294 || is_empty_type (TREE_TYPE (type)));
14295 return false;
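/* E.g. a C++ class with no non-static data members, such as

     struct empty_t { void member_fn (); };

   is expected to satisfy is_empty_type, while adding any scalar data member
   makes the predicate false.  */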
14298 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14299 that shouldn't be passed via stack. */
14301 bool
14302 default_is_empty_record (const_tree type)
14304 if (!abi_version_at_least (12))
14305 return false;
14307 if (type == error_mark_node)
14308 return false;
14310 if (TREE_ADDRESSABLE (type))
14311 return false;
14313 return is_empty_type (TYPE_MAIN_VARIANT (type));
14316 /* Determine whether TYPE is a structure with a flexible array member,
14317 or a union containing such a structure (possibly recursively). */
14319 bool
14320 flexible_array_type_p (const_tree type)
14322 tree x, last;
14323 switch (TREE_CODE (type))
14325 case RECORD_TYPE:
14326 last = NULL_TREE;
14327 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14328 if (TREE_CODE (x) == FIELD_DECL)
14329 last = x;
14330 if (last == NULL_TREE)
14331 return false;
14332 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14333 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14334 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14335 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14336 return true;
14337 return false;
14338 case UNION_TYPE:
14339 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14341 if (TREE_CODE (x) == FIELD_DECL
14342 && flexible_array_type_p (TREE_TYPE (x)))
14343 return true;
14345 return false;
14346 default:
14347 return false;
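/* A structure such as

     struct msg { int len; char data[]; };

   ends in a flexible array member, so flexible_array_type_p should be true
   for it, and likewise for any union containing such a structure.  */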
14351 /* Like int_size_in_bytes, but handle empty records specially. */
14353 HOST_WIDE_INT
14354 arg_int_size_in_bytes (const_tree type)
14356 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14359 /* Like size_in_bytes, but handle empty records specially. */
14361 tree
14362 arg_size_in_bytes (const_tree type)
14364 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14367 /* Return true if an expression with CODE has to have the same result type as
14368 its first operand. */
14370 bool
14371 expr_type_first_operand_type_p (tree_code code)
14373 switch (code)
14375 case NEGATE_EXPR:
14376 case ABS_EXPR:
14377 case BIT_NOT_EXPR:
14378 case PAREN_EXPR:
14379 case CONJ_EXPR:
14381 case PLUS_EXPR:
14382 case MINUS_EXPR:
14383 case MULT_EXPR:
14384 case TRUNC_DIV_EXPR:
14385 case CEIL_DIV_EXPR:
14386 case FLOOR_DIV_EXPR:
14387 case ROUND_DIV_EXPR:
14388 case TRUNC_MOD_EXPR:
14389 case CEIL_MOD_EXPR:
14390 case FLOOR_MOD_EXPR:
14391 case ROUND_MOD_EXPR:
14392 case RDIV_EXPR:
14393 case EXACT_DIV_EXPR:
14394 case MIN_EXPR:
14395 case MAX_EXPR:
14396 case BIT_IOR_EXPR:
14397 case BIT_XOR_EXPR:
14398 case BIT_AND_EXPR:
14400 case LSHIFT_EXPR:
14401 case RSHIFT_EXPR:
14402 case LROTATE_EXPR:
14403 case RROTATE_EXPR:
14404 return true;
14406 default:
14407 return false;
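/* For instance, a PLUS_EXPR of two ints has type int, matching its first
   operand, so PLUS_EXPR is listed above; a comparison such as LT_EXPR
   yields a boolean-like result type instead and is therefore excluded.  */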
14411 /* Return a typenode for the "standard" C type with a given name. */
14412 tree
14413 get_typenode_from_name (const char *name)
14415 if (name == NULL || *name == '\0')
14416 return NULL_TREE;
14418 if (strcmp (name, "char") == 0)
14419 return char_type_node;
14420 if (strcmp (name, "unsigned char") == 0)
14421 return unsigned_char_type_node;
14422 if (strcmp (name, "signed char") == 0)
14423 return signed_char_type_node;
14425 if (strcmp (name, "short int") == 0)
14426 return short_integer_type_node;
14427 if (strcmp (name, "short unsigned int") == 0)
14428 return short_unsigned_type_node;
14430 if (strcmp (name, "int") == 0)
14431 return integer_type_node;
14432 if (strcmp (name, "unsigned int") == 0)
14433 return unsigned_type_node;
14435 if (strcmp (name, "long int") == 0)
14436 return long_integer_type_node;
14437 if (strcmp (name, "long unsigned int") == 0)
14438 return long_unsigned_type_node;
14440 if (strcmp (name, "long long int") == 0)
14441 return long_long_integer_type_node;
14442 if (strcmp (name, "long long unsigned int") == 0)
14443 return long_long_unsigned_type_node;
14445 gcc_unreachable ();
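/* Callers are expected to pass one of the standard C integer type names
   spelled exactly as above, e.g.

     tree t = get_typenode_from_name ("unsigned int");  // unsigned_type_node

   Any other non-empty name trips the gcc_unreachable assertion.  */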
14448 /* List of pointer types used to declare builtins before we have seen their
14449 real declaration.
14451 Keep the size up to date in tree.h ! */
14452 const builtin_structptr_type builtin_structptr_types[6] =
14454 { fileptr_type_node, ptr_type_node, "FILE" },
14455 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14456 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14457 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14458 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14459 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14462 /* Return the maximum object size. */
14464 tree
14465 max_object_size (void)
14467 /* To do: Make this a configurable parameter. */
14468 return TYPE_MAX_VALUE (ptrdiff_type_node);
14471 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14472 parameter default to false and that weeds out error_mark_node. */
14474 bool
14475 verify_type_context (location_t loc, type_context_kind context,
14476 const_tree type, bool silent_p)
14478 if (type == error_mark_node)
14479 return true;
14481 gcc_assert (TYPE_P (type));
14482 return (!targetm.verify_type_context
14483 || targetm.verify_type_context (loc, context, type, silent_p));
14486 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14487 delete operators. Return false if they may or may not name such
14488 a pair and, when nonnull, set *PCERTAIN to true if they certainly
14489 do not. */
14491 bool
14492 valid_new_delete_pair_p (tree new_asm, tree delete_asm,
14493 bool *pcertain /* = NULL */)
14495 bool certain;
14496 if (!pcertain)
14497 pcertain = &certain;
14499 const char *new_name = IDENTIFIER_POINTER (new_asm);
14500 const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14501 unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14502 unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14504 /* The following failures are due to invalid names so they're not
14505 considered certain mismatches. */
14506 *pcertain = false;
14508 if (new_len < 5 || delete_len < 6)
14509 return false;
14510 if (new_name[0] == '_')
14511 ++new_name, --new_len;
14512 if (new_name[0] == '_')
14513 ++new_name, --new_len;
14514 if (delete_name[0] == '_')
14515 ++delete_name, --delete_len;
14516 if (delete_name[0] == '_')
14517 ++delete_name, --delete_len;
14518 if (new_len < 4 || delete_len < 5)
14519 return false;
14521 /* The following failures are due to names of user-defined operators
14522 so they're also not considered certain mismatches. */
14524 /* *_len is now just the length after initial underscores. */
14525 if (new_name[0] != 'Z' || new_name[1] != 'n')
14526 return false;
14527 if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14528 return false;
14530 /* The following failures are certain mismatches. */
14531 *pcertain = true;
14533 /* _Znw must match _Zdl, _Zna must match _Zda. */
14534 if ((new_name[2] != 'w' || delete_name[2] != 'l')
14535 && (new_name[2] != 'a' || delete_name[2] != 'a'))
14536 return false;
14537 /* 'j', 'm' and 'y' correspond to size_t. */
14538 if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14539 return false;
14540 if (delete_name[3] != 'P' || delete_name[4] != 'v')
14541 return false;
14542 if (new_len == 4
14543 || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14545 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14546 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14547 if (delete_len == 5)
14548 return true;
14549 if (delete_len == 6 && delete_name[5] == new_name[3])
14550 return true;
14551 if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14552 return true;
14554 else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14555 || (new_len == 33
14556 && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14558 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14559 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
14560 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14561 if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
14562 return true;
14563 if (delete_len == 21
14564 && delete_name[5] == new_name[3]
14565 && !memcmp (delete_name + 6, "St11align_val_t", 15))
14566 return true;
14567 if (delete_len == 34
14568 && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
14569 return true;
14572 /* The negative result is conservative. */
14573 *pcertain = false;
14574 return false;
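/* As an illustration of the mangling checks above (assuming an LP64 target
   where size_t mangles as 'm'):

     operator new (std::size_t)   ->  _Znwm
     operator delete (void *)     ->  _ZdlPv

   form a valid pair, while pairing _Znwm with the array form _ZdaPv is
   reported as a certain mismatch.  */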
14577 /* Return the zero-based number corresponding to the argument being
14578 deallocated if FNDECL is a deallocation function or an out-of-bounds
14579 value if it isn't. */
14581 unsigned
14582 fndecl_dealloc_argno (tree fndecl)
14584 /* A call to operator delete isn't recognized as one to a built-in. */
14585 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
14587 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
14588 return 0;
14590 /* Avoid placement delete that's not been inlined. */
14591 tree fname = DECL_ASSEMBLER_NAME (fndecl);
14592 if (id_equal (fname, "_ZdlPvS_") // ordinary form
14593 || id_equal (fname, "_ZdaPvS_")) // array form
14594 return UINT_MAX;
14595 return 0;
14598 /* TODO: Handle user-defined functions with attribute malloc? Handle
14599 known non-built-ins like fopen? */
14600 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14602 switch (DECL_FUNCTION_CODE (fndecl))
14604 case BUILT_IN_FREE:
14605 case BUILT_IN_REALLOC:
14606 return 0;
14607 default:
14608 break;
14610 return UINT_MAX;
14613 tree attrs = DECL_ATTRIBUTES (fndecl);
14614 if (!attrs)
14615 return UINT_MAX;
14617 for (tree atfree = attrs;
14618 (atfree = lookup_attribute ("*dealloc", atfree));
14619 atfree = TREE_CHAIN (atfree))
14621 tree alloc = TREE_VALUE (atfree);
14622 if (!alloc)
14623 continue;
14625 tree pos = TREE_CHAIN (alloc);
14626 if (!pos)
14627 return 0;
14629 pos = TREE_VALUE (pos);
14630 return TREE_INT_CST_LOW (pos) - 1;
14633 return UINT_MAX;
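/* For example, for the built-in declarations of free () and realloc () this
   returns 0 (their first argument is the pointer being deallocated), while
   for an allocator such as malloc () it returns UINT_MAX.  For a
   user-declared pair along the lines of

     void release (struct pool *, void *);
     void *acquire (struct pool *) __attribute__ ((malloc (release, 2)));

   the internal "*dealloc" attribute attached to RELEASE should make this
   function yield 1 for its decl (the zero-based position of the pointer
   argument).  */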
14636 /* If EXPR refers to a character array or pointer declared attribute
14637 nonstring, return a decl for that array or pointer and set *REF
14638 to the referenced enclosing object or pointer. Otherwise return
14639 null. */
14641 tree
14642 get_attr_nonstring_decl (tree expr, tree *ref)
14644 tree decl = expr;
14645 tree var = NULL_TREE;
14646 if (TREE_CODE (decl) == SSA_NAME)
14648 gimple *def = SSA_NAME_DEF_STMT (decl);
14650 if (is_gimple_assign (def))
14652 tree_code code = gimple_assign_rhs_code (def);
14653 if (code == ADDR_EXPR
14654 || code == COMPONENT_REF
14655 || code == VAR_DECL)
14656 decl = gimple_assign_rhs1 (def);
14658 else
14659 var = SSA_NAME_VAR (decl);
14662 if (TREE_CODE (decl) == ADDR_EXPR)
14663 decl = TREE_OPERAND (decl, 0);
14665 /* To simplify calling code, store the referenced DECL regardless of
14666 the attribute determined below, but avoid storing the SSA_NAME_VAR
14667 obtained above (it's not useful for dataflow purposes). */
14668 if (ref)
14669 *ref = decl;
14671 /* Use the SSA_NAME_VAR that was determined above to see if it's
14672 declared nonstring. Otherwise drill down into the referenced
14673 DECL. */
14674 if (var)
14675 decl = var;
14676 else if (TREE_CODE (decl) == ARRAY_REF)
14677 decl = TREE_OPERAND (decl, 0);
14678 else if (TREE_CODE (decl) == COMPONENT_REF)
14679 decl = TREE_OPERAND (decl, 1);
14680 else if (TREE_CODE (decl) == MEM_REF)
14681 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
14683 if (DECL_P (decl)
14684 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
14685 return decl;
14687 return NULL_TREE;
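/* E.g. given a declaration like

     char buf[8] __attribute__ ((nonstring));

   calling get_attr_nonstring_decl on an ADDR_EXPR or SSA name referring to
   BUF should return the VAR_DECL for BUF and store it in *REF as well.  */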
14690 /* Return the length of the attribute names string if the arglist chain
14691 contains more than one entry, -1 otherwise. */
14693 int
14694 get_target_clone_attr_len (tree arglist)
14696 tree arg;
14697 int str_len_sum = 0;
14698 int argnum = 0;
14700 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
14702 const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
14703 size_t len = strlen (str);
14704 str_len_sum += len + 1;
14705 for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
14706 argnum++;
14707 argnum++;
14709 if (argnum <= 1)
14710 return -1;
14711 return str_len_sum;
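/* For instance, the argument list of

     __attribute__ ((target_clones ("avx2", "default")))

   names two versions, so the function should return the summed string
   lengths plus separators (here 5 + 8 = 13); a list naming only a single
   version yields -1.  */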
14714 void
14715 tree_cc_finalize (void)
14717 clear_nonstandard_integer_type_cache ();
14720 #if CHECKING_P
14722 namespace selftest {
14724 /* Selftests for tree. */
14726 /* Verify that integer constants are sane. */
14728 static void
14729 test_integer_constants ()
14731 ASSERT_TRUE (integer_type_node != NULL);
14732 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14734 tree type = integer_type_node;
14736 tree zero = build_zero_cst (type);
14737 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14738 ASSERT_EQ (type, TREE_TYPE (zero));
14740 tree one = build_int_cst (type, 1);
14741 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14742 ASSERT_EQ (type, TREE_TYPE (one));
14745 /* Verify identifiers. */
14747 static void
14748 test_identifiers ()
14750 tree identifier = get_identifier ("foo");
14751 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14752 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14755 /* Verify LABEL_DECL. */
14757 static void
14758 test_labels ()
14760 tree identifier = get_identifier ("err");
14761 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14762 identifier, void_type_node);
14763 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14764 ASSERT_FALSE (FORCED_LABEL (label_decl));
14767 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14768 are given by VALS. */
14770 static tree
14771 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
14773 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
14774 tree_vector_builder builder (type, vals.length (), 1);
14775 builder.splice (vals);
14776 return builder.build ();
14779 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14781 static void
14782 check_vector_cst (const vec<tree> &expected, tree actual)
14784 ASSERT_KNOWN_EQ (expected.length (),
14785 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
14786 for (unsigned int i = 0; i < expected.length (); ++i)
14787 ASSERT_EQ (wi::to_wide (expected[i]),
14788 wi::to_wide (vector_cst_elt (actual, i)));
14791 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14792 and that its elements match EXPECTED. */
14794 static void
14795 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
14796 unsigned int npatterns)
14798 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14799 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
14800 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
14801 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
14802 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14803 check_vector_cst (expected, actual);
14806 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14807 and NPATTERNS background elements, and that its elements match
14808 EXPECTED. */
14810 static void
14811 check_vector_cst_fill (const vec<tree> &expected, tree actual,
14812 unsigned int npatterns)
14814 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14815 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
14816 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
14817 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14818 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14819 check_vector_cst (expected, actual);
14822 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14823 and that its elements match EXPECTED. */
14825 static void
14826 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
14827 unsigned int npatterns)
14829 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14830 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
14831 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
14832 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14833 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
14834 check_vector_cst (expected, actual);
14837 /* Test the creation of VECTOR_CSTs. */
14839 static void
14840 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
14842 auto_vec<tree, 8> elements (8);
14843 elements.quick_grow (8);
14844 tree element_type = build_nonstandard_integer_type (16, true);
14845 tree vector_type = build_vector_type (element_type, 8);
14847 /* Test a simple linear series with a base of 0 and a step of 1:
14848 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
14849 for (unsigned int i = 0; i < 8; ++i)
14850 elements[i] = build_int_cst (element_type, i);
14851 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
14852 check_vector_cst_stepped (elements, vector, 1);
14854 /* Try the same with the first element replaced by 100:
14855 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
14856 elements[0] = build_int_cst (element_type, 100);
14857 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14858 check_vector_cst_stepped (elements, vector, 1);
14860 /* Try a series that wraps around.
14861 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
14862 for (unsigned int i = 1; i < 8; ++i)
14863 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
14864 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14865 check_vector_cst_stepped (elements, vector, 1);
14867 /* Try a downward series:
14868 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
14869 for (unsigned int i = 1; i < 8; ++i)
14870 elements[i] = build_int_cst (element_type, 80 - i);
14871 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14872 check_vector_cst_stepped (elements, vector, 1);
14874 /* Try two interleaved series with different bases and steps:
14875 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
14876 elements[1] = build_int_cst (element_type, 53);
14877 for (unsigned int i = 2; i < 8; i += 2)
14879 elements[i] = build_int_cst (element_type, 70 - i * 2);
14880 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
14882 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14883 check_vector_cst_stepped (elements, vector, 2);
14885 /* Try a duplicated value:
14886 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
14887 for (unsigned int i = 1; i < 8; ++i)
14888 elements[i] = elements[0];
14889 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14890 check_vector_cst_duplicate (elements, vector, 1);
14892 /* Try an interleaved duplicated value:
14893 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
14894 elements[1] = build_int_cst (element_type, 55);
14895 for (unsigned int i = 2; i < 8; ++i)
14896 elements[i] = elements[i - 2];
14897 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14898 check_vector_cst_duplicate (elements, vector, 2);
14900 /* Try a duplicated value with 2 exceptions
14901 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
14902 elements[0] = build_int_cst (element_type, 41);
14903 elements[1] = build_int_cst (element_type, 97);
14904 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14905 check_vector_cst_fill (elements, vector, 2);
14907 /* Try with and without a step
14908 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
14909 for (unsigned int i = 3; i < 8; i += 2)
14910 elements[i] = build_int_cst (element_type, i * 7);
14911 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14912 check_vector_cst_stepped (elements, vector, 2);
14914 /* Try a fully-general constant:
14915 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
14916 elements[5] = build_int_cst (element_type, 9990);
14917 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14918 check_vector_cst_fill (elements, vector, 4);
14921 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
14922 Helper function for test_location_wrappers, to deal with STRIP_NOPS
14923 modifying its argument in-place. */
14925 static void
14926 check_strip_nops (tree node, tree expected)
14928 STRIP_NOPS (node);
14929 ASSERT_EQ (expected, node);
14932 /* Verify location wrappers. */
14934 static void
14935 test_location_wrappers ()
14937 location_t loc = BUILTINS_LOCATION;
14939 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
14941 /* Wrapping a constant. */
14942 tree int_cst = build_int_cst (integer_type_node, 42);
14943 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
14944 ASSERT_FALSE (location_wrapper_p (int_cst));
14946 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
14947 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
14948 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
14949 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
14951 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
14952 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
14954 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
14955 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
14956 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
14957 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
14959 /* Wrapping a STRING_CST. */
14960 tree string_cst = build_string (4, "foo");
14961 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
14962 ASSERT_FALSE (location_wrapper_p (string_cst));
14964 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
14965 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
14966 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
14967 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
14968 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
14971 /* Wrapping a variable. */
14972 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
14973 get_identifier ("some_int_var"),
14974 integer_type_node);
14975 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
14976 ASSERT_FALSE (location_wrapper_p (int_var));
14978 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
14979 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
14980 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
14981 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
14983 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
14984 wrapper. */
14985 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
14986 ASSERT_FALSE (location_wrapper_p (r_cast));
14987 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
14989 /* Verify that STRIP_NOPS removes wrappers. */
14990 check_strip_nops (wrapped_int_cst, int_cst);
14991 check_strip_nops (wrapped_string_cst, string_cst);
14992 check_strip_nops (wrapped_int_var, int_var);
14995 /* Test various tree predicates. Verify that location wrappers don't
14996 affect the results. */
14998 static void
14999 test_predicates ()
15001 /* Build various constants and wrappers around them. */
15003 location_t loc = BUILTINS_LOCATION;
15005 tree i_0 = build_int_cst (integer_type_node, 0);
15006 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15008 tree i_1 = build_int_cst (integer_type_node, 1);
15009 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15011 tree i_m1 = build_int_cst (integer_type_node, -1);
15012 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15014 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15015 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15016 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15017 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15018 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15019 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15021 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15022 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15023 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15025 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15026 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15027 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
15029 /* TODO: vector constants. */
15031 /* Test integer_onep. */
15032 ASSERT_FALSE (integer_onep (i_0));
15033 ASSERT_FALSE (integer_onep (wr_i_0));
15034 ASSERT_TRUE (integer_onep (i_1));
15035 ASSERT_TRUE (integer_onep (wr_i_1));
15036 ASSERT_FALSE (integer_onep (i_m1));
15037 ASSERT_FALSE (integer_onep (wr_i_m1));
15038 ASSERT_FALSE (integer_onep (f_0));
15039 ASSERT_FALSE (integer_onep (wr_f_0));
15040 ASSERT_FALSE (integer_onep (f_1));
15041 ASSERT_FALSE (integer_onep (wr_f_1));
15042 ASSERT_FALSE (integer_onep (f_m1));
15043 ASSERT_FALSE (integer_onep (wr_f_m1));
15044 ASSERT_FALSE (integer_onep (c_i_0));
15045 ASSERT_TRUE (integer_onep (c_i_1));
15046 ASSERT_FALSE (integer_onep (c_i_m1));
15047 ASSERT_FALSE (integer_onep (c_f_0));
15048 ASSERT_FALSE (integer_onep (c_f_1));
15049 ASSERT_FALSE (integer_onep (c_f_m1));
15051 /* Test integer_zerop. */
15052 ASSERT_TRUE (integer_zerop (i_0));
15053 ASSERT_TRUE (integer_zerop (wr_i_0));
15054 ASSERT_FALSE (integer_zerop (i_1));
15055 ASSERT_FALSE (integer_zerop (wr_i_1));
15056 ASSERT_FALSE (integer_zerop (i_m1));
15057 ASSERT_FALSE (integer_zerop (wr_i_m1));
15058 ASSERT_FALSE (integer_zerop (f_0));
15059 ASSERT_FALSE (integer_zerop (wr_f_0));
15060 ASSERT_FALSE (integer_zerop (f_1));
15061 ASSERT_FALSE (integer_zerop (wr_f_1));
15062 ASSERT_FALSE (integer_zerop (f_m1));
15063 ASSERT_FALSE (integer_zerop (wr_f_m1));
15064 ASSERT_TRUE (integer_zerop (c_i_0));
15065 ASSERT_FALSE (integer_zerop (c_i_1));
15066 ASSERT_FALSE (integer_zerop (c_i_m1));
15067 ASSERT_FALSE (integer_zerop (c_f_0));
15068 ASSERT_FALSE (integer_zerop (c_f_1));
15069 ASSERT_FALSE (integer_zerop (c_f_m1));
15071 /* Test integer_all_onesp. */
15072 ASSERT_FALSE (integer_all_onesp (i_0));
15073 ASSERT_FALSE (integer_all_onesp (wr_i_0));
15074 ASSERT_FALSE (integer_all_onesp (i_1));
15075 ASSERT_FALSE (integer_all_onesp (wr_i_1));
15076 ASSERT_TRUE (integer_all_onesp (i_m1));
15077 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15078 ASSERT_FALSE (integer_all_onesp (f_0));
15079 ASSERT_FALSE (integer_all_onesp (wr_f_0));
15080 ASSERT_FALSE (integer_all_onesp (f_1));
15081 ASSERT_FALSE (integer_all_onesp (wr_f_1));
15082 ASSERT_FALSE (integer_all_onesp (f_m1));
15083 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15084 ASSERT_FALSE (integer_all_onesp (c_i_0));
15085 ASSERT_FALSE (integer_all_onesp (c_i_1));
15086 ASSERT_FALSE (integer_all_onesp (c_i_m1));
15087 ASSERT_FALSE (integer_all_onesp (c_f_0));
15088 ASSERT_FALSE (integer_all_onesp (c_f_1));
15089 ASSERT_FALSE (integer_all_onesp (c_f_m1));
15091 /* Test integer_minus_onep. */
15092 ASSERT_FALSE (integer_minus_onep (i_0));
15093 ASSERT_FALSE (integer_minus_onep (wr_i_0));
15094 ASSERT_FALSE (integer_minus_onep (i_1));
15095 ASSERT_FALSE (integer_minus_onep (wr_i_1));
15096 ASSERT_TRUE (integer_minus_onep (i_m1));
15097 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15098 ASSERT_FALSE (integer_minus_onep (f_0));
15099 ASSERT_FALSE (integer_minus_onep (wr_f_0));
15100 ASSERT_FALSE (integer_minus_onep (f_1));
15101 ASSERT_FALSE (integer_minus_onep (wr_f_1));
15102 ASSERT_FALSE (integer_minus_onep (f_m1));
15103 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15104 ASSERT_FALSE (integer_minus_onep (c_i_0));
15105 ASSERT_FALSE (integer_minus_onep (c_i_1));
15106 ASSERT_TRUE (integer_minus_onep (c_i_m1));
15107 ASSERT_FALSE (integer_minus_onep (c_f_0));
15108 ASSERT_FALSE (integer_minus_onep (c_f_1));
15109 ASSERT_FALSE (integer_minus_onep (c_f_m1));
15111 /* Test integer_each_onep. */
15112 ASSERT_FALSE (integer_each_onep (i_0));
15113 ASSERT_FALSE (integer_each_onep (wr_i_0));
15114 ASSERT_TRUE (integer_each_onep (i_1));
15115 ASSERT_TRUE (integer_each_onep (wr_i_1));
15116 ASSERT_FALSE (integer_each_onep (i_m1));
15117 ASSERT_FALSE (integer_each_onep (wr_i_m1));
15118 ASSERT_FALSE (integer_each_onep (f_0));
15119 ASSERT_FALSE (integer_each_onep (wr_f_0));
15120 ASSERT_FALSE (integer_each_onep (f_1));
15121 ASSERT_FALSE (integer_each_onep (wr_f_1));
15122 ASSERT_FALSE (integer_each_onep (f_m1));
15123 ASSERT_FALSE (integer_each_onep (wr_f_m1));
15124 ASSERT_FALSE (integer_each_onep (c_i_0));
15125 ASSERT_FALSE (integer_each_onep (c_i_1));
15126 ASSERT_FALSE (integer_each_onep (c_i_m1));
15127 ASSERT_FALSE (integer_each_onep (c_f_0));
15128 ASSERT_FALSE (integer_each_onep (c_f_1));
15129 ASSERT_FALSE (integer_each_onep (c_f_m1));
15131 /* Test integer_truep. */
15132 ASSERT_FALSE (integer_truep (i_0));
15133 ASSERT_FALSE (integer_truep (wr_i_0));
15134 ASSERT_TRUE (integer_truep (i_1));
15135 ASSERT_TRUE (integer_truep (wr_i_1));
15136 ASSERT_FALSE (integer_truep (i_m1));
15137 ASSERT_FALSE (integer_truep (wr_i_m1));
15138 ASSERT_FALSE (integer_truep (f_0));
15139 ASSERT_FALSE (integer_truep (wr_f_0));
15140 ASSERT_FALSE (integer_truep (f_1));
15141 ASSERT_FALSE (integer_truep (wr_f_1));
15142 ASSERT_FALSE (integer_truep (f_m1));
15143 ASSERT_FALSE (integer_truep (wr_f_m1));
15144 ASSERT_FALSE (integer_truep (c_i_0));
15145 ASSERT_TRUE (integer_truep (c_i_1));
15146 ASSERT_FALSE (integer_truep (c_i_m1));
15147 ASSERT_FALSE (integer_truep (c_f_0));
15148 ASSERT_FALSE (integer_truep (c_f_1));
15149 ASSERT_FALSE (integer_truep (c_f_m1));
15151 /* Test integer_nonzerop. */
15152 ASSERT_FALSE (integer_nonzerop (i_0));
15153 ASSERT_FALSE (integer_nonzerop (wr_i_0));
15154 ASSERT_TRUE (integer_nonzerop (i_1));
15155 ASSERT_TRUE (integer_nonzerop (wr_i_1));
15156 ASSERT_TRUE (integer_nonzerop (i_m1));
15157 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15158 ASSERT_FALSE (integer_nonzerop (f_0));
15159 ASSERT_FALSE (integer_nonzerop (wr_f_0));
15160 ASSERT_FALSE (integer_nonzerop (f_1));
15161 ASSERT_FALSE (integer_nonzerop (wr_f_1));
15162 ASSERT_FALSE (integer_nonzerop (f_m1));
15163 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15164 ASSERT_FALSE (integer_nonzerop (c_i_0));
15165 ASSERT_TRUE (integer_nonzerop (c_i_1));
15166 ASSERT_TRUE (integer_nonzerop (c_i_m1));
15167 ASSERT_FALSE (integer_nonzerop (c_f_0));
15168 ASSERT_FALSE (integer_nonzerop (c_f_1));
15169 ASSERT_FALSE (integer_nonzerop (c_f_m1));
15171 /* Test real_zerop. */
15172 ASSERT_FALSE (real_zerop (i_0));
15173 ASSERT_FALSE (real_zerop (wr_i_0));
15174 ASSERT_FALSE (real_zerop (i_1));
15175 ASSERT_FALSE (real_zerop (wr_i_1));
15176 ASSERT_FALSE (real_zerop (i_m1));
15177 ASSERT_FALSE (real_zerop (wr_i_m1));
15178 ASSERT_TRUE (real_zerop (f_0));
15179 ASSERT_TRUE (real_zerop (wr_f_0));
15180 ASSERT_FALSE (real_zerop (f_1));
15181 ASSERT_FALSE (real_zerop (wr_f_1));
15182 ASSERT_FALSE (real_zerop (f_m1));
15183 ASSERT_FALSE (real_zerop (wr_f_m1));
15184 ASSERT_FALSE (real_zerop (c_i_0));
15185 ASSERT_FALSE (real_zerop (c_i_1));
15186 ASSERT_FALSE (real_zerop (c_i_m1));
15187 ASSERT_TRUE (real_zerop (c_f_0));
15188 ASSERT_FALSE (real_zerop (c_f_1));
15189 ASSERT_FALSE (real_zerop (c_f_m1));
15191 /* Test real_onep. */
15192 ASSERT_FALSE (real_onep (i_0));
15193 ASSERT_FALSE (real_onep (wr_i_0));
15194 ASSERT_FALSE (real_onep (i_1));
15195 ASSERT_FALSE (real_onep (wr_i_1));
15196 ASSERT_FALSE (real_onep (i_m1));
15197 ASSERT_FALSE (real_onep (wr_i_m1));
15198 ASSERT_FALSE (real_onep (f_0));
15199 ASSERT_FALSE (real_onep (wr_f_0));
15200 ASSERT_TRUE (real_onep (f_1));
15201 ASSERT_TRUE (real_onep (wr_f_1));
15202 ASSERT_FALSE (real_onep (f_m1));
15203 ASSERT_FALSE (real_onep (wr_f_m1));
15204 ASSERT_FALSE (real_onep (c_i_0));
15205 ASSERT_FALSE (real_onep (c_i_1));
15206 ASSERT_FALSE (real_onep (c_i_m1));
15207 ASSERT_FALSE (real_onep (c_f_0));
15208 ASSERT_TRUE (real_onep (c_f_1));
15209 ASSERT_FALSE (real_onep (c_f_m1));
15211 /* Test real_minus_onep. */
15212 ASSERT_FALSE (real_minus_onep (i_0));
15213 ASSERT_FALSE (real_minus_onep (wr_i_0));
15214 ASSERT_FALSE (real_minus_onep (i_1));
15215 ASSERT_FALSE (real_minus_onep (wr_i_1));
15216 ASSERT_FALSE (real_minus_onep (i_m1));
15217 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15218 ASSERT_FALSE (real_minus_onep (f_0));
15219 ASSERT_FALSE (real_minus_onep (wr_f_0));
15220 ASSERT_FALSE (real_minus_onep (f_1));
15221 ASSERT_FALSE (real_minus_onep (wr_f_1));
15222 ASSERT_TRUE (real_minus_onep (f_m1));
15223 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15224 ASSERT_FALSE (real_minus_onep (c_i_0));
15225 ASSERT_FALSE (real_minus_onep (c_i_1));
15226 ASSERT_FALSE (real_minus_onep (c_i_m1));
15227 ASSERT_FALSE (real_minus_onep (c_f_0));
15228 ASSERT_FALSE (real_minus_onep (c_f_1));
15229 ASSERT_TRUE (real_minus_onep (c_f_m1));
15231 /* Test zerop. */
15232 ASSERT_TRUE (zerop (i_0));
15233 ASSERT_TRUE (zerop (wr_i_0));
15234 ASSERT_FALSE (zerop (i_1));
15235 ASSERT_FALSE (zerop (wr_i_1));
15236 ASSERT_FALSE (zerop (i_m1));
15237 ASSERT_FALSE (zerop (wr_i_m1));
15238 ASSERT_TRUE (zerop (f_0));
15239 ASSERT_TRUE (zerop (wr_f_0));
15240 ASSERT_FALSE (zerop (f_1));
15241 ASSERT_FALSE (zerop (wr_f_1));
15242 ASSERT_FALSE (zerop (f_m1));
15243 ASSERT_FALSE (zerop (wr_f_m1));
15244 ASSERT_TRUE (zerop (c_i_0));
15245 ASSERT_FALSE (zerop (c_i_1));
15246 ASSERT_FALSE (zerop (c_i_m1));
15247 ASSERT_TRUE (zerop (c_f_0));
15248 ASSERT_FALSE (zerop (c_f_1));
15249 ASSERT_FALSE (zerop (c_f_m1));
15251 /* Test tree_expr_nonnegative_p. */
15252 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15253 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15254 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15255 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15256 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15257 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15258 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15259 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15260 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15261 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15262 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15263 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15264 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15265 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15266 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15267 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15268 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15269 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15271 /* Test tree_expr_nonzero_p. */
15272 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15273 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15274 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15275 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15276 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15277 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15279 /* Test integer_valued_real_p. */
15280 ASSERT_FALSE (integer_valued_real_p (i_0));
15281 ASSERT_TRUE (integer_valued_real_p (f_0));
15282 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15283 ASSERT_TRUE (integer_valued_real_p (f_1));
15284 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15286 /* Test integer_pow2p. */
15287 ASSERT_FALSE (integer_pow2p (i_0));
15288 ASSERT_TRUE (integer_pow2p (i_1));
15289 ASSERT_TRUE (integer_pow2p (wr_i_1));
15291 /* Test uniform_integer_cst_p. */
15292 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15293 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15294 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15295 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15296 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15297 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15298 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15299 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15300 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15301 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15302 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15303 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15304 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15305 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15306 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15307 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15308 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15309 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15312 /* Check that string escaping works correctly. */
15314 static void
15315 test_escaped_strings (void)
15317 int saved_cutoff;
15318 escaped_string msg;
15320 msg.escape (NULL);
15321 /* ASSERT_STREQ does not accept NULL as a valid test
15322 result, so we have to use ASSERT_EQ instead. */
15323 ASSERT_EQ (NULL, (const char *) msg);
15325 msg.escape ("");
15326 ASSERT_STREQ ("", (const char *) msg);
15328 msg.escape ("foobar");
15329 ASSERT_STREQ ("foobar", (const char *) msg);
15331 /* Ensure that we have -fmessage-length set to 0. */
15332 saved_cutoff = pp_line_cutoff (global_dc->printer);
15333 pp_line_cutoff (global_dc->printer) = 0;
15335 msg.escape ("foo\nbar");
15336 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15338 msg.escape ("\a\b\f\n\r\t\v");
15339 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15341 /* Now repeat the tests with -fmessage-length set to 5. */
15342 pp_line_cutoff (global_dc->printer) = 5;
15344 /* Note that the newline is not translated into an escape. */
15345 msg.escape ("foo\nbar");
15346 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15348 msg.escape ("\a\b\f\n\r\t\v");
15349 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15351 /* Restore the original message length setting. */
15352 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15355 /* Run all of the selftests within this file. */
15357 void
15358 tree_cc_tests ()
15360 test_integer_constants ();
15361 test_identifiers ();
15362 test_labels ();
15363 test_vector_cst_patterns ();
15364 test_location_wrappers ();
15365 test_predicates ();
15366 test_escaped_strings ();
15369 } // namespace selftest
15371 #endif /* CHECKING_P */
15373 #include "gt-tree.h"