1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2022 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72 #include "gomp-constants.h"
73 #include "dfp.h"
74 #include "asan.h"
75 #include "ubsan.h"
79 /* Names of tree components.
80 Used for printing out the tree and error messages. */
81 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
82 #define END_OF_BASE_TREE_CODES "@dummy",
84 static const char *const tree_code_name[] = {
85 #include "all-tree.def"
88 #undef DEFTREECODE
89 #undef END_OF_BASE_TREE_CODES
91 /* Each tree code class has an associated string representation.
92 These must correspond to the tree_code_class entries. */
94 const char *const tree_code_class_strings[] =
96 "exceptional",
97 "constant",
98 "type",
99 "declaration",
100 "reference",
101 "comparison",
102 "unary",
103 "binary",
104 "statement",
105 "vl_exp",
106 "expression"
109 /* obstack.[ch] explicitly declined to prototype this. */
110 extern int _obstack_allocated_p (struct obstack *h, void *obj);
112 /* Statistics-gathering stuff. */
114 static uint64_t tree_code_counts[MAX_TREE_CODES];
115 uint64_t tree_node_counts[(int) all_kinds];
116 uint64_t tree_node_sizes[(int) all_kinds];
118 /* Keep in sync with tree.h:enum tree_node_kind. */
119 static const char * const tree_node_kind_names[] = {
120 "decls",
121 "types",
122 "blocks",
123 "stmts",
124 "refs",
125 "exprs",
126 "constants",
127 "identifiers",
128 "vecs",
129 "binfos",
130 "ssa names",
131 "constructors",
132 "random kinds",
133 "lang_decl kinds",
134 "lang_type kinds",
135 "omp clauses",
138 /* Unique id for next decl created. */
139 static GTY(()) int next_decl_uid;
140 /* Unique id for next type created. */
141 static GTY(()) unsigned next_type_uid = 1;
142 /* Unique id for next debug decl created. Use negative numbers,
143 to catch erroneous uses. */
144 static GTY(()) int next_debug_decl_uid;
146 /* Since we cannot rehash a type after it is in the table, we have to
147 keep the hash code. */
149 struct GTY((for_user)) type_hash {
150 unsigned long hash;
151 tree type;
154 /* Initial size of the hash table (rounded to next prime). */
155 #define TYPE_HASH_INITIAL_SIZE 1000
157 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
159 static hashval_t hash (type_hash *t) { return t->hash; }
160 static bool equal (type_hash *a, type_hash *b);
162 static int
163 keep_cache_entry (type_hash *&t)
165 return ggc_marked_p (t->type);
169 /* Now here is the hash table. When recording a type, it is added to
170 the slot whose index is the hash code. Note that the hash table is
171 used for several kinds of types (function types, array types and
172 array index range types, for now). While all these live in the
173 same table, they are completely independent, and the hash code is
174 computed differently for each of these. */
176 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
178 /* Hash table and temporary node for larger integer const values. */
179 static GTY (()) tree int_cst_node;
181 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
183 static hashval_t hash (tree t);
184 static bool equal (tree x, tree y);
187 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
189 /* Class and variable for making sure that there is a single POLY_INT_CST
190 for a given value. */
191 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
193 typedef std::pair<tree, const poly_wide_int *> compare_type;
194 static hashval_t hash (tree t);
195 static bool equal (tree x, const compare_type &y);
198 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
200 /* Hash table for optimization flags and target option flags. Use the same
201 hash table for both sets of options. Nodes for building the current
202 optimization and target option nodes. The assumption is most of the time
203 the options created will already be in the hash table, so we avoid
204 allocating and freeing up a node repeatedly. */
205 static GTY (()) tree cl_optimization_node;
206 static GTY (()) tree cl_target_option_node;
208 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
210 static hashval_t hash (tree t);
211 static bool equal (tree x, tree y);
214 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
216 /* General tree->tree mapping structure for use in hash tables. */
219 static GTY ((cache))
220 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
222 static GTY ((cache))
223 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
225 static GTY ((cache))
226 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
228 static void set_type_quals (tree, int);
229 static void print_type_hash_statistics (void);
230 static void print_debug_expr_statistics (void);
231 static void print_value_expr_statistics (void);
233 tree global_trees[TI_MAX];
234 tree integer_types[itk_none];
236 bool int_n_enabled_p[NUM_INT_N_ENTS];
237 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
239 bool tree_contains_struct[MAX_TREE_CODES][64];
241 /* Number of operands for each OMP clause. */
242 unsigned const char omp_clause_num_ops[] =
244 0, /* OMP_CLAUSE_ERROR */
245 1, /* OMP_CLAUSE_PRIVATE */
246 1, /* OMP_CLAUSE_SHARED */
247 1, /* OMP_CLAUSE_FIRSTPRIVATE */
248 2, /* OMP_CLAUSE_LASTPRIVATE */
249 5, /* OMP_CLAUSE_REDUCTION */
250 5, /* OMP_CLAUSE_TASK_REDUCTION */
251 5, /* OMP_CLAUSE_IN_REDUCTION */
252 1, /* OMP_CLAUSE_COPYIN */
253 1, /* OMP_CLAUSE_COPYPRIVATE */
254 3, /* OMP_CLAUSE_LINEAR */
255 1, /* OMP_CLAUSE_AFFINITY */
256 2, /* OMP_CLAUSE_ALIGNED */
257 3, /* OMP_CLAUSE_ALLOCATE */
258 1, /* OMP_CLAUSE_DEPEND */
259 1, /* OMP_CLAUSE_NONTEMPORAL */
260 1, /* OMP_CLAUSE_UNIFORM */
261 1, /* OMP_CLAUSE_ENTER */
262 1, /* OMP_CLAUSE_LINK */
263 1, /* OMP_CLAUSE_DETACH */
264 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
265 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
266 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
267 1, /* OMP_CLAUSE_INCLUSIVE */
268 1, /* OMP_CLAUSE_EXCLUSIVE */
269 2, /* OMP_CLAUSE_FROM */
270 2, /* OMP_CLAUSE_TO */
271 2, /* OMP_CLAUSE_MAP */
272 1, /* OMP_CLAUSE_HAS_DEVICE_ADDR */
273 1, /* OMP_CLAUSE_DOACROSS */
274 2, /* OMP_CLAUSE__CACHE_ */
275 2, /* OMP_CLAUSE_GANG */
276 1, /* OMP_CLAUSE_ASYNC */
277 1, /* OMP_CLAUSE_WAIT */
278 0, /* OMP_CLAUSE_AUTO */
279 0, /* OMP_CLAUSE_SEQ */
280 1, /* OMP_CLAUSE__LOOPTEMP_ */
281 1, /* OMP_CLAUSE__REDUCTEMP_ */
282 1, /* OMP_CLAUSE__CONDTEMP_ */
283 1, /* OMP_CLAUSE__SCANTEMP_ */
284 1, /* OMP_CLAUSE_IF */
285 1, /* OMP_CLAUSE_NUM_THREADS */
286 1, /* OMP_CLAUSE_SCHEDULE */
287 0, /* OMP_CLAUSE_NOWAIT */
288 1, /* OMP_CLAUSE_ORDERED */
289 0, /* OMP_CLAUSE_DEFAULT */
290 3, /* OMP_CLAUSE_COLLAPSE */
291 0, /* OMP_CLAUSE_UNTIED */
292 1, /* OMP_CLAUSE_FINAL */
293 0, /* OMP_CLAUSE_MERGEABLE */
294 1, /* OMP_CLAUSE_DEVICE */
295 1, /* OMP_CLAUSE_DIST_SCHEDULE */
296 0, /* OMP_CLAUSE_INBRANCH */
297 0, /* OMP_CLAUSE_NOTINBRANCH */
298 2, /* OMP_CLAUSE_NUM_TEAMS */
299 1, /* OMP_CLAUSE_THREAD_LIMIT */
300 0, /* OMP_CLAUSE_PROC_BIND */
301 1, /* OMP_CLAUSE_SAFELEN */
302 1, /* OMP_CLAUSE_SIMDLEN */
303 0, /* OMP_CLAUSE_DEVICE_TYPE */
304 0, /* OMP_CLAUSE_FOR */
305 0, /* OMP_CLAUSE_PARALLEL */
306 0, /* OMP_CLAUSE_SECTIONS */
307 0, /* OMP_CLAUSE_TASKGROUP */
308 1, /* OMP_CLAUSE_PRIORITY */
309 1, /* OMP_CLAUSE_GRAINSIZE */
310 1, /* OMP_CLAUSE_NUM_TASKS */
311 0, /* OMP_CLAUSE_NOGROUP */
312 0, /* OMP_CLAUSE_THREADS */
313 0, /* OMP_CLAUSE_SIMD */
314 1, /* OMP_CLAUSE_HINT */
315 0, /* OMP_CLAUSE_DEFAULTMAP */
316 0, /* OMP_CLAUSE_ORDER */
317 0, /* OMP_CLAUSE_BIND */
318 1, /* OMP_CLAUSE_FILTER */
319 1, /* OMP_CLAUSE__SIMDUID_ */
320 0, /* OMP_CLAUSE__SIMT_ */
321 0, /* OMP_CLAUSE_INDEPENDENT */
322 1, /* OMP_CLAUSE_WORKER */
323 1, /* OMP_CLAUSE_VECTOR */
324 1, /* OMP_CLAUSE_NUM_GANGS */
325 1, /* OMP_CLAUSE_NUM_WORKERS */
326 1, /* OMP_CLAUSE_VECTOR_LENGTH */
327 3, /* OMP_CLAUSE_TILE */
328 0, /* OMP_CLAUSE_IF_PRESENT */
329 0, /* OMP_CLAUSE_FINALIZE */
330 0, /* OMP_CLAUSE_NOHOST */
333 const char * const omp_clause_code_name[] =
335 "error_clause",
336 "private",
337 "shared",
338 "firstprivate",
339 "lastprivate",
340 "reduction",
341 "task_reduction",
342 "in_reduction",
343 "copyin",
344 "copyprivate",
345 "linear",
346 "affinity",
347 "aligned",
348 "allocate",
349 "depend",
350 "nontemporal",
351 "uniform",
352 "enter",
353 "link",
354 "detach",
355 "use_device_ptr",
356 "use_device_addr",
357 "is_device_ptr",
358 "inclusive",
359 "exclusive",
360 "from",
361 "to",
362 "map",
363 "has_device_addr",
364 "doacross",
365 "_cache_",
366 "gang",
367 "async",
368 "wait",
369 "auto",
370 "seq",
371 "_looptemp_",
372 "_reductemp_",
373 "_condtemp_",
374 "_scantemp_",
375 "if",
376 "num_threads",
377 "schedule",
378 "nowait",
379 "ordered",
380 "default",
381 "collapse",
382 "untied",
383 "final",
384 "mergeable",
385 "device",
386 "dist_schedule",
387 "inbranch",
388 "notinbranch",
389 "num_teams",
390 "thread_limit",
391 "proc_bind",
392 "safelen",
393 "simdlen",
394 "device_type",
395 "for",
396 "parallel",
397 "sections",
398 "taskgroup",
399 "priority",
400 "grainsize",
401 "num_tasks",
402 "nogroup",
403 "threads",
404 "simd",
405 "hint",
406 "defaultmap",
407 "order",
408 "bind",
409 "filter",
410 "_simduid_",
411 "_simt_",
412 "independent",
413 "worker",
414 "vector",
415 "num_gangs",
416 "num_workers",
417 "vector_length",
418 "tile",
419 "if_present",
420 "finalize",
421 "nohost",
424 /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
425 clause names, but for use in diagnostics etc. we would like to use the "user"
426 clause names. */
428 const char *
429 user_omp_clause_code_name (tree clause, bool oacc)
431 /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
432 distinguish clauses as seen by the user. See also where front ends do
433 'build_omp_clause' with 'OMP_CLAUSE_MAP'. */
434 if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
435 switch (OMP_CLAUSE_MAP_KIND (clause))
437 case GOMP_MAP_FORCE_ALLOC:
438 case GOMP_MAP_ALLOC: return "create";
439 case GOMP_MAP_FORCE_TO:
440 case GOMP_MAP_TO: return "copyin";
441 case GOMP_MAP_FORCE_FROM:
442 case GOMP_MAP_FROM: return "copyout";
443 case GOMP_MAP_FORCE_TOFROM:
444 case GOMP_MAP_TOFROM: return "copy";
445 case GOMP_MAP_RELEASE: return "delete";
446 case GOMP_MAP_FORCE_PRESENT: return "present";
447 case GOMP_MAP_ATTACH: return "attach";
448 case GOMP_MAP_FORCE_DETACH:
449 case GOMP_MAP_DETACH: return "detach";
450 case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
451 case GOMP_MAP_LINK: return "link";
452 case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
453 default: break;
456 return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
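/* Illustrative sketch (editorial addition, not part of the upstream file):
   the omp_clause_num_ops and omp_clause_code_name tables above are indexed
   by the clause code, so a pass can walk a clause's operands generically.
   A hypothetical use:

     tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_NUM_THREADS);
     OMP_CLAUSE_OPERAND (c, 0) = build_int_cst (integer_type_node, 4);
     for (int i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (c)]; i++)
       debug_tree (OMP_CLAUSE_OPERAND (c, i));
     const char *name = omp_clause_code_name[OMP_CLAUSE_CODE (c)];  // "num_threads"

   debug_tree is only a stand-in for whatever the caller does with each
   operand.  */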
460 /* Return the tree node structure used by tree code CODE. */
462 static inline enum tree_node_structure_enum
463 tree_node_structure_for_code (enum tree_code code)
465 switch (TREE_CODE_CLASS (code))
467 case tcc_declaration:
468 switch (code)
470 case CONST_DECL: return TS_CONST_DECL;
471 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
472 case FIELD_DECL: return TS_FIELD_DECL;
473 case FUNCTION_DECL: return TS_FUNCTION_DECL;
474 case LABEL_DECL: return TS_LABEL_DECL;
475 case PARM_DECL: return TS_PARM_DECL;
476 case RESULT_DECL: return TS_RESULT_DECL;
477 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
478 case TYPE_DECL: return TS_TYPE_DECL;
479 case VAR_DECL: return TS_VAR_DECL;
480 default: return TS_DECL_NON_COMMON;
483 case tcc_type: return TS_TYPE_NON_COMMON;
485 case tcc_binary:
486 case tcc_comparison:
487 case tcc_expression:
488 case tcc_reference:
489 case tcc_statement:
490 case tcc_unary:
491 case tcc_vl_exp: return TS_EXP;
493 default: /* tcc_constant and tcc_exceptional */
494 break;
497 switch (code)
499 /* tcc_constant cases. */
500 case COMPLEX_CST: return TS_COMPLEX;
501 case FIXED_CST: return TS_FIXED_CST;
502 case INTEGER_CST: return TS_INT_CST;
503 case POLY_INT_CST: return TS_POLY_INT_CST;
504 case REAL_CST: return TS_REAL_CST;
505 case STRING_CST: return TS_STRING;
506 case VECTOR_CST: return TS_VECTOR;
507 case VOID_CST: return TS_TYPED;
509 /* tcc_exceptional cases. */
510 case BLOCK: return TS_BLOCK;
511 case CONSTRUCTOR: return TS_CONSTRUCTOR;
512 case ERROR_MARK: return TS_COMMON;
513 case IDENTIFIER_NODE: return TS_IDENTIFIER;
514 case OMP_CLAUSE: return TS_OMP_CLAUSE;
515 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
516 case PLACEHOLDER_EXPR: return TS_COMMON;
517 case SSA_NAME: return TS_SSA_NAME;
518 case STATEMENT_LIST: return TS_STATEMENT_LIST;
519 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
520 case TREE_BINFO: return TS_BINFO;
521 case TREE_LIST: return TS_LIST;
522 case TREE_VEC: return TS_VEC;
524 default:
525 gcc_unreachable ();
530 /* Initialize tree_contains_struct to describe the hierarchy of tree
531 nodes. */
533 static void
534 initialize_tree_contains_struct (void)
536 unsigned i;
538 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
540 enum tree_code code;
541 enum tree_node_structure_enum ts_code;
543 code = (enum tree_code) i;
544 ts_code = tree_node_structure_for_code (code);
546 /* Mark the TS structure itself. */
547 tree_contains_struct[code][ts_code] = 1;
549 /* Mark all the structures that TS is derived from. */
550 switch (ts_code)
552 case TS_TYPED:
553 case TS_BLOCK:
554 case TS_OPTIMIZATION:
555 case TS_TARGET_OPTION:
556 MARK_TS_BASE (code);
557 break;
559 case TS_COMMON:
560 case TS_INT_CST:
561 case TS_POLY_INT_CST:
562 case TS_REAL_CST:
563 case TS_FIXED_CST:
564 case TS_VECTOR:
565 case TS_STRING:
566 case TS_COMPLEX:
567 case TS_SSA_NAME:
568 case TS_CONSTRUCTOR:
569 case TS_EXP:
570 case TS_STATEMENT_LIST:
571 MARK_TS_TYPED (code);
572 break;
574 case TS_IDENTIFIER:
575 case TS_DECL_MINIMAL:
576 case TS_TYPE_COMMON:
577 case TS_LIST:
578 case TS_VEC:
579 case TS_BINFO:
580 case TS_OMP_CLAUSE:
581 MARK_TS_COMMON (code);
582 break;
584 case TS_TYPE_WITH_LANG_SPECIFIC:
585 MARK_TS_TYPE_COMMON (code);
586 break;
588 case TS_TYPE_NON_COMMON:
589 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
590 break;
592 case TS_DECL_COMMON:
593 MARK_TS_DECL_MINIMAL (code);
594 break;
596 case TS_DECL_WRTL:
597 case TS_CONST_DECL:
598 MARK_TS_DECL_COMMON (code);
599 break;
601 case TS_DECL_NON_COMMON:
602 MARK_TS_DECL_WITH_VIS (code);
603 break;
605 case TS_DECL_WITH_VIS:
606 case TS_PARM_DECL:
607 case TS_LABEL_DECL:
608 case TS_RESULT_DECL:
609 MARK_TS_DECL_WRTL (code);
610 break;
612 case TS_FIELD_DECL:
613 MARK_TS_DECL_COMMON (code);
614 break;
616 case TS_VAR_DECL:
617 MARK_TS_DECL_WITH_VIS (code);
618 break;
620 case TS_TYPE_DECL:
621 case TS_FUNCTION_DECL:
622 MARK_TS_DECL_NON_COMMON (code);
623 break;
625 case TS_TRANSLATION_UNIT_DECL:
626 MARK_TS_DECL_COMMON (code);
627 break;
629 default:
630 gcc_unreachable ();
634 /* Basic consistency checks for attributes used in fold. */
635 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
636 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
637 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
638 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
639 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
640 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
641 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
642 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
643 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
644 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
645 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
646 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
647 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
648 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
649 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
650 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
651 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
652 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
653 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
655 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
657 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
658 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
659 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
660 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
661 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
662 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
663 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
664 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
665 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
666 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
667 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
668 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
669 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
670 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
671 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
672 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
673 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
674 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
678 /* Init tree.cc. */
680 void
681 init_ttree (void)
683 /* Initialize the hash table of types. */
684 type_hash_table
685 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
687 debug_expr_for_decl
688 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
690 value_expr_for_decl
691 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
693 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
695 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
697 int_cst_node = make_int_cst (1, 1);
699 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
701 cl_optimization_node = make_node (OPTIMIZATION_NODE);
702 cl_target_option_node = make_node (TARGET_OPTION_NODE);
704 /* Initialize the tree_contains_struct array. */
705 initialize_tree_contains_struct ();
706 lang_hooks.init_ts ();
710 /* The name of the object as the assembler will see it (but before any
711 translations made by ASM_OUTPUT_LABELREF). Often this is the same
712 as DECL_NAME. It is an IDENTIFIER_NODE. */
713 tree
714 decl_assembler_name (tree decl)
716 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
717 lang_hooks.set_decl_assembler_name (decl);
718 return DECL_ASSEMBLER_NAME_RAW (decl);
721 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
722 (either of which may be NULL). Inform the FE, if this changes the
723 name. */
725 void
726 overwrite_decl_assembler_name (tree decl, tree name)
728 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
729 lang_hooks.overwrite_decl_assembler_name (decl, name);
732 /* Return true if DECL may need an assembler name to be set. */
734 static inline bool
735 need_assembler_name_p (tree decl)
737 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
738 Rule merging. This makes type_odr_p return true on those types during
739 LTO and, by comparing the mangled names, we can say which types are intended
740 to be equivalent across compilation units.
742 We do not store names of type_in_anonymous_namespace_p.
744 Record, union and enumeration types have linkage that allows us
745 to check type_in_anonymous_namespace_p. We do not mangle compound types
746 that can always be compared structurally.
748 Similarly for builtin types, we compare properties of their main variant.
749 A special case is integer types, where mangling does make differences
750 between char/signed char/unsigned char etc. Storing names for these lets
751 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
752 See cp/mangle.cc:write_builtin_type for details. */
754 if (TREE_CODE (decl) == TYPE_DECL)
756 if (DECL_NAME (decl)
757 && decl == TYPE_NAME (TREE_TYPE (decl))
758 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
759 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
760 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
761 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
762 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
763 && (type_with_linkage_p (TREE_TYPE (decl))
764 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
765 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
766 return !DECL_ASSEMBLER_NAME_SET_P (decl);
767 return false;
769 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
770 if (!VAR_OR_FUNCTION_DECL_P (decl))
771 return false;
773 /* If DECL already has its assembler name set, it does not need a
774 new one. */
775 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
776 || DECL_ASSEMBLER_NAME_SET_P (decl))
777 return false;
779 /* Abstract decls do not need an assembler name. */
780 if (DECL_ABSTRACT_P (decl))
781 return false;
783 /* For VAR_DECLs, only static, public and external symbols need an
784 assembler name. */
785 if (VAR_P (decl)
786 && !TREE_STATIC (decl)
787 && !TREE_PUBLIC (decl)
788 && !DECL_EXTERNAL (decl))
789 return false;
791 if (TREE_CODE (decl) == FUNCTION_DECL)
793 /* Do not set assembler name on builtins. Allow RTL expansion to
794 decide whether to expand inline or via a regular call. */
795 if (fndecl_built_in_p (decl)
796 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
797 return false;
799 /* Functions represented in the callgraph need an assembler name. */
800 if (cgraph_node::get (decl) != NULL)
801 return true;
803 /* Unused and not public functions don't need an assembler name. */
804 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
805 return false;
808 return true;
811 /* If T needs an assembler name, have one created for it. */
813 void
814 assign_assembler_name_if_needed (tree t)
816 if (need_assembler_name_p (t))
818 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
819 diagnostics that use input_location to show locus
820 information. The problem here is that, at this point,
821 input_location is generally anchored to the end of the file
822 (since the parser is long gone), so we don't have a good
823 position to pin it to.
825 To alleviate this problem, this uses the location of T's
826 declaration. Examples of this are
827 testsuite/g++.dg/template/cond2.C and
828 testsuite/g++.dg/template/pr35240.C. */
829 location_t saved_location = input_location;
830 input_location = DECL_SOURCE_LOCATION (t);
832 decl_assembler_name (t);
834 input_location = saved_location;
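/* Usage sketch (editorial addition): DECL_ASSEMBLER_NAME is computed lazily,
   so callers normally go through the accessor rather than the RAW macro.
   Assuming FNDECL is some FUNCTION_DECL:

     tree asmname = decl_assembler_name (fndecl);   // mangles on first use
     const char *str = IDENTIFIER_POINTER (asmname);

   Second and later calls simply return the cached IDENTIFIER_NODE.  */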
838 /* When the target supports COMDAT groups, this indicates which group the
839 DECL is associated with. This can be either an IDENTIFIER_NODE or a
840 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
841 tree
842 decl_comdat_group (const_tree node)
844 struct symtab_node *snode = symtab_node::get (node);
845 if (!snode)
846 return NULL;
847 return snode->get_comdat_group ();
850 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
851 tree
852 decl_comdat_group_id (const_tree node)
854 struct symtab_node *snode = symtab_node::get (node);
855 if (!snode)
856 return NULL;
857 return snode->get_comdat_group_id ();
860 /* When the target supports named sections, return the name of the section
861 NODE is placed in as a string, or NULL if it is in no section. */
862 const char *
863 decl_section_name (const_tree node)
865 struct symtab_node *snode = symtab_node::get (node);
866 if (!snode)
867 return NULL;
868 return snode->get_section ();
871 /* Set section name of NODE to the string VALUE; a NULL VALUE clears
872 any previously set section name. */
873 void
874 set_decl_section_name (tree node, const char *value)
876 struct symtab_node *snode;
878 if (value == NULL)
880 snode = symtab_node::get (node);
881 if (!snode)
882 return;
884 else if (VAR_P (node))
885 snode = varpool_node::get_create (node);
886 else
887 snode = cgraph_node::get_create (node);
888 snode->set_section (value);
891 /* Set section name of NODE to match the section name of OTHER.
893 set_decl_section_name (decl, other) is equivalent to
894 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
895 efficient. */
896 void
897 set_decl_section_name (tree decl, const_tree other)
899 struct symtab_node *other_node = symtab_node::get (other);
900 if (other_node)
902 struct symtab_node *decl_node;
903 if (VAR_P (decl))
904 decl_node = varpool_node::get_create (decl);
905 else
906 decl_node = cgraph_node::get_create (decl);
907 decl_node->set_section (*other_node);
909 else
911 struct symtab_node *decl_node = symtab_node::get (decl);
912 if (!decl_node)
913 return;
914 decl_node->set_section (NULL);
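/* Usage sketch (editorial addition, names are hypothetical): placing a
   variable into a named section and then copying that placement:

     set_decl_section_name (var_decl, ".mydata");       // string overload
     set_decl_section_name (other_decl, var_decl);      // copy from OTHER
     const char *sec = decl_section_name (other_decl);  // ".mydata"

   Passing NULL as the string clears any existing section assignment.  */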
918 /* Return TLS model of a variable NODE. */
919 enum tls_model
920 decl_tls_model (const_tree node)
922 struct varpool_node *snode = varpool_node::get (node);
923 if (!snode)
924 return TLS_MODEL_NONE;
925 return snode->tls_model;
928 /* Set TLS model of variable NODE to MODEL. */
929 void
930 set_decl_tls_model (tree node, enum tls_model model)
932 struct varpool_node *vnode;
934 if (model == TLS_MODEL_NONE)
936 vnode = varpool_node::get (node);
937 if (!vnode)
938 return;
940 else
941 vnode = varpool_node::get_create (node);
942 vnode->tls_model = model;
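/* Usage sketch (editorial addition): the TLS model lives on the varpool node,
   not on the VAR_DECL itself, so both helpers go through varpool_node.
   Assuming TLS_VAR is a thread-local VAR_DECL:

     set_decl_tls_model (tls_var, TLS_MODEL_INITIAL_EXEC);
     gcc_assert (decl_tls_model (tls_var) == TLS_MODEL_INITIAL_EXEC);

   Setting TLS_MODEL_NONE on a variable without a varpool node is a no-op.  */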
945 /* Compute the number of bytes occupied by a tree with code CODE.
946 This function cannot be used for nodes that have variable sizes,
947 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
948 size_t
949 tree_code_size (enum tree_code code)
951 switch (TREE_CODE_CLASS (code))
953 case tcc_declaration: /* A decl node */
954 switch (code)
956 case FIELD_DECL: return sizeof (tree_field_decl);
957 case PARM_DECL: return sizeof (tree_parm_decl);
958 case VAR_DECL: return sizeof (tree_var_decl);
959 case LABEL_DECL: return sizeof (tree_label_decl);
960 case RESULT_DECL: return sizeof (tree_result_decl);
961 case CONST_DECL: return sizeof (tree_const_decl);
962 case TYPE_DECL: return sizeof (tree_type_decl);
963 case FUNCTION_DECL: return sizeof (tree_function_decl);
964 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
965 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
966 case NAMESPACE_DECL:
967 case IMPORTED_DECL:
968 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
969 default:
970 gcc_checking_assert (code >= NUM_TREE_CODES);
971 return lang_hooks.tree_size (code);
974 case tcc_type: /* a type node */
975 switch (code)
977 case OFFSET_TYPE:
978 case ENUMERAL_TYPE:
979 case BOOLEAN_TYPE:
980 case INTEGER_TYPE:
981 case REAL_TYPE:
982 case OPAQUE_TYPE:
983 case POINTER_TYPE:
984 case REFERENCE_TYPE:
985 case NULLPTR_TYPE:
986 case FIXED_POINT_TYPE:
987 case COMPLEX_TYPE:
988 case VECTOR_TYPE:
989 case ARRAY_TYPE:
990 case RECORD_TYPE:
991 case UNION_TYPE:
992 case QUAL_UNION_TYPE:
993 case VOID_TYPE:
994 case FUNCTION_TYPE:
995 case METHOD_TYPE:
996 case LANG_TYPE: return sizeof (tree_type_non_common);
997 default:
998 gcc_checking_assert (code >= NUM_TREE_CODES);
999 return lang_hooks.tree_size (code);
1002 case tcc_reference: /* a reference */
1003 case tcc_expression: /* an expression */
1004 case tcc_statement: /* an expression with side effects */
1005 case tcc_comparison: /* a comparison expression */
1006 case tcc_unary: /* a unary arithmetic expression */
1007 case tcc_binary: /* a binary arithmetic expression */
1008 return (sizeof (struct tree_exp)
1009 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
1011 case tcc_constant: /* a constant */
1012 switch (code)
1014 case VOID_CST: return sizeof (tree_typed);
1015 case INTEGER_CST: gcc_unreachable ();
1016 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
1017 case REAL_CST: return sizeof (tree_real_cst);
1018 case FIXED_CST: return sizeof (tree_fixed_cst);
1019 case COMPLEX_CST: return sizeof (tree_complex);
1020 case VECTOR_CST: gcc_unreachable ();
1021 case STRING_CST: gcc_unreachable ();
1022 default:
1023 gcc_checking_assert (code >= NUM_TREE_CODES);
1024 return lang_hooks.tree_size (code);
1027 case tcc_exceptional: /* something random, like an identifier. */
1028 switch (code)
1030 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
1031 case TREE_LIST: return sizeof (tree_list);
1033 case ERROR_MARK:
1034 case PLACEHOLDER_EXPR: return sizeof (tree_common);
1036 case TREE_VEC: gcc_unreachable ();
1037 case OMP_CLAUSE: gcc_unreachable ();
1039 case SSA_NAME: return sizeof (tree_ssa_name);
1041 case STATEMENT_LIST: return sizeof (tree_statement_list);
1042 case BLOCK: return sizeof (struct tree_block);
1043 case CONSTRUCTOR: return sizeof (tree_constructor);
1044 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
1045 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
1047 default:
1048 gcc_checking_assert (code >= NUM_TREE_CODES);
1049 return lang_hooks.tree_size (code);
1052 default:
1053 gcc_unreachable ();
1057 /* Compute the number of bytes occupied by NODE. This routine only
1058 looks at TREE_CODE, except for those nodes that have variable sizes. */
1059 size_t
1060 tree_size (const_tree node)
1062 const enum tree_code code = TREE_CODE (node);
1063 switch (code)
1065 case INTEGER_CST:
1066 return (sizeof (struct tree_int_cst)
1067 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1069 case TREE_BINFO:
1070 return (offsetof (struct tree_binfo, base_binfos)
1071 + vec<tree, va_gc>
1072 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1074 case TREE_VEC:
1075 return (sizeof (struct tree_vec)
1076 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1078 case VECTOR_CST:
1079 return (sizeof (struct tree_vector)
1080 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1082 case STRING_CST:
1083 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1085 case OMP_CLAUSE:
1086 return (sizeof (struct tree_omp_clause)
1087 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1088 * sizeof (tree));
1090 default:
1091 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1092 return (sizeof (struct tree_exp)
1093 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1094 else
1095 return tree_code_size (code);
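/* Editorial sketch of the size computation described above, using
   hypothetical nodes: fixed-size codes go through tree_code_size, while
   variable-sized nodes (INTEGER_CST, TREE_VEC, STRING_CST, ...) need
   tree_size on a concrete node:

     size_t s1 = tree_code_size (PLUS_EXPR);        // fixed per code
     tree one = build_int_cst (integer_type_node, 1);
     size_t s2 = tree_size (one);                   // depends on EXT_NUNITS

   Calling tree_code_size (INTEGER_CST) would hit the gcc_unreachable
   above.  */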
1099 /* Return tree node kind based on tree CODE. */
1101 static tree_node_kind
1102 get_stats_node_kind (enum tree_code code)
1104 enum tree_code_class type = TREE_CODE_CLASS (code);
1106 switch (type)
1108 case tcc_declaration: /* A decl node */
1109 return d_kind;
1110 case tcc_type: /* a type node */
1111 return t_kind;
1112 case tcc_statement: /* an expression with side effects */
1113 return s_kind;
1114 case tcc_reference: /* a reference */
1115 return r_kind;
1116 case tcc_expression: /* an expression */
1117 case tcc_comparison: /* a comparison expression */
1118 case tcc_unary: /* a unary arithmetic expression */
1119 case tcc_binary: /* a binary arithmetic expression */
1120 return e_kind;
1121 case tcc_constant: /* a constant */
1122 return c_kind;
1123 case tcc_exceptional: /* something random, like an identifier. */
1124 switch (code)
1126 case IDENTIFIER_NODE:
1127 return id_kind;
1128 case TREE_VEC:
1129 return vec_kind;
1130 case TREE_BINFO:
1131 return binfo_kind;
1132 case SSA_NAME:
1133 return ssa_name_kind;
1134 case BLOCK:
1135 return b_kind;
1136 case CONSTRUCTOR:
1137 return constr_kind;
1138 case OMP_CLAUSE:
1139 return omp_clause_kind;
1140 default:
1141 return x_kind;
1143 break;
1144 case tcc_vl_exp:
1145 return e_kind;
1146 default:
1147 gcc_unreachable ();
1151 /* Record interesting allocation statistics for a tree node with CODE
1152 and LENGTH. */
1154 static void
1155 record_node_allocation_statistics (enum tree_code code, size_t length)
1157 if (!GATHER_STATISTICS)
1158 return;
1160 tree_node_kind kind = get_stats_node_kind (code);
1162 tree_code_counts[(int) code]++;
1163 tree_node_counts[(int) kind]++;
1164 tree_node_sizes[(int) kind] += length;
1167 /* Allocate and return a new UID from the DECL_UID namespace. */
1170 allocate_decl_uid (void)
1172 return next_decl_uid++;
1175 /* Return a newly allocated node of code CODE. For decl and type
1176 nodes, some other fields are initialized. The rest of the node is
1177 initialized to zero. This function cannot be used for TREE_VEC,
1178 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1179 tree_code_size.
1181 Achoo! I got a code in the node. */
1183 tree
1184 make_node (enum tree_code code MEM_STAT_DECL)
1186 tree t;
1187 enum tree_code_class type = TREE_CODE_CLASS (code);
1188 size_t length = tree_code_size (code);
1190 record_node_allocation_statistics (code, length);
1192 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1193 TREE_SET_CODE (t, code);
1195 switch (type)
1197 case tcc_statement:
1198 if (code != DEBUG_BEGIN_STMT)
1199 TREE_SIDE_EFFECTS (t) = 1;
1200 break;
1202 case tcc_declaration:
1203 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1205 if (code == FUNCTION_DECL)
1207 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1208 SET_DECL_MODE (t, FUNCTION_MODE);
1210 else
1211 SET_DECL_ALIGN (t, 1);
1213 DECL_SOURCE_LOCATION (t) = input_location;
1214 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1215 DECL_UID (t) = --next_debug_decl_uid;
1216 else
1218 DECL_UID (t) = allocate_decl_uid ();
1219 SET_DECL_PT_UID (t, -1);
1221 if (TREE_CODE (t) == LABEL_DECL)
1222 LABEL_DECL_UID (t) = -1;
1224 break;
1226 case tcc_type:
1227 TYPE_UID (t) = next_type_uid++;
1228 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1229 TYPE_USER_ALIGN (t) = 0;
1230 TYPE_MAIN_VARIANT (t) = t;
1231 TYPE_CANONICAL (t) = t;
1233 /* Default to no attributes for type, but let target change that. */
1234 TYPE_ATTRIBUTES (t) = NULL_TREE;
1235 targetm.set_default_type_attributes (t);
1237 /* We have not yet computed the alias set for this type. */
1238 TYPE_ALIAS_SET (t) = -1;
1239 break;
1241 case tcc_constant:
1242 TREE_CONSTANT (t) = 1;
1243 break;
1245 case tcc_expression:
1246 switch (code)
1248 case INIT_EXPR:
1249 case MODIFY_EXPR:
1250 case VA_ARG_EXPR:
1251 case PREDECREMENT_EXPR:
1252 case PREINCREMENT_EXPR:
1253 case POSTDECREMENT_EXPR:
1254 case POSTINCREMENT_EXPR:
1255 /* All of these have side-effects, no matter what their
1256 operands are. */
1257 TREE_SIDE_EFFECTS (t) = 1;
1258 break;
1260 default:
1261 break;
1263 break;
1265 case tcc_exceptional:
1266 switch (code)
1268 case TARGET_OPTION_NODE:
1269 TREE_TARGET_OPTION(t)
1270 = ggc_cleared_alloc<struct cl_target_option> ();
1271 break;
1273 case OPTIMIZATION_NODE:
1274 TREE_OPTIMIZATION (t)
1275 = ggc_cleared_alloc<struct cl_optimization> ();
1276 break;
1278 default:
1279 break;
1281 break;
1283 default:
1284 /* Other classes need no special treatment. */
1285 break;
1288 return t;
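/* Editorial sketch, assuming a front-end context: make_node gives a blank
   node of the requested code with only the class-specific defaults above
   filled in; a decl built this way still needs its name and type:

     tree id   = get_identifier ("tmp");
     tree decl = make_node (VAR_DECL);
     DECL_NAME (decl) = id;
     TREE_TYPE (decl) = integer_type_node;

   Most callers use higher-level helpers such as build_decl, which wrap
   make_node and set these fields in one step.  */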
1291 /* Free tree node. */
1293 void
1294 free_node (tree node)
1296 enum tree_code code = TREE_CODE (node);
1297 if (GATHER_STATISTICS)
1299 enum tree_node_kind kind = get_stats_node_kind (code);
1301 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1302 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1303 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1305 tree_code_counts[(int) TREE_CODE (node)]--;
1306 tree_node_counts[(int) kind]--;
1307 tree_node_sizes[(int) kind] -= tree_size (node);
1309 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1310 vec_free (CONSTRUCTOR_ELTS (node));
1311 else if (code == BLOCK)
1312 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1313 else if (code == TREE_BINFO)
1314 vec_free (BINFO_BASE_ACCESSES (node));
1315 else if (code == OPTIMIZATION_NODE)
1316 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1317 else if (code == TARGET_OPTION_NODE)
1318 cl_target_option_free (TREE_TARGET_OPTION (node));
1319 ggc_free (node);
1322 /* Return a new node with the same contents as NODE except that its
1323 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1325 tree
1326 copy_node (tree node MEM_STAT_DECL)
1328 tree t;
1329 enum tree_code code = TREE_CODE (node);
1330 size_t length;
1332 gcc_assert (code != STATEMENT_LIST);
1334 length = tree_size (node);
1335 record_node_allocation_statistics (code, length);
1336 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1337 memcpy (t, node, length);
1339 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1340 TREE_CHAIN (t) = 0;
1341 TREE_ASM_WRITTEN (t) = 0;
1342 TREE_VISITED (t) = 0;
1344 if (TREE_CODE_CLASS (code) == tcc_declaration)
1346 if (code == DEBUG_EXPR_DECL)
1347 DECL_UID (t) = --next_debug_decl_uid;
1348 else
1350 DECL_UID (t) = allocate_decl_uid ();
1351 if (DECL_PT_UID_SET_P (node))
1352 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1354 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1355 && DECL_HAS_VALUE_EXPR_P (node))
1357 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1358 DECL_HAS_VALUE_EXPR_P (t) = 1;
1360 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1361 if (VAR_P (node))
1363 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1364 t->decl_with_vis.symtab_node = NULL;
1366 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1368 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1369 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1371 if (TREE_CODE (node) == FUNCTION_DECL)
1373 DECL_STRUCT_FUNCTION (t) = NULL;
1374 t->decl_with_vis.symtab_node = NULL;
1377 else if (TREE_CODE_CLASS (code) == tcc_type)
1379 TYPE_UID (t) = next_type_uid++;
1380 /* The following is so that the debug code for
1381 the copy is different from the original type.
1382 The two statements usually duplicate each other
1383 (because they clear fields of the same union),
1384 but the optimizer should catch that. */
1385 TYPE_SYMTAB_ADDRESS (t) = 0;
1386 TYPE_SYMTAB_DIE (t) = 0;
1388 /* Do not copy the values cache. */
1389 if (TYPE_CACHED_VALUES_P (t))
1391 TYPE_CACHED_VALUES_P (t) = 0;
1392 TYPE_CACHED_VALUES (t) = NULL_TREE;
1395 else if (code == TARGET_OPTION_NODE)
1397 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1398 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1399 sizeof (struct cl_target_option));
1401 else if (code == OPTIMIZATION_NODE)
1403 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1404 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1405 sizeof (struct cl_optimization));
1408 return t;
1411 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1412 For example, this can copy a list made of TREE_LIST nodes. */
1414 tree
1415 copy_list (tree list)
1417 tree head;
1418 tree prev, next;
1420 if (list == 0)
1421 return 0;
1423 head = prev = copy_node (list);
1424 next = TREE_CHAIN (list);
1425 while (next)
1427 TREE_CHAIN (prev) = copy_node (next);
1428 prev = TREE_CHAIN (prev);
1429 next = TREE_CHAIN (next);
1431 return head;
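/* Editorial sketch: copy_node performs a shallow copy (fresh DECL_UID /
   TYPE_UID, cleared TREE_CHAIN), and copy_list applies it along a chain of
   TREE_LIST nodes.  With hypothetical trees A and B:

     tree lst  = tree_cons (NULL_TREE, a, tree_cons (NULL_TREE, b, NULL_TREE));
     tree copy = copy_list (lst);
     gcc_assert (TREE_VALUE (copy) == a);   // values are shared
     gcc_assert (copy != lst);              // list cells are not

   Only the TREE_LIST cells are duplicated; the values they point to are
   not.  */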
1435 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1436 INTEGER_CST with value CST and type TYPE. */
1438 static unsigned int
1439 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1441 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1442 /* We need extra HWIs if CST is an unsigned integer with its
1443 upper bit set. */
1444 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1445 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1446 return cst.get_len ();
1449 /* Return a new INTEGER_CST with value CST and type TYPE. */
1451 static tree
1452 build_new_int_cst (tree type, const wide_int &cst)
1454 unsigned int len = cst.get_len ();
1455 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1456 tree nt = make_int_cst (len, ext_len);
1458 if (len < ext_len)
1460 --ext_len;
1461 TREE_INT_CST_ELT (nt, ext_len)
1462 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1463 for (unsigned int i = len; i < ext_len; ++i)
1464 TREE_INT_CST_ELT (nt, i) = -1;
1466 else if (TYPE_UNSIGNED (type)
1467 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1469 len--;
1470 TREE_INT_CST_ELT (nt, len)
1471 = zext_hwi (cst.elt (len),
1472 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1475 for (unsigned int i = 0; i < len; i++)
1476 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1477 TREE_TYPE (nt) = type;
1478 return nt;
1481 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1483 static tree
1484 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1485 CXX_MEM_STAT_INFO)
1487 size_t length = sizeof (struct tree_poly_int_cst);
1488 record_node_allocation_statistics (POLY_INT_CST, length);
1490 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1492 TREE_SET_CODE (t, POLY_INT_CST);
1493 TREE_CONSTANT (t) = 1;
1494 TREE_TYPE (t) = type;
1495 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1496 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1497 return t;
1500 /* Create a constant tree that contains CST sign-extended to TYPE. */
1502 tree
1503 build_int_cst (tree type, poly_int64 cst)
1505 /* Support legacy code. */
1506 if (!type)
1507 type = integer_type_node;
1509 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1512 /* Create a constant tree that contains CST zero-extended to TYPE. */
1514 tree
1515 build_int_cstu (tree type, poly_uint64 cst)
1517 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1520 /* Create a constant tree that contains CST sign-extended to TYPE. */
1522 tree
1523 build_int_cst_type (tree type, poly_int64 cst)
1525 gcc_assert (type);
1526 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
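/* Editorial sketch of the constant constructors above (values are arbitrary
   examples):

     tree a = build_int_cst (integer_type_node, -5);    // sign-extended
     tree b = build_int_cstu (size_type_node, 42);      // zero-extended
     tree c = build_int_cst_type (char_type_node, 7);   // TYPE required

   All three funnel into wide_int_to_tree, so equal values of the same type
   end up as the same shared INTEGER_CST.  */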
1529 /* Constructs a tree of type TYPE with the value given by CST. Signedness
1530 of CST is assumed to be the same as the signedness of TYPE. */
1532 tree
1533 double_int_to_tree (tree type, double_int cst)
1535 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1538 /* We force the wide_int CST to the range of the type TYPE by sign or
1539 zero extending it. OVERFLOWABLE indicates whether we are interested in
1540 overflow of the value: when >0 we are only interested in signed
1541 overflow, when <0 we are interested in any overflow. OVERFLOWED
1542 indicates whether overflow has already occurred. We force
1544 the value to be within the range of the type (by setting to 0 or 1 all
1545 the bits outside the type's range). We set TREE_OVERFLOW if
1546 OVERFLOWED is nonzero,
1547 or OVERFLOWABLE is >0 and signed overflow occurs,
1548 or OVERFLOWABLE is <0 and any overflow occurs.
1549 We return a new tree node for the extended wide_int. The node
1550 is shared if no overflow flags are set. */
1553 tree
1554 force_fit_type (tree type, const poly_wide_int_ref &cst,
1555 int overflowable, bool overflowed)
1557 signop sign = TYPE_SIGN (type);
1559 /* If we need to set overflow flags, return a new unshared node. */
1560 if (overflowed || !wi::fits_to_tree_p (cst, type))
1562 if (overflowed
1563 || overflowable < 0
1564 || (overflowable > 0 && sign == SIGNED))
1566 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1567 sign);
1568 tree t;
1569 if (tmp.is_constant ())
1570 t = build_new_int_cst (type, tmp.coeffs[0]);
1571 else
1573 tree coeffs[NUM_POLY_INT_COEFFS];
1574 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1576 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1577 TREE_OVERFLOW (coeffs[i]) = 1;
1579 t = build_new_poly_int_cst (type, coeffs);
1581 TREE_OVERFLOW (t) = 1;
1582 return t;
1586 /* Else build a shared node. */
1587 return wide_int_to_tree (type, cst);
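/* Editorial sketch (hypothetical values): forcing an out-of-range value into
   a narrow type sets TREE_OVERFLOW on a fresh, unshared node:

     tree t = force_fit_type (signed_char_type_node,
                              wi::shwi (200, TYPE_PRECISION (integer_type_node)),
                              1, false);
     gcc_assert (TREE_OVERFLOW (t));   // 200 does not fit a signed char

   In-range values without overflow flags fall through to the shared
   cache via wide_int_to_tree.  */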
1590 /* These are the hash table functions for the hash table of INTEGER_CST
1591 nodes of a sizetype. */
1593 /* Return the hash code X, an INTEGER_CST. */
1595 hashval_t
1596 int_cst_hasher::hash (tree x)
1598 const_tree const t = x;
1599 hashval_t code = TYPE_UID (TREE_TYPE (t));
1600 int i;
1602 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1603 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1605 return code;
1608 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1609 is the same as that given by *Y, also an INTEGER_CST tree node. */
1611 bool
1612 int_cst_hasher::equal (tree x, tree y)
1614 const_tree const xt = x;
1615 const_tree const yt = y;
1617 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1618 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1619 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1620 return false;
1622 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1623 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1624 return false;
1626 return true;
1629 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1630 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1631 number of slots that can be cached for the type. */
1633 static inline tree
1634 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1635 int slot, int max_slots)
1637 gcc_checking_assert (slot >= 0);
1638 /* Initialize cache. */
1639 if (!TYPE_CACHED_VALUES_P (type))
1641 TYPE_CACHED_VALUES_P (type) = 1;
1642 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1644 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1645 if (!t)
1647 /* Create a new shared int. */
1648 t = build_new_int_cst (type, cst);
1649 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1651 return t;
1654 /* Create an INT_CST node of TYPE and value CST.
1655 The returned node is always shared. For small integers we use a
1656 per-type vector cache, for larger ones we use a single hash table.
1657 The value is extended from its precision according to the sign of
1658 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1659 the upper bits and ensures that hashing and value equality based
1660 upon the underlying HOST_WIDE_INTs works without masking. */
1662 static tree
1663 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1665 tree t;
1666 int ix = -1;
1667 int limit = 0;
1669 gcc_assert (type);
1670 unsigned int prec = TYPE_PRECISION (type);
1671 signop sgn = TYPE_SIGN (type);
1673 /* Verify that everything is canonical. */
1674 int l = pcst.get_len ();
1675 if (l > 1)
1677 if (pcst.elt (l - 1) == 0)
1678 gcc_checking_assert (pcst.elt (l - 2) < 0);
1679 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1680 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1683 wide_int cst = wide_int::from (pcst, prec, sgn);
1684 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1686 enum tree_code code = TREE_CODE (type);
1687 if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1689 /* Cache NULL pointer and zero bounds. */
1690 if (cst == 0)
1691 ix = 0;
1692 /* Cache upper bounds of pointers. */
1693 else if (cst == wi::max_value (prec, sgn))
1694 ix = 1;
1695 /* Cache 1 which is used for a non-zero range. */
1696 else if (cst == 1)
1697 ix = 2;
1699 if (ix >= 0)
1701 t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1702 /* Make sure no one is clobbering the shared constant. */
1703 gcc_checking_assert (TREE_TYPE (t) == type
1704 && cst == wi::to_wide (t));
1705 return t;
1708 if (ext_len == 1)
1710 /* We just need to store a single HOST_WIDE_INT. */
1711 HOST_WIDE_INT hwi;
1712 if (TYPE_UNSIGNED (type))
1713 hwi = cst.to_uhwi ();
1714 else
1715 hwi = cst.to_shwi ();
1717 switch (code)
1719 case NULLPTR_TYPE:
1720 gcc_assert (hwi == 0);
1721 /* Fallthru. */
1723 case POINTER_TYPE:
1724 case REFERENCE_TYPE:
1725 /* Ignore pointers, as they were already handled above. */
1726 break;
1728 case BOOLEAN_TYPE:
1729 /* Cache false or true. */
1730 limit = 2;
1731 if (IN_RANGE (hwi, 0, 1))
1732 ix = hwi;
1733 break;
1735 case INTEGER_TYPE:
1736 case OFFSET_TYPE:
1737 if (TYPE_SIGN (type) == UNSIGNED)
1739 /* Cache [0, N). */
1740 limit = param_integer_share_limit;
1741 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1742 ix = hwi;
1744 else
1746 /* Cache [-1, N). */
1747 limit = param_integer_share_limit + 1;
1748 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1749 ix = hwi + 1;
1751 break;
1753 case ENUMERAL_TYPE:
1754 break;
1756 default:
1757 gcc_unreachable ();
1760 if (ix >= 0)
1762 t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1763 /* Make sure no one is clobbering the shared constant. */
1764 gcc_checking_assert (TREE_TYPE (t) == type
1765 && TREE_INT_CST_NUNITS (t) == 1
1766 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1767 && TREE_INT_CST_EXT_NUNITS (t) == 1
1768 && TREE_INT_CST_ELT (t, 0) == hwi);
1769 return t;
1771 else
1773 /* Use the cache of larger shared ints, using int_cst_node as
1774 a temporary. */
1776 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1777 TREE_TYPE (int_cst_node) = type;
1779 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1780 t = *slot;
1781 if (!t)
1783 /* Insert this one into the hash table. */
1784 t = int_cst_node;
1785 *slot = t;
1786 /* Make a new node for next time round. */
1787 int_cst_node = make_int_cst (1, 1);
1791 else
1793 /* The value either hashes properly or we drop it on the floor
1794 for the gc to take care of. There will not be enough of them
1795 to worry about. */
1797 tree nt = build_new_int_cst (type, cst);
1798 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1799 t = *slot;
1800 if (!t)
1802 /* Insert this one into the hash table. */
1803 t = nt;
1804 *slot = t;
1806 else
1807 ggc_free (nt);
1810 return t;
1813 hashval_t
1814 poly_int_cst_hasher::hash (tree t)
1816 inchash::hash hstate;
1818 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1819 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1820 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1822 return hstate.end ();
1825 bool
1826 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1828 if (TREE_TYPE (x) != y.first)
1829 return false;
1830 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1831 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1832 return false;
1833 return true;
1836 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1837 The elements must also have type TYPE. */
1839 tree
1840 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1842 unsigned int prec = TYPE_PRECISION (type);
1843 gcc_assert (prec <= values.coeffs[0].get_precision ());
1844 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1846 inchash::hash h;
1847 h.add_int (TYPE_UID (type));
1848 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1849 h.add_wide_int (c.coeffs[i]);
1850 poly_int_cst_hasher::compare_type comp (type, &c);
1851 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1852 INSERT);
1853 if (*slot == NULL_TREE)
1855 tree coeffs[NUM_POLY_INT_COEFFS];
1856 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1857 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1858 *slot = build_new_poly_int_cst (type, coeffs);
1860 return *slot;
1863 /* Create a constant tree with value VALUE in type TYPE. */
1865 tree
1866 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1868 if (value.is_constant ())
1869 return wide_int_to_tree_1 (type, value.coeffs[0]);
1870 return build_poly_int_cst (type, value);
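/* Editorial sketch of the sharing guarantee described above: small values
   come from the per-type TYPE_CACHED_VALUES vector, so repeated requests
   return pointer-identical nodes:

     tree x = build_int_cst (integer_type_node, 1);
     tree y = build_int_cst (integer_type_node, 1);
     gcc_assert (x == y);   // same shared INTEGER_CST

   Larger values go through int_cst_hash_table and are likewise shared,
   just found by hashing instead of direct indexing.  */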
1873 /* Insert INTEGER_CST T into a cache of integer constants. And return
1874 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1875 is false, and T falls into the type's 'smaller values' range, there
1876 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1877 or the value is large, should an existing entry exist, it is
1878 returned (rather than inserting T). */
1880 tree
1881 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1883 tree type = TREE_TYPE (t);
1884 int ix = -1;
1885 int limit = 0;
1886 int prec = TYPE_PRECISION (type);
1888 gcc_assert (!TREE_OVERFLOW (t));
1890 /* The caching indices here must match those in
1891 wide_int_to_tree_1. */
1892 switch (TREE_CODE (type))
1894 case NULLPTR_TYPE:
1895 gcc_checking_assert (integer_zerop (t));
1896 /* Fallthru. */
1898 case POINTER_TYPE:
1899 case REFERENCE_TYPE:
1901 if (integer_zerop (t))
1902 ix = 0;
1903 else if (integer_onep (t))
1904 ix = 2;
1906 if (ix >= 0)
1907 limit = 3;
1909 break;
1911 case BOOLEAN_TYPE:
1912 /* Cache false or true. */
1913 limit = 2;
1914 if (wi::ltu_p (wi::to_wide (t), 2))
1915 ix = TREE_INT_CST_ELT (t, 0);
1916 break;
1918 case INTEGER_TYPE:
1919 case OFFSET_TYPE:
1920 if (TYPE_UNSIGNED (type))
1922 /* Cache 0..N */
1923 limit = param_integer_share_limit;
1925 /* This is a little hokey, but if the prec is smaller than
1926 what is necessary to hold param_integer_share_limit, then the
1927 obvious test will not get the correct answer. */
1928 if (prec < HOST_BITS_PER_WIDE_INT)
1930 if (tree_to_uhwi (t)
1931 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1932 ix = tree_to_uhwi (t);
1934 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1935 ix = tree_to_uhwi (t);
1937 else
1939 /* Cache -1..N */
1940 limit = param_integer_share_limit + 1;
1942 if (integer_minus_onep (t))
1943 ix = 0;
1944 else if (!wi::neg_p (wi::to_wide (t)))
1946 if (prec < HOST_BITS_PER_WIDE_INT)
1948 if (tree_to_shwi (t) < param_integer_share_limit)
1949 ix = tree_to_shwi (t) + 1;
1951 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1952 ix = tree_to_shwi (t) + 1;
1955 break;
1957 case ENUMERAL_TYPE:
1958 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1959 members. */
1960 break;
1962 default:
1963 gcc_unreachable ();
1966 if (ix >= 0)
1968 /* Look for it in the type's vector of small shared ints. */
1969 if (!TYPE_CACHED_VALUES_P (type))
1971 TYPE_CACHED_VALUES_P (type) = 1;
1972 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1975 if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
1977 gcc_checking_assert (might_duplicate);
1978 t = r;
1980 else
1981 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1983 else
1985 /* Use the cache of larger shared ints. */
1986 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1987 if (tree r = *slot)
1989 /* If there is already an entry for the number verify it's the
1990 same value. */
1991 gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
1992 /* And return the cached value. */
1993 t = r;
1995 else
1996 /* Otherwise insert this one into the hash table. */
1997 *slot = t;
2000 return t;
2004 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
2005 and the rest are zeros. */
2007 tree
2008 build_low_bits_mask (tree type, unsigned bits)
2010 gcc_assert (bits <= TYPE_PRECISION (type));
2012 return wide_int_to_tree (type, wi::mask (bits, false,
2013 TYPE_PRECISION (type)));
2016 /* Checks that X is an integer constant that can be expressed in an (unsigned)
2017 HOST_WIDE_INT without loss of precision. */
2019 bool
2020 cst_and_fits_in_hwi (const_tree x)
2022 return (TREE_CODE (x) == INTEGER_CST
2023 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2026 /* Build a newly constructed VECTOR_CST with the given values of
2027 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2029 tree
2030 make_vector (unsigned log2_npatterns,
2031 unsigned int nelts_per_pattern MEM_STAT_DECL)
2033 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2034 tree t;
2035 unsigned npatterns = 1 << log2_npatterns;
2036 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2037 unsigned length = (sizeof (struct tree_vector)
2038 + (encoded_nelts - 1) * sizeof (tree));
2040 record_node_allocation_statistics (VECTOR_CST, length);
2042 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2044 TREE_SET_CODE (t, VECTOR_CST);
2045 TREE_CONSTANT (t) = 1;
2046 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2047 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2049 return t;
2052 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2053 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2055 tree
2056 build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
2058 if (vec_safe_length (v) == 0)
2059 return build_zero_cst (type);
2061 unsigned HOST_WIDE_INT idx, nelts;
2062 tree value;
2064 /* We can't construct a VECTOR_CST for a variable number of elements. */
2065 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2066 tree_vector_builder vec (type, nelts, 1);
2067 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2069 if (TREE_CODE (value) == VECTOR_CST)
2071 /* If NELTS is constant then this must be too. */
2072 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2073 for (unsigned i = 0; i < sub_nelts; ++i)
2074 vec.quick_push (VECTOR_CST_ELT (value, i));
2076 else
2077 vec.quick_push (value);
2079 while (vec.length () < nelts)
2080 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2082 return vec.build ();
2085 /* Build a vector of type VECTYPE where all the elements are SCs. */
2086 tree
2087 build_vector_from_val (tree vectype, tree sc)
2089 unsigned HOST_WIDE_INT i, nunits;
2091 if (sc == error_mark_node)
2092 return sc;
2094 /* Verify that the vector type is suitable for SC. Note that there
2095 is some inconsistency in the type-system with respect to restrict
2096 qualifications of pointers. Vector types always have a main-variant
2097 element type and the qualification is applied to the vector-type.
2098 So TREE_TYPE (vector-type) does not return a properly qualified
2099 vector element-type. */
2100 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
2101 TREE_TYPE (vectype)));
2103 if (CONSTANT_CLASS_P (sc))
2105 tree_vector_builder v (vectype, 1, 1);
2106 v.quick_push (sc);
2107 return v.build ();
2109 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
2110 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
2111 else
2113 vec<constructor_elt, va_gc> *v;
2114 vec_alloc (v, nunits);
2115 for (i = 0; i < nunits; ++i)
2116 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2117 return build_constructor (vectype, v);
2121 /* If TYPE is not a vector type, just return SC, otherwise return
2122 build_vector_from_val (TYPE, SC). */
2124 tree
2125 build_uniform_cst (tree type, tree sc)
2127 if (!VECTOR_TYPE_P (type))
2128 return sc;
2130 return build_vector_from_val (type, sc);
2133 /* Build a vector series of type TYPE in which element I has the value
2134 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2135 and a VEC_SERIES_EXPR otherwise. */
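/* For example, with INTEGER_CST operands BASE == 10 and STEP == 3, a
   four-element vector becomes the constant { 10, 13, 16, 19 }, encoded as
   the first three elements of a stepped sequence; non-constant operands
   instead yield a VEC_SERIES_EXPR.  */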
2137 tree
2138 build_vec_series (tree type, tree base, tree step)
2140 if (integer_zerop (step))
2141 return build_vector_from_val (type, base);
2142 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2144 tree_vector_builder builder (type, 1, 3);
2145 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2146 wi::to_wide (base) + wi::to_wide (step));
2147 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2148 wi::to_wide (elt1) + wi::to_wide (step));
2149 builder.quick_push (base);
2150 builder.quick_push (elt1);
2151 builder.quick_push (elt2);
2152 return builder.build ();
2154 return build2 (VEC_SERIES_EXPR, type, base, step);
2157 /* Return a vector with the same number of units and number of bits
2158 as VEC_TYPE, but in which the elements are a linear series of unsigned
2159 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
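/* For example, with BASE == 0 and STEP == 1 a four-element result is
   { 0, 1, 2, 3 }; if the element type of VEC_TYPE is not an unsigned
   integer type, an unsigned integer type of the same width is used for
   the result instead.  */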
2161 tree
2162 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2164 tree index_vec_type = vec_type;
2165 tree index_elt_type = TREE_TYPE (vec_type);
2166 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2167 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2169 index_elt_type = build_nonstandard_integer_type
2170 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2171 index_vec_type = build_vector_type (index_elt_type, nunits);
2174 tree_vector_builder v (index_vec_type, 1, 3);
2175 for (unsigned int i = 0; i < 3; ++i)
2176 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2177 return v.build ();
2180 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2181 elements are A and the rest are B. */
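/* For example, with NUM_A == 2 in a four-element vector the result is
   { A, A, B, B }.  */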
2183 tree
2184 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2186 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2187 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2188 /* Optimize the constant case. */
2189 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2190 count /= 2;
2191 tree_vector_builder builder (vec_type, count, 2);
2192 for (unsigned int i = 0; i < count * 2; ++i)
2193 builder.quick_push (i < num_a ? a : b);
2194 return builder.build ();
2197 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2198 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2200 void
2201 recompute_constructor_flags (tree c)
2203 unsigned int i;
2204 tree val;
2205 bool constant_p = true;
2206 bool side_effects_p = false;
2207 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2209 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2211 /* Mostly ctors will have elts that don't have side-effects, so
2212 the usual case is to scan all the elements. Hence a single
2213 loop for both const and side effects, rather than one loop
2214 each (with early outs). */
2215 if (!TREE_CONSTANT (val))
2216 constant_p = false;
2217 if (TREE_SIDE_EFFECTS (val))
2218 side_effects_p = true;
2221 TREE_SIDE_EFFECTS (c) = side_effects_p;
2222 TREE_CONSTANT (c) = constant_p;
2225 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2226 CONSTRUCTOR C. */
2228 void
2229 verify_constructor_flags (tree c)
2231 unsigned int i;
2232 tree val;
2233 bool constant_p = TREE_CONSTANT (c);
2234 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2235 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2237 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2239 if (constant_p && !TREE_CONSTANT (val))
2240 internal_error ("non-constant element in constant CONSTRUCTOR");
2241 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2242 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2246 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2247 are in the vec pointed to by VALS. */
2248 tree
2249 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2251 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2253 TREE_TYPE (c) = type;
2254 CONSTRUCTOR_ELTS (c) = vals;
2256 recompute_constructor_flags (c);
2258 return c;
2261 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2262 INDEX and VALUE. */
2263 tree
2264 build_constructor_single (tree type, tree index, tree value)
2266 vec<constructor_elt, va_gc> *v;
2267 constructor_elt elt = {index, value};
2269 vec_alloc (v, 1);
2270 v->quick_push (elt);
2272 return build_constructor (type, v);
2276 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2277 are in a list pointed to by VALS. */
2278 tree
2279 build_constructor_from_list (tree type, tree vals)
2281 tree t;
2282 vec<constructor_elt, va_gc> *v = NULL;
2284 if (vals)
2286 vec_alloc (v, list_length (vals));
2287 for (t = vals; t; t = TREE_CHAIN (t))
2288 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2291 return build_constructor (type, v);
2294 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2295 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2296 fields in the constructor remain null. */
2298 tree
2299 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2301 vec<constructor_elt, va_gc> *v = NULL;
2303 for (tree t : vals)
2304 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2306 return build_constructor (type, v);
2309 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2310 of elements, provided as index/value pairs. */
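/* Typical use, where IDX0/VAL0 and IDX1/VAL1 stand for whatever index
   and value trees the caller has:

     build_constructor_va (type, 2, IDX0, VAL0, IDX1, VAL1);  */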
2312 tree
2313 build_constructor_va (tree type, int nelts, ...)
2315 vec<constructor_elt, va_gc> *v = NULL;
2316 va_list p;
2318 va_start (p, nelts);
2319 vec_alloc (v, nelts);
2320 while (nelts--)
2322 tree index = va_arg (p, tree);
2323 tree value = va_arg (p, tree);
2324 CONSTRUCTOR_APPEND_ELT (v, index, value);
2326 va_end (p);
2327 return build_constructor (type, v);
2330 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2332 tree
2333 build_clobber (tree type, enum clobber_kind kind)
2335 tree clobber = build_constructor (type, NULL);
2336 TREE_THIS_VOLATILE (clobber) = true;
2337 CLOBBER_KIND (clobber) = kind;
2338 return clobber;
2341 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2343 tree
2344 build_fixed (tree type, FIXED_VALUE_TYPE f)
2346 tree v;
2347 FIXED_VALUE_TYPE *fp;
2349 v = make_node (FIXED_CST);
2350 fp = ggc_alloc<fixed_value> ();
2351 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2353 TREE_TYPE (v) = type;
2354 TREE_FIXED_CST_PTR (v) = fp;
2355 return v;
2358 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2360 tree
2361 build_real (tree type, REAL_VALUE_TYPE d)
2363 tree v;
2364 int overflow = 0;
2366 /* dconst{0,1,2,m1,half} are used in various places in
2367 the middle-end and optimizers; allow them here
2368 even for decimal floating point types as an exception
2369 by converting them to decimal. */
2370 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
2371 && (d.cl == rvc_normal || d.cl == rvc_zero)
2372 && !d.decimal)
2374 if (memcmp (&d, &dconst1, sizeof (d)) == 0)
2375 decimal_real_from_string (&d, "1");
2376 else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
2377 decimal_real_from_string (&d, "2");
2378 else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
2379 decimal_real_from_string (&d, "-1");
2380 else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
2381 decimal_real_from_string (&d, "0.5");
2382 else if (memcmp (&d, &dconst0, sizeof (d)) == 0)
2384 /* Make sure to give zero the minimum quantum exponent for
2385 the type (which corresponds to all bits zero). */
2386 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
2387 char buf[16];
2388 sprintf (buf, "0e%d", fmt->emin - fmt->p);
2389 decimal_real_from_string (&d, buf);
2391 else
2392 gcc_unreachable ();
2395 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2396 Consider doing it via real_convert now. */
2398 v = make_node (REAL_CST);
2399 TREE_TYPE (v) = type;
2400 memcpy (TREE_REAL_CST_PTR (v), &d, sizeof (REAL_VALUE_TYPE));
2401 TREE_OVERFLOW (v) = overflow;
2402 return v;
2405 /* Like build_real, but first truncate D to the type. */
2407 tree
2408 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2410 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2413 /* Return a new REAL_CST node whose type is TYPE
2414 and whose value is the integer value of the INTEGER_CST node I. */
2416 REAL_VALUE_TYPE
2417 real_value_from_int_cst (const_tree type, const_tree i)
2419 REAL_VALUE_TYPE d;
2421 /* Clear all bits of the real value type so that we can later do
2422 bitwise comparisons to see if two values are the same. */
2423 memset (&d, 0, sizeof d);
2425 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2426 TYPE_SIGN (TREE_TYPE (i)));
2427 return d;
2430 /* Given a tree representing an integer constant I, return a tree
2431 representing the same value as a floating-point constant of type TYPE. */
2433 tree
2434 build_real_from_int_cst (tree type, const_tree i)
2436 tree v;
2437 int overflow = TREE_OVERFLOW (i);
2439 v = build_real (type, real_value_from_int_cst (type, i));
2441 TREE_OVERFLOW (v) |= overflow;
2442 return v;
2445 /* Return a new REAL_CST node whose type is TYPE
2446 and whose value is the integer value I which has sign SGN. */
2448 tree
2449 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2451 REAL_VALUE_TYPE d;
2453 /* Clear all bits of the real value type so that we can later do
2454 bitwise comparisons to see if two values are the same. */
2455 memset (&d, 0, sizeof d);
2457 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2458 return build_real (type, d);
2461 /* Return a newly constructed STRING_CST node whose value is the LEN
2462 characters at STR when STR is nonnull, or all zeros otherwise.
2463 Note that for a C string literal, LEN should include the trailing NUL.
2464 The TREE_TYPE is not initialized. */
2466 tree
2467 build_string (unsigned len, const char *str /*= NULL */)
2469 /* Do not waste bytes provided by padding of struct tree_string. */
2470 unsigned size = len + offsetof (struct tree_string, str) + 1;
2472 record_node_allocation_statistics (STRING_CST, size);
2474 tree s = (tree) ggc_internal_alloc (size);
2476 memset (s, 0, sizeof (struct tree_typed));
2477 TREE_SET_CODE (s, STRING_CST);
2478 TREE_CONSTANT (s) = 1;
2479 TREE_STRING_LENGTH (s) = len;
2480 if (str)
2481 memcpy (s->string.str, str, len);
2482 else
2483 memset (s->string.str, 0, len);
2484 s->string.str[len] = '\0';
2486 return s;
2489 /* Return a newly constructed COMPLEX_CST node whose value is
2490 specified by the real and imaginary parts REAL and IMAG.
2491 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2492 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2494 tree
2495 build_complex (tree type, tree real, tree imag)
2497 gcc_assert (CONSTANT_CLASS_P (real));
2498 gcc_assert (CONSTANT_CLASS_P (imag));
2500 tree t = make_node (COMPLEX_CST);
2502 TREE_REALPART (t) = real;
2503 TREE_IMAGPART (t) = imag;
2504 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2505 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2506 return t;
2509 /* Build a complex (inf +- 0i), such as for the result of cproj.
2510 TYPE is the complex tree type of the result. If NEG is true, the
2511 imaginary zero is negative. */
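/* For example, build_complex_inf (type, false) is +Inf + 0.0i and
   build_complex_inf (type, true) is +Inf - 0.0i; only the sign of the
   imaginary zero differs.  */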
2513 tree
2514 build_complex_inf (tree type, bool neg)
2516 REAL_VALUE_TYPE rzero = dconst0;
2518 rzero.sign = neg;
2519 return build_complex (type, build_real (TREE_TYPE (type), dconstinf),
2520 build_real (TREE_TYPE (type), rzero));
2523 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2524 element is set to 1. In particular, this is 1 + i for complex types. */
2526 tree
2527 build_each_one_cst (tree type)
2529 if (TREE_CODE (type) == COMPLEX_TYPE)
2531 tree scalar = build_one_cst (TREE_TYPE (type));
2532 return build_complex (type, scalar, scalar);
2534 else
2535 return build_one_cst (type);
2538 /* Return a constant of arithmetic type TYPE which is the
2539 multiplicative identity of the set TYPE. */
2541 tree
2542 build_one_cst (tree type)
2544 switch (TREE_CODE (type))
2546 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2547 case POINTER_TYPE: case REFERENCE_TYPE:
2548 case OFFSET_TYPE:
2549 return build_int_cst (type, 1);
2551 case REAL_TYPE:
2552 return build_real (type, dconst1);
2554 case FIXED_POINT_TYPE:
2555 /* We can only generate 1 for accum types. */
2556 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2557 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2559 case VECTOR_TYPE:
2561 tree scalar = build_one_cst (TREE_TYPE (type));
2563 return build_vector_from_val (type, scalar);
2566 case COMPLEX_TYPE:
2567 return build_complex (type,
2568 build_one_cst (TREE_TYPE (type)),
2569 build_zero_cst (TREE_TYPE (type)));
2571 default:
2572 gcc_unreachable ();
2576 /* Return an integer of type TYPE containing all 1's in as much precision as
2577 it contains, or a complex or vector whose subparts are such integers. */
2579 tree
2580 build_all_ones_cst (tree type)
2582 if (TREE_CODE (type) == COMPLEX_TYPE)
2584 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2585 return build_complex (type, scalar, scalar);
2587 else
2588 return build_minus_one_cst (type);
2591 /* Return a constant of arithmetic type TYPE which is the
2592 opposite of the multiplicative identity of the set TYPE. */
2594 tree
2595 build_minus_one_cst (tree type)
2597 switch (TREE_CODE (type))
2599 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2600 case POINTER_TYPE: case REFERENCE_TYPE:
2601 case OFFSET_TYPE:
2602 return build_int_cst (type, -1);
2604 case REAL_TYPE:
2605 return build_real (type, dconstm1);
2607 case FIXED_POINT_TYPE:
2608 /* We can only generate 1 for accum types. */
2609 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2610 return build_fixed (type,
2611 fixed_from_double_int (double_int_minus_one,
2612 SCALAR_TYPE_MODE (type)));
2614 case VECTOR_TYPE:
2616 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2618 return build_vector_from_val (type, scalar);
2621 case COMPLEX_TYPE:
2622 return build_complex (type,
2623 build_minus_one_cst (TREE_TYPE (type)),
2624 build_zero_cst (TREE_TYPE (type)));
2626 default:
2627 gcc_unreachable ();
2631 /* Build 0 constant of type TYPE. This is used by constructor folding
2632 and thus the constant should be represented in memory by
2633 zero(es). */
2635 tree
2636 build_zero_cst (tree type)
2638 switch (TREE_CODE (type))
2640 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2641 case POINTER_TYPE: case REFERENCE_TYPE:
2642 case OFFSET_TYPE: case NULLPTR_TYPE:
2643 return build_int_cst (type, 0);
2645 case REAL_TYPE:
2646 return build_real (type, dconst0);
2648 case FIXED_POINT_TYPE:
2649 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2651 case VECTOR_TYPE:
2653 tree scalar = build_zero_cst (TREE_TYPE (type));
2655 return build_vector_from_val (type, scalar);
2658 case COMPLEX_TYPE:
2660 tree zero = build_zero_cst (TREE_TYPE (type));
2662 return build_complex (type, zero, zero);
2665 default:
2666 if (!AGGREGATE_TYPE_P (type))
2667 return fold_convert (type, integer_zero_node);
2668 return build_constructor (type, NULL);
2673 /* Build a BINFO with room for BASE_BINFOS base binfos. */
2675 tree
2676 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2678 tree t;
2679 size_t length = (offsetof (struct tree_binfo, base_binfos)
2680 + vec<tree, va_gc>::embedded_size (base_binfos));
2682 record_node_allocation_statistics (TREE_BINFO, length);
2684 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2686 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2688 TREE_SET_CODE (t, TREE_BINFO);
2690 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2692 return t;
2695 /* Create a CASE_LABEL_EXPR tree node and return it. */
2697 tree
2698 build_case_label (tree low_value, tree high_value, tree label_decl)
2700 tree t = make_node (CASE_LABEL_EXPR);
2702 TREE_TYPE (t) = void_type_node;
2703 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2705 CASE_LOW (t) = low_value;
2706 CASE_HIGH (t) = high_value;
2707 CASE_LABEL (t) = label_decl;
2708 CASE_CHAIN (t) = NULL_TREE;
2710 return t;
2713 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2714 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2715 The latter determines the length of the HOST_WIDE_INT vector. */
2717 tree
2718 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2720 tree t;
2721 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2722 + sizeof (struct tree_int_cst));
2724 gcc_assert (len);
2725 record_node_allocation_statistics (INTEGER_CST, length);
2727 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2729 TREE_SET_CODE (t, INTEGER_CST);
2730 TREE_INT_CST_NUNITS (t) = len;
2731 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2732 /* to_offset can only be applied to trees that are offset_int-sized
2733 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2734 must be exactly the precision of offset_int and so LEN is correct. */
2735 if (ext_len <= OFFSET_INT_ELTS)
2736 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2737 else
2738 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2740 TREE_CONSTANT (t) = 1;
2742 return t;
2745 /* Build a newly constructed TREE_VEC node of length LEN. */
2747 tree
2748 make_tree_vec (int len MEM_STAT_DECL)
2750 tree t;
2751 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2753 record_node_allocation_statistics (TREE_VEC, length);
2755 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2757 TREE_SET_CODE (t, TREE_VEC);
2758 TREE_VEC_LENGTH (t) = len;
2760 return t;
2763 /* Grow a TREE_VEC node to new length LEN. */
2765 tree
2766 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2768 gcc_assert (TREE_CODE (v) == TREE_VEC);
2770 int oldlen = TREE_VEC_LENGTH (v);
2771 gcc_assert (len > oldlen);
2773 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2774 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2776 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2778 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2780 TREE_VEC_LENGTH (v) = len;
2782 return v;
2785 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2786 fixed, and scalar, complex or vector. */
2788 bool
2789 zerop (const_tree expr)
2791 return (integer_zerop (expr)
2792 || real_zerop (expr)
2793 || fixed_zerop (expr));
2796 /* Return 1 if EXPR is the integer constant zero or a complex constant
2797 of zero, or a location wrapper for such a constant. */
2799 bool
2800 integer_zerop (const_tree expr)
2802 STRIP_ANY_LOCATION_WRAPPER (expr);
2804 switch (TREE_CODE (expr))
2806 case INTEGER_CST:
2807 return wi::to_wide (expr) == 0;
2808 case COMPLEX_CST:
2809 return (integer_zerop (TREE_REALPART (expr))
2810 && integer_zerop (TREE_IMAGPART (expr)));
2811 case VECTOR_CST:
2812 return (VECTOR_CST_NPATTERNS (expr) == 1
2813 && VECTOR_CST_DUPLICATE_P (expr)
2814 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2815 default:
2816 return false;
2820 /* Return 1 if EXPR is the integer constant one or the corresponding
2821 complex constant, or a location wrapper for such a constant. */
2823 bool
2824 integer_onep (const_tree expr)
2826 STRIP_ANY_LOCATION_WRAPPER (expr);
2828 switch (TREE_CODE (expr))
2830 case INTEGER_CST:
2831 return wi::eq_p (wi::to_widest (expr), 1);
2832 case COMPLEX_CST:
2833 return (integer_onep (TREE_REALPART (expr))
2834 && integer_zerop (TREE_IMAGPART (expr)));
2835 case VECTOR_CST:
2836 return (VECTOR_CST_NPATTERNS (expr) == 1
2837 && VECTOR_CST_DUPLICATE_P (expr)
2838 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2839 default:
2840 return false;
2844 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2845 return 1 if every piece is the integer constant one.
2846 Also return 1 for location wrappers for such a constant. */
2848 bool
2849 integer_each_onep (const_tree expr)
2851 STRIP_ANY_LOCATION_WRAPPER (expr);
2853 if (TREE_CODE (expr) == COMPLEX_CST)
2854 return (integer_onep (TREE_REALPART (expr))
2855 && integer_onep (TREE_IMAGPART (expr)));
2856 else
2857 return integer_onep (expr);
2860 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2861 it contains, or a complex or vector whose subparts are such integers,
2862 or a location wrapper for such a constant. */
2864 bool
2865 integer_all_onesp (const_tree expr)
2867 STRIP_ANY_LOCATION_WRAPPER (expr);
2869 if (TREE_CODE (expr) == COMPLEX_CST
2870 && integer_all_onesp (TREE_REALPART (expr))
2871 && integer_all_onesp (TREE_IMAGPART (expr)))
2872 return true;
2874 else if (TREE_CODE (expr) == VECTOR_CST)
2875 return (VECTOR_CST_NPATTERNS (expr) == 1
2876 && VECTOR_CST_DUPLICATE_P (expr)
2877 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2879 else if (TREE_CODE (expr) != INTEGER_CST)
2880 return false;
2882 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2883 == wi::to_wide (expr));
2886 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2887 for such a constant. */
2889 bool
2890 integer_minus_onep (const_tree expr)
2892 STRIP_ANY_LOCATION_WRAPPER (expr);
2894 if (TREE_CODE (expr) == COMPLEX_CST)
2895 return (integer_all_onesp (TREE_REALPART (expr))
2896 && integer_zerop (TREE_IMAGPART (expr)));
2897 else
2898 return integer_all_onesp (expr);
2901 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2902 one bit on), or a location wrapper for such a constant. */
2904 bool
2905 integer_pow2p (const_tree expr)
2907 STRIP_ANY_LOCATION_WRAPPER (expr);
2909 if (TREE_CODE (expr) == COMPLEX_CST
2910 && integer_pow2p (TREE_REALPART (expr))
2911 && integer_zerop (TREE_IMAGPART (expr)))
2912 return true;
2914 if (TREE_CODE (expr) != INTEGER_CST)
2915 return false;
2917 return wi::popcount (wi::to_wide (expr)) == 1;
2920 /* Return 1 if EXPR is an integer constant other than zero or a
2921 complex constant other than zero, or a location wrapper for such a
2922 constant. */
2924 bool
2925 integer_nonzerop (const_tree expr)
2927 STRIP_ANY_LOCATION_WRAPPER (expr);
2929 return ((TREE_CODE (expr) == INTEGER_CST
2930 && wi::to_wide (expr) != 0)
2931 || (TREE_CODE (expr) == COMPLEX_CST
2932 && (integer_nonzerop (TREE_REALPART (expr))
2933 || integer_nonzerop (TREE_IMAGPART (expr)))));
2936 /* Return 1 if EXPR is the integer constant one. For vector,
2937 return 1 if every piece is the integer constant minus one
2938 (representing the value TRUE).
2939 Also return 1 for location wrappers for such a constant. */
2941 bool
2942 integer_truep (const_tree expr)
2944 STRIP_ANY_LOCATION_WRAPPER (expr);
2946 if (TREE_CODE (expr) == VECTOR_CST)
2947 return integer_all_onesp (expr);
2948 return integer_onep (expr);
2951 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2952 for such a constant. */
2954 bool
2955 fixed_zerop (const_tree expr)
2957 STRIP_ANY_LOCATION_WRAPPER (expr);
2959 return (TREE_CODE (expr) == FIXED_CST
2960 && TREE_FIXED_CST (expr).data.is_zero ());
2963 /* Return the base-2 logarithm of a tree node whose value is known to be
2964 a power of two. */
2966 int
2967 tree_log2 (const_tree expr)
2969 if (TREE_CODE (expr) == COMPLEX_CST)
2970 return tree_log2 (TREE_REALPART (expr));
2972 return wi::exact_log2 (wi::to_wide (expr));
2975 /* Similar, but return the largest integer Y such that 2 ** Y is less
2976 than or equal to EXPR. */
2978 int
2979 tree_floor_log2 (const_tree expr)
2981 if (TREE_CODE (expr) == COMPLEX_CST)
2982 return tree_log2 (TREE_REALPART (expr));
2984 return wi::floor_log2 (wi::to_wide (expr));
2987 /* Return number of known trailing zero bits in EXPR, or, if the value of
2988 EXPR is known to be zero, the precision of its type. */
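/* For example, if X is known to have two trailing zero bits, then for
   X * 8 the MULT_EXPR case below returns 2 + 3 = 5 and for X << 4 the
   LSHIFT_EXPR case returns 2 + 4 = 6, both capped at the precision.  */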
2990 unsigned int
2991 tree_ctz (const_tree expr)
2993 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2994 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2995 return 0;
2997 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2998 switch (TREE_CODE (expr))
3000 case INTEGER_CST:
3001 ret1 = wi::ctz (wi::to_wide (expr));
3002 return MIN (ret1, prec);
3003 case SSA_NAME:
3004 ret1 = wi::ctz (get_nonzero_bits (expr));
3005 return MIN (ret1, prec);
3006 case PLUS_EXPR:
3007 case MINUS_EXPR:
3008 case BIT_IOR_EXPR:
3009 case BIT_XOR_EXPR:
3010 case MIN_EXPR:
3011 case MAX_EXPR:
3012 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3013 if (ret1 == 0)
3014 return ret1;
3015 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3016 return MIN (ret1, ret2);
3017 case POINTER_PLUS_EXPR:
3018 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3019 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3020 /* The second operand is sizetype, which could in theory be
3021 wider than the pointer's precision. Make sure we never
3022 return more than prec. */
3023 ret2 = MIN (ret2, prec);
3024 return MIN (ret1, ret2);
3025 case BIT_AND_EXPR:
3026 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3027 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3028 return MAX (ret1, ret2);
3029 case MULT_EXPR:
3030 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3031 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3032 return MIN (ret1 + ret2, prec);
3033 case LSHIFT_EXPR:
3034 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3035 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3036 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3038 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3039 return MIN (ret1 + ret2, prec);
3041 return ret1;
3042 case RSHIFT_EXPR:
3043 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3044 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3046 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3047 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3048 if (ret1 > ret2)
3049 return ret1 - ret2;
3051 return 0;
3052 case TRUNC_DIV_EXPR:
3053 case CEIL_DIV_EXPR:
3054 case FLOOR_DIV_EXPR:
3055 case ROUND_DIV_EXPR:
3056 case EXACT_DIV_EXPR:
3057 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3058 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
3060 int l = tree_log2 (TREE_OPERAND (expr, 1));
3061 if (l >= 0)
3063 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3064 ret2 = l;
3065 if (ret1 > ret2)
3066 return ret1 - ret2;
3069 return 0;
3070 CASE_CONVERT:
3071 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3072 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
3073 ret1 = prec;
3074 return MIN (ret1, prec);
3075 case SAVE_EXPR:
3076 return tree_ctz (TREE_OPERAND (expr, 0));
3077 case COND_EXPR:
3078 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
3079 if (ret1 == 0)
3080 return 0;
3081 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
3082 return MIN (ret1, ret2);
3083 case COMPOUND_EXPR:
3084 return tree_ctz (TREE_OPERAND (expr, 1));
3085 case ADDR_EXPR:
3086 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
3087 if (ret1 > BITS_PER_UNIT)
3089 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
3090 return MIN (ret1, prec);
3092 return 0;
3093 default:
3094 return 0;
3098 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3099 decimal float constants, so don't return 1 for them.
3100 Also return 1 for location wrappers around such a constant. */
3102 bool
3103 real_zerop (const_tree expr)
3105 STRIP_ANY_LOCATION_WRAPPER (expr);
3107 switch (TREE_CODE (expr))
3109 case REAL_CST:
3110 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3111 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3112 case COMPLEX_CST:
3113 return real_zerop (TREE_REALPART (expr))
3114 && real_zerop (TREE_IMAGPART (expr));
3115 case VECTOR_CST:
3117 /* Don't simply check for a duplicate because the predicate
3118 accepts both +0.0 and -0.0. */
3119 unsigned count = vector_cst_encoded_nelts (expr);
3120 for (unsigned int i = 0; i < count; ++i)
3121 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3122 return false;
3123 return true;
3125 default:
3126 return false;
3130 /* Return 1 if EXPR is the real constant one in real or complex form.
3131 Trailing zeroes matter for decimal float constants, so don't return
3132 1 for them.
3133 Also return 1 for location wrappers around such a constant. */
3135 bool
3136 real_onep (const_tree expr)
3138 STRIP_ANY_LOCATION_WRAPPER (expr);
3140 switch (TREE_CODE (expr))
3142 case REAL_CST:
3143 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3144 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3145 case COMPLEX_CST:
3146 return real_onep (TREE_REALPART (expr))
3147 && real_zerop (TREE_IMAGPART (expr));
3148 case VECTOR_CST:
3149 return (VECTOR_CST_NPATTERNS (expr) == 1
3150 && VECTOR_CST_DUPLICATE_P (expr)
3151 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3152 default:
3153 return false;
3157 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3158 matter for decimal float constants, so don't return 1 for them.
3159 Also return 1 for location wrappers around such a constant. */
3161 bool
3162 real_minus_onep (const_tree expr)
3164 STRIP_ANY_LOCATION_WRAPPER (expr);
3166 switch (TREE_CODE (expr))
3168 case REAL_CST:
3169 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3170 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3171 case COMPLEX_CST:
3172 return real_minus_onep (TREE_REALPART (expr))
3173 && real_zerop (TREE_IMAGPART (expr));
3174 case VECTOR_CST:
3175 return (VECTOR_CST_NPATTERNS (expr) == 1
3176 && VECTOR_CST_DUPLICATE_P (expr)
3177 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3178 default:
3179 return false;
3183 /* Nonzero if EXP is a constant or a cast of a constant. */
3185 bool
3186 really_constant_p (const_tree exp)
3188 /* This is not quite the same as STRIP_NOPS. It does more. */
3189 while (CONVERT_EXPR_P (exp)
3190 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3191 exp = TREE_OPERAND (exp, 0);
3192 return TREE_CONSTANT (exp);
3195 /* Return true if T holds a polynomial pointer difference, storing it in
3196 *VALUE if so. A true return means that T's precision is no greater
3197 than 64 bits, which is the largest address space we support, so *VALUE
3198 never loses precision. However, the signedness of the result does
3199 not necessarily match the signedness of T: sometimes an unsigned type
3200 like sizetype is used to encode a value that is actually negative. */
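/* For instance, a pointer difference of -8 may appear as the sizetype
   constant 0xfffffffffffffff8 on a 64-bit target; *VALUE then holds -8
   even though the constant's type is unsigned.  */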
3202 bool
3203 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
3205 if (!t)
3206 return false;
3207 if (TREE_CODE (t) == INTEGER_CST)
3209 if (!cst_and_fits_in_hwi (t))
3210 return false;
3211 *value = int_cst_value (t);
3212 return true;
3214 if (POLY_INT_CST_P (t))
3216 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3217 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3218 return false;
3219 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3220 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3221 return true;
3223 return false;
3226 poly_int64
3227 tree_to_poly_int64 (const_tree t)
3229 gcc_assert (tree_fits_poly_int64_p (t));
3230 if (POLY_INT_CST_P (t))
3231 return poly_int_cst_value (t).force_shwi ();
3232 return TREE_INT_CST_LOW (t);
3235 poly_uint64
3236 tree_to_poly_uint64 (const_tree t)
3238 gcc_assert (tree_fits_poly_uint64_p (t));
3239 if (POLY_INT_CST_P (t))
3240 return poly_int_cst_value (t).force_uhwi ();
3241 return TREE_INT_CST_LOW (t);
3244 /* Return first list element whose TREE_VALUE is ELEM.
3245 Return 0 if ELEM is not in LIST. */
3247 tree
3248 value_member (tree elem, tree list)
3250 while (list)
3252 if (elem == TREE_VALUE (list))
3253 return list;
3254 list = TREE_CHAIN (list);
3256 return NULL_TREE;
3259 /* Return first list element whose TREE_PURPOSE is ELEM.
3260 Return 0 if ELEM is not in LIST. */
3262 tree
3263 purpose_member (const_tree elem, tree list)
3265 while (list)
3267 if (elem == TREE_PURPOSE (list))
3268 return list;
3269 list = TREE_CHAIN (list);
3271 return NULL_TREE;
3274 /* Return true if ELEM is in V. */
3276 bool
3277 vec_member (const_tree elem, vec<tree, va_gc> *v)
3279 unsigned ix;
3280 tree t;
3281 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3282 if (elem == t)
3283 return true;
3284 return false;
3287 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3288 NULL_TREE. */
3290 tree
3291 chain_index (int idx, tree chain)
3293 for (; chain && idx > 0; --idx)
3294 chain = TREE_CHAIN (chain);
3295 return chain;
3298 /* Return nonzero if ELEM is part of the chain CHAIN. */
3300 bool
3301 chain_member (const_tree elem, const_tree chain)
3303 while (chain)
3305 if (elem == chain)
3306 return true;
3307 chain = DECL_CHAIN (chain);
3310 return false;
3313 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3314 We expect a null pointer to mark the end of the chain.
3315 This is the Lisp primitive `length'. */
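/* With ENABLE_TREE_CHECKING the loop below also advances a second
   pointer at half speed and asserts the two never coincide, which
   catches accidentally circular chains.  */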
3317 int
3318 list_length (const_tree t)
3320 const_tree p = t;
3321 #ifdef ENABLE_TREE_CHECKING
3322 const_tree q = t;
3323 #endif
3324 int len = 0;
3326 while (p)
3328 p = TREE_CHAIN (p);
3329 #ifdef ENABLE_TREE_CHECKING
3330 if (len % 2)
3331 q = TREE_CHAIN (q);
3332 gcc_assert (p != q);
3333 #endif
3334 len++;
3337 return len;
3340 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3341 UNION_TYPE TYPE, or NULL_TREE if none. */
3343 tree
3344 first_field (const_tree type)
3346 tree t = TYPE_FIELDS (type);
3347 while (t && TREE_CODE (t) != FIELD_DECL)
3348 t = TREE_CHAIN (t);
3349 return t;
3352 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3353 UNION_TYPE TYPE, or NULL_TREE if none. */
3355 tree
3356 last_field (const_tree type)
3358 tree last = NULL_TREE;
3360 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3362 if (TREE_CODE (fld) != FIELD_DECL)
3363 continue;
3365 last = fld;
3368 return last;
3371 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3372 by modifying the last node in chain 1 to point to chain 2.
3373 This is the Lisp primitive `nconc'. */
3375 tree
3376 chainon (tree op1, tree op2)
3378 tree t1;
3380 if (!op1)
3381 return op2;
3382 if (!op2)
3383 return op1;
3385 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3386 continue;
3387 TREE_CHAIN (t1) = op2;
3389 #ifdef ENABLE_TREE_CHECKING
3391 tree t2;
3392 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3393 gcc_assert (t2 != t1);
3395 #endif
3397 return op1;
3400 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3402 tree
3403 tree_last (tree chain)
3405 tree next;
3406 if (chain)
3407 while ((next = TREE_CHAIN (chain)))
3408 chain = next;
3409 return chain;
3412 /* Reverse the order of elements in the chain T,
3413 and return the new head of the chain (old last element). */
3415 tree
3416 nreverse (tree t)
3418 tree prev = 0, decl, next;
3419 for (decl = t; decl; decl = next)
3421 /* We shouldn't be using this function to reverse BLOCK chains; we
3422 have blocks_nreverse for that. */
3423 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3424 next = TREE_CHAIN (decl);
3425 TREE_CHAIN (decl) = prev;
3426 prev = decl;
3428 return prev;
3431 /* Return a newly created TREE_LIST node whose
3432 purpose and value fields are PARM and VALUE. */
3434 tree
3435 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3437 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3438 TREE_PURPOSE (t) = parm;
3439 TREE_VALUE (t) = value;
3440 return t;
3443 /* Build a chain of TREE_LIST nodes from a vector. */
3445 tree
3446 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3448 tree ret = NULL_TREE;
3449 tree *pp = &ret;
3450 unsigned int i;
3451 tree t;
3452 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3454 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3455 pp = &TREE_CHAIN (*pp);
3457 return ret;
3460 /* Return a newly created TREE_LIST node whose
3461 purpose and value fields are PURPOSE and VALUE
3462 and whose TREE_CHAIN is CHAIN. */
3464 tree
3465 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3467 tree node;
3469 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3470 memset (node, 0, sizeof (struct tree_common));
3472 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3474 TREE_SET_CODE (node, TREE_LIST);
3475 TREE_CHAIN (node) = chain;
3476 TREE_PURPOSE (node) = purpose;
3477 TREE_VALUE (node) = value;
3478 return node;
3481 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3482 trees. */
3484 vec<tree, va_gc> *
3485 ctor_to_vec (tree ctor)
3487 vec<tree, va_gc> *vec;
3488 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3489 unsigned int ix;
3490 tree val;
3492 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3493 vec->quick_push (val);
3495 return vec;
3498 /* Return the size nominally occupied by an object of type TYPE
3499 when it resides in memory. The value is measured in units of bytes,
3500 and its data type is that normally used for type sizes
3501 (which is the first type created by make_signed_type or
3502 make_unsigned_type). */
3504 tree
3505 size_in_bytes_loc (location_t loc, const_tree type)
3507 tree t;
3509 if (type == error_mark_node)
3510 return integer_zero_node;
3512 type = TYPE_MAIN_VARIANT (type);
3513 t = TYPE_SIZE_UNIT (type);
3515 if (t == 0)
3517 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3518 return size_zero_node;
3521 return t;
3524 /* Return the size of TYPE (in bytes) as a wide integer
3525 or return -1 if the size can vary or is larger than an integer. */
3527 HOST_WIDE_INT
3528 int_size_in_bytes (const_tree type)
3530 tree t;
3532 if (type == error_mark_node)
3533 return 0;
3535 type = TYPE_MAIN_VARIANT (type);
3536 t = TYPE_SIZE_UNIT (type);
3538 if (t && tree_fits_uhwi_p (t))
3539 return TREE_INT_CST_LOW (t);
3540 else
3541 return -1;
3544 /* Return the maximum size of TYPE (in bytes) as a wide integer
3545 or return -1 if the size can vary or is larger than an integer. */
3547 HOST_WIDE_INT
3548 max_int_size_in_bytes (const_tree type)
3550 HOST_WIDE_INT size = -1;
3551 tree size_tree;
3553 /* If this is an array type, check for a possible MAX_SIZE attached. */
3555 if (TREE_CODE (type) == ARRAY_TYPE)
3557 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3559 if (size_tree && tree_fits_uhwi_p (size_tree))
3560 size = tree_to_uhwi (size_tree);
3563 /* If we still haven't been able to get a size, see if the language
3564 can compute a maximum size. */
3566 if (size == -1)
3568 size_tree = lang_hooks.types.max_size (type);
3570 if (size_tree && tree_fits_uhwi_p (size_tree))
3571 size = tree_to_uhwi (size_tree);
3574 return size;
3577 /* Return the bit position of FIELD, in bits from the start of the record.
3578 This is a tree of type bitsizetype. */
3580 tree
3581 bit_position (const_tree field)
3583 return bit_from_pos (DECL_FIELD_OFFSET (field),
3584 DECL_FIELD_BIT_OFFSET (field));
3587 /* Return the byte position of FIELD, in bytes from the start of the record.
3588 This is a tree of type sizetype. */
3590 tree
3591 byte_position (const_tree field)
3593 return byte_from_pos (DECL_FIELD_OFFSET (field),
3594 DECL_FIELD_BIT_OFFSET (field));
3597 /* Likewise, but return as an integer. It must be representable in
3598 that way (since it could be a signed value, we don't have the
3599 option of returning -1 like int_size_in_bytes can). */
3601 HOST_WIDE_INT
3602 int_byte_position (const_tree field)
3604 return tree_to_shwi (byte_position (field));
3607 /* Return, as a tree node, the number of elements for TYPE (which is an
3608 ARRAY_TYPE) minus one. This counts only elements of the top array. */
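/* For example, for an array type with domain 0 .. 9 the result is the
   INTEGER_CST 9, while a zero-length array yields -1.  */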
3610 tree
3611 array_type_nelts (const_tree type)
3613 tree index_type, min, max;
3615 /* If they did it with unspecified bounds, then we should have already
3616 given an error about it before we got here. */
3617 if (! TYPE_DOMAIN (type))
3618 return error_mark_node;
3620 index_type = TYPE_DOMAIN (type);
3621 min = TYPE_MIN_VALUE (index_type);
3622 max = TYPE_MAX_VALUE (index_type);
3624 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3625 if (!max)
3627 /* Zero-sized arrays are represented by the C FE as complete types with
3628 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while the C++ FE represents
3629 them as min 0, max -1. */
3630 if (COMPLETE_TYPE_P (type)
3631 && integer_zerop (TYPE_SIZE (type))
3632 && integer_zerop (min))
3633 return build_int_cst (TREE_TYPE (min), -1);
3635 return error_mark_node;
3638 return (integer_zerop (min)
3639 ? max
3640 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3643 /* If arg is static -- a reference to an object in static storage -- then
3644 return the object. This is not the same as the C meaning of `static'.
3645 If arg isn't static, return NULL. */
3647 tree
3648 staticp (tree arg)
3650 switch (TREE_CODE (arg))
3652 case FUNCTION_DECL:
3653 /* Nested functions are static, even though taking their address will
3654 involve a trampoline as we unnest the nested function and create
3655 the trampoline on the tree level. */
3656 return arg;
3658 case VAR_DECL:
3659 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3660 && ! DECL_THREAD_LOCAL_P (arg)
3661 && ! DECL_DLLIMPORT_P (arg)
3662 ? arg : NULL);
3664 case CONST_DECL:
3665 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3666 ? arg : NULL);
3668 case CONSTRUCTOR:
3669 return TREE_STATIC (arg) ? arg : NULL;
3671 case LABEL_DECL:
3672 case STRING_CST:
3673 return arg;
3675 case COMPONENT_REF:
3676 /* If the thing being referenced is not a field, then it is
3677 something language specific. */
3678 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3680 /* If we are referencing a bitfield, we can't evaluate an
3681 ADDR_EXPR at compile time and so it isn't a constant. */
3682 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3683 return NULL;
3685 return staticp (TREE_OPERAND (arg, 0));
3687 case BIT_FIELD_REF:
3688 return NULL;
3690 case INDIRECT_REF:
3691 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3693 case ARRAY_REF:
3694 case ARRAY_RANGE_REF:
3695 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3696 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3697 return staticp (TREE_OPERAND (arg, 0));
3698 else
3699 return NULL;
3701 case COMPOUND_LITERAL_EXPR:
3702 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3704 default:
3705 return NULL;
3712 /* Return whether OP is a DECL whose address is function-invariant. */
3714 bool
3715 decl_address_invariant_p (const_tree op)
3717 /* The conditions below are slightly less strict than the one in
3718 staticp. */
3720 switch (TREE_CODE (op))
3722 case PARM_DECL:
3723 case RESULT_DECL:
3724 case LABEL_DECL:
3725 case FUNCTION_DECL:
3726 return true;
3728 case VAR_DECL:
3729 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3730 || DECL_THREAD_LOCAL_P (op)
3731 || DECL_CONTEXT (op) == current_function_decl
3732 || decl_function_context (op) == current_function_decl)
3733 return true;
3734 break;
3736 case CONST_DECL:
3737 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3738 || decl_function_context (op) == current_function_decl)
3739 return true;
3740 break;
3742 default:
3743 break;
3746 return false;
3749 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3751 bool
3752 decl_address_ip_invariant_p (const_tree op)
3754 /* The conditions below are slightly less strict than the one in
3755 staticp. */
3757 switch (TREE_CODE (op))
3759 case LABEL_DECL:
3760 case FUNCTION_DECL:
3761 case STRING_CST:
3762 return true;
3764 case VAR_DECL:
3765 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3766 && !DECL_DLLIMPORT_P (op))
3767 || DECL_THREAD_LOCAL_P (op))
3768 return true;
3769 break;
3771 case CONST_DECL:
3772 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3773 return true;
3774 break;
3776 default:
3777 break;
3780 return false;
3784 /* Return true if T is function-invariant (internal function, does
3785 not handle arithmetic; that's handled in skip_simple_arithmetic and
3786 tree_invariant_p). */
3788 static bool
3789 tree_invariant_p_1 (tree t)
3791 tree op;
3793 if (TREE_CONSTANT (t)
3794 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3795 return true;
3797 switch (TREE_CODE (t))
3799 case SAVE_EXPR:
3800 return true;
3802 case ADDR_EXPR:
3803 op = TREE_OPERAND (t, 0);
3804 while (handled_component_p (op))
3806 switch (TREE_CODE (op))
3808 case ARRAY_REF:
3809 case ARRAY_RANGE_REF:
3810 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3811 || TREE_OPERAND (op, 2) != NULL_TREE
3812 || TREE_OPERAND (op, 3) != NULL_TREE)
3813 return false;
3814 break;
3816 case COMPONENT_REF:
3817 if (TREE_OPERAND (op, 2) != NULL_TREE)
3818 return false;
3819 break;
3821 default:;
3823 op = TREE_OPERAND (op, 0);
3826 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3828 default:
3829 break;
3832 return false;
3835 /* Return true if T is function-invariant. */
3837 bool
3838 tree_invariant_p (tree t)
3840 tree inner = skip_simple_arithmetic (t);
3841 return tree_invariant_p_1 (inner);
3844 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3845 Do this to any expression which may be used in more than one place,
3846 but must be evaluated only once.
3848 Normally, expand_expr would reevaluate the expression each time.
3849 Calling save_expr produces something that is evaluated and recorded
3850 the first time expand_expr is called on it. Subsequent calls to
3851 expand_expr just reuse the recorded value.
3853 The call to expand_expr that generates code that actually computes
3854 the value is the first call *at compile time*. Subsequent calls
3855 *at compile time* generate code to use the saved value.
3856 This produces correct result provided that *at run time* control
3857 always flows through the insns made by the first expand_expr
3858 before reaching the other places where the save_expr was evaluated.
3859 You, the caller of save_expr, must make sure this is so.
3861 Constants, and certain read-only nodes, are returned with no
3862 SAVE_EXPR because that is safe. Expressions containing placeholders
3863 are not touched; see tree.def for an explanation of what these
3864 are used for. */
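/* A typical use is to evaluate an operand once and reuse it, e.g.

     tree t = save_expr (expr);
     return build2 (MULT_EXPR, type, t, t);

   where EXPR and TYPE stand for the caller's expression and its type;
   both uses of T then refer to the single recorded evaluation.  */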
3866 tree
3867 save_expr (tree expr)
3869 tree inner;
3871 /* If the tree evaluates to a constant, then we don't want to hide that
3872 fact (i.e. this allows further folding, and direct checks for constants).
3873 However, a read-only object that has side effects cannot be bypassed.
3874 Since it is no problem to reevaluate literals, we just return the
3875 literal node. */
3876 inner = skip_simple_arithmetic (expr);
3877 if (TREE_CODE (inner) == ERROR_MARK)
3878 return inner;
3880 if (tree_invariant_p_1 (inner))
3881 return expr;
3883 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3884 it means that the size or offset of some field of an object depends on
3885 the value within another field.
3887 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3888 and some variable since it would then need to be both evaluated once and
3889 evaluated more than once. Front-ends must assure this case cannot
3890 happen by surrounding any such subexpressions in their own SAVE_EXPR
3891 and forcing evaluation at the proper time. */
3892 if (contains_placeholder_p (inner))
3893 return expr;
3895 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3897 /* This expression might be placed ahead of a jump to ensure that the
3898 value was computed on both sides of the jump. So make sure it isn't
3899 eliminated as dead. */
3900 TREE_SIDE_EFFECTS (expr) = 1;
3901 return expr;
3904 /* Look inside EXPR into any simple arithmetic operations. Return the
3905 outermost non-arithmetic or non-invariant node. */
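/* For example, given (X + 1) * 2 where X itself is not invariant, the
   result is X: each binary level is peeled because its other operand is
   invariant.  */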
3907 tree
3908 skip_simple_arithmetic (tree expr)
3910 /* We don't care about whether this can be used as an lvalue in this
3911 context. */
3912 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3913 expr = TREE_OPERAND (expr, 0);
3915 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3916 a constant, it will be more efficient to not make another SAVE_EXPR since
3917 it will allow better simplification and GCSE will be able to merge the
3918 computations if they actually occur. */
3919 while (true)
3921 if (UNARY_CLASS_P (expr))
3922 expr = TREE_OPERAND (expr, 0);
3923 else if (BINARY_CLASS_P (expr))
3925 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3926 expr = TREE_OPERAND (expr, 0);
3927 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3928 expr = TREE_OPERAND (expr, 1);
3929 else
3930 break;
3932 else
3933 break;
3936 return expr;
3939 /* Look inside EXPR into simple arithmetic operations involving constants.
3940 Return the outermost non-arithmetic or non-constant node. */
3942 tree
3943 skip_simple_constant_arithmetic (tree expr)
3945 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3946 expr = TREE_OPERAND (expr, 0);
3948 while (true)
3950 if (UNARY_CLASS_P (expr))
3951 expr = TREE_OPERAND (expr, 0);
3952 else if (BINARY_CLASS_P (expr))
3954 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3955 expr = TREE_OPERAND (expr, 0);
3956 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3957 expr = TREE_OPERAND (expr, 1);
3958 else
3959 break;
3961 else
3962 break;
3965 return expr;
3968 /* Return which tree structure is used by T. */
3970 enum tree_node_structure_enum
3971 tree_node_structure (const_tree t)
3973 const enum tree_code code = TREE_CODE (t);
3974 return tree_node_structure_for_code (code);
3977 /* Set various status flags when building a CALL_EXPR object T. */
3979 static void
3980 process_call_operands (tree t)
3982 bool side_effects = TREE_SIDE_EFFECTS (t);
3983 bool read_only = false;
3984 int i = call_expr_flags (t);
3986 /* Calls have side-effects, except those to const or pure functions. */
3987 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3988 side_effects = true;
3989 /* Propagate TREE_READONLY of arguments for const functions. */
3990 if (i & ECF_CONST)
3991 read_only = true;
3993 if (!side_effects || read_only)
3994 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3996 tree op = TREE_OPERAND (t, i);
3997 if (op && TREE_SIDE_EFFECTS (op))
3998 side_effects = true;
3999 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
4000 read_only = false;
4003 TREE_SIDE_EFFECTS (t) = side_effects;
4004 TREE_READONLY (t) = read_only;
4007 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4008 size or offset that depends on a field within a record. */
4010 bool
4011 contains_placeholder_p (const_tree exp)
4013 enum tree_code code;
4015 if (!exp)
4016 return 0;
4018 code = TREE_CODE (exp);
4019 if (code == PLACEHOLDER_EXPR)
4020 return 1;
4022 switch (TREE_CODE_CLASS (code))
4024 case tcc_reference:
4025 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4026 position computations since they will be converted into a
4027 WITH_RECORD_EXPR involving the reference, which is assumed
4028 here to be valid. */
4029 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4031 case tcc_exceptional:
4032 if (code == TREE_LIST)
4033 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
4034 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
4035 break;
4037 case tcc_unary:
4038 case tcc_binary:
4039 case tcc_comparison:
4040 case tcc_expression:
4041 switch (code)
4043 case COMPOUND_EXPR:
4044 /* Ignoring the first operand isn't quite right, but works best. */
4045 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
4047 case COND_EXPR:
4048 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4049 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
4050 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
4052 case SAVE_EXPR:
4053 /* The save_expr function never wraps anything containing
4054 a PLACEHOLDER_EXPR. */
4055 return 0;
4057 default:
4058 break;
4061 switch (TREE_CODE_LENGTH (code))
4063 case 1:
4064 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4065 case 2:
4066 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4067 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
4068 default:
4069 return 0;
4072 case tcc_vl_exp:
4073 switch (code)
4075 case CALL_EXPR:
4077 const_tree arg;
4078 const_call_expr_arg_iterator iter;
4079 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
4080 if (CONTAINS_PLACEHOLDER_P (arg))
4081 return 1;
4082 return 0;
4084 default:
4085 return 0;
4088 default:
4089 return 0;
4091 return 0;
4094 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4095 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4096 field positions. */
4098 static bool
4099 type_contains_placeholder_1 (const_tree type)
4101 /* If the size contains a placeholder or the parent type (component type in
4102 the case of arrays) type involves a placeholder, this type does. */
4103 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
4104 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
4105 || (!POINTER_TYPE_P (type)
4106 && TREE_TYPE (type)
4107 && type_contains_placeholder_p (TREE_TYPE (type))))
4108 return true;
4110 /* Now do type-specific checks. Note that the last part of the check above
4111 greatly limits what we have to do below. */
4112 switch (TREE_CODE (type))
4114 case VOID_TYPE:
4115 case OPAQUE_TYPE:
4116 case COMPLEX_TYPE:
4117 case ENUMERAL_TYPE:
4118 case BOOLEAN_TYPE:
4119 case POINTER_TYPE:
4120 case OFFSET_TYPE:
4121 case REFERENCE_TYPE:
4122 case METHOD_TYPE:
4123 case FUNCTION_TYPE:
4124 case VECTOR_TYPE:
4125 case NULLPTR_TYPE:
4126 return false;
4128 case INTEGER_TYPE:
4129 case REAL_TYPE:
4130 case FIXED_POINT_TYPE:
4131 /* Here we just check the bounds. */
4132 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
4133 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
4135 case ARRAY_TYPE:
4136 /* We have already checked the component type above, so just check
4137 the domain type. Flexible array members have a null domain. */
4138 return TYPE_DOMAIN (type) ?
4139 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4141 case RECORD_TYPE:
4142 case UNION_TYPE:
4143 case QUAL_UNION_TYPE:
4145 tree field;
4147 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4148 if (TREE_CODE (field) == FIELD_DECL
4149 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4150 || (TREE_CODE (type) == QUAL_UNION_TYPE
4151 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4152 || type_contains_placeholder_p (TREE_TYPE (field))))
4153 return true;
4155 return false;
4158 default:
4159 gcc_unreachable ();
4163 /* Wrapper around above function used to cache its result. */
4165 bool
4166 type_contains_placeholder_p (tree type)
4168 bool result;
4170 /* If the contains_placeholder_bits field has been initialized,
4171 then we know the answer. */
4172 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4173 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4175 /* Indicate that we've seen this type node, and the answer is false.
4176 This is what we want to return if we run into recursion via fields. */
4177 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4179 /* Compute the real value. */
4180 result = type_contains_placeholder_1 (type);
4182 /* Store the real value. */
4183 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4185 return result;
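/* A minimal sketch (not from the original file) of the cache encoding used
   by the wrapper above: TYPE_CONTAINS_PLACEHOLDER_INTERNAL holds the boolean
   answer plus one, so zero means "not yet computed".  The example_* name is
   illustrative only.  */

static inline bool
example_placeholder_cache_valid_p (tree type)
{
  /* 0 = unknown, 1 = cached false, 2 = cached true.  */
  return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0;
}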
4188 /* Push tree EXP onto vector QUEUE if it is not already present. */
4190 static void
4191 push_without_duplicates (tree exp, vec<tree> *queue)
4193 unsigned int i;
4194 tree iter;
4196 FOR_EACH_VEC_ELT (*queue, i, iter)
4197 if (simple_cst_equal (iter, exp) == 1)
4198 break;
4200 if (!iter)
4201 queue->safe_push (exp);
4204 /* Given a tree EXP, find all occurrences of references to fields
4205 in a PLACEHOLDER_EXPR and place them in vector REFS without
4206 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4207 we assume here that EXP contains only arithmetic expressions
4208 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4209 argument list. */
4211 void
4212 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4214 enum tree_code code = TREE_CODE (exp);
4215 tree inner;
4216 int i;
4218 /* We handle TREE_LIST and COMPONENT_REF separately. */
4219 if (code == TREE_LIST)
4221 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4222 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4224 else if (code == COMPONENT_REF)
4226 for (inner = TREE_OPERAND (exp, 0);
4227 REFERENCE_CLASS_P (inner);
4228 inner = TREE_OPERAND (inner, 0))
4231 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4232 push_without_duplicates (exp, refs);
4233 else
4234 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4236 else
4237 switch (TREE_CODE_CLASS (code))
4239 case tcc_constant:
4240 break;
4242 case tcc_declaration:
4243 /* Variables allocated to static storage can stay. */
4244 if (!TREE_STATIC (exp))
4245 push_without_duplicates (exp, refs);
4246 break;
4248 case tcc_expression:
4249 /* This is the pattern built in ada/make_aligning_type. */
4250 if (code == ADDR_EXPR
4251 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4253 push_without_duplicates (exp, refs);
4254 break;
4257 /* Fall through. */
4259 case tcc_exceptional:
4260 case tcc_unary:
4261 case tcc_binary:
4262 case tcc_comparison:
4263 case tcc_reference:
4264 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4265 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4266 break;
4268 case tcc_vl_exp:
4269 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4270 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4271 break;
4273 default:
4274 gcc_unreachable ();
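/* A minimal usage sketch (not from the original file) for the routine above:
   report whether a size or offset expression added any placeholder-dependent
   references to REFS.  The example_* name is illustrative only.  */

static bool
example_expr_adds_placeholder_refs (tree expr, vec<tree> *refs)
{
  unsigned int n_before = refs->length ();
  if (expr)
    find_placeholder_in_expr (expr, refs);
  return refs->length () > n_before;
}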
4278 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4279 return a tree with all occurrences of references to F in a
4280 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4281 CONST_DECLs. Note that we assume here that EXP contains only
4282 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4283 occurring only in their argument list. */
4285 tree
4286 substitute_in_expr (tree exp, tree f, tree r)
4288 enum tree_code code = TREE_CODE (exp);
4289 tree op0, op1, op2, op3;
4290 tree new_tree;
4292 /* We handle TREE_LIST and COMPONENT_REF separately. */
4293 if (code == TREE_LIST)
4295 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4296 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4297 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4298 return exp;
4300 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4302 else if (code == COMPONENT_REF)
4304 tree inner;
4306 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4307 and it is the right field, replace it with R. */
4308 for (inner = TREE_OPERAND (exp, 0);
4309 REFERENCE_CLASS_P (inner);
4310 inner = TREE_OPERAND (inner, 0))
4313 /* The field. */
4314 op1 = TREE_OPERAND (exp, 1);
4316 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4317 return r;
4319 /* If this expression hasn't been completed yet, leave it alone. */
4320 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4321 return exp;
4323 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4324 if (op0 == TREE_OPERAND (exp, 0))
4325 return exp;
4327 new_tree
4328 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4330 else
4331 switch (TREE_CODE_CLASS (code))
4333 case tcc_constant:
4334 return exp;
4336 case tcc_declaration:
4337 if (exp == f)
4338 return r;
4339 else
4340 return exp;
4342 case tcc_expression:
4343 if (exp == f)
4344 return r;
4346 /* Fall through. */
4348 case tcc_exceptional:
4349 case tcc_unary:
4350 case tcc_binary:
4351 case tcc_comparison:
4352 case tcc_reference:
4353 switch (TREE_CODE_LENGTH (code))
4355 case 0:
4356 return exp;
4358 case 1:
4359 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4360 if (op0 == TREE_OPERAND (exp, 0))
4361 return exp;
4363 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4364 break;
4366 case 2:
4367 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4368 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4370 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4371 return exp;
4373 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4374 break;
4376 case 3:
4377 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4378 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4379 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4381 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4382 && op2 == TREE_OPERAND (exp, 2))
4383 return exp;
4385 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4386 break;
4388 case 4:
4389 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4390 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4391 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4392 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4394 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4395 && op2 == TREE_OPERAND (exp, 2)
4396 && op3 == TREE_OPERAND (exp, 3))
4397 return exp;
4399 new_tree
4400 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4401 break;
4403 default:
4404 gcc_unreachable ();
4406 break;
4408 case tcc_vl_exp:
4410 int i;
4412 new_tree = NULL_TREE;
4414 /* If we are trying to replace F with a constant or with another
4415 instance of one of the arguments of the call, inline back
4416 functions that do nothing other than compute a value from
4417 the arguments they are passed. This makes it possible to
4418 fold the replacement expression partially or entirely. */
4419 if (code == CALL_EXPR)
4421 bool maybe_inline = false;
4422 if (CONSTANT_CLASS_P (r))
4423 maybe_inline = true;
4424 else
4425 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4426 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4428 maybe_inline = true;
4429 break;
4431 if (maybe_inline)
4433 tree t = maybe_inline_call_in_expr (exp);
4434 if (t)
4435 return SUBSTITUTE_IN_EXPR (t, f, r);
4439 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4441 tree op = TREE_OPERAND (exp, i);
4442 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4443 if (new_op != op)
4445 if (!new_tree)
4446 new_tree = copy_node (exp);
4447 TREE_OPERAND (new_tree, i) = new_op;
4451 if (new_tree)
4453 new_tree = fold (new_tree);
4454 if (TREE_CODE (new_tree) == CALL_EXPR)
4455 process_call_operands (new_tree);
4457 else
4458 return exp;
4460 break;
4462 default:
4463 gcc_unreachable ();
4466 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4468 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4469 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4471 return new_tree;
4474 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4475 for it within OBJ, a tree that is an object or a chain of references. */
4477 tree
4478 substitute_placeholder_in_expr (tree exp, tree obj)
4480 enum tree_code code = TREE_CODE (exp);
4481 tree op0, op1, op2, op3;
4482 tree new_tree;
4484 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4485 in the chain of OBJ. */
4486 if (code == PLACEHOLDER_EXPR)
4488 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4489 tree elt;
4491 for (elt = obj; elt != 0;
4492 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4493 || TREE_CODE (elt) == COND_EXPR)
4494 ? TREE_OPERAND (elt, 1)
4495 : (REFERENCE_CLASS_P (elt)
4496 || UNARY_CLASS_P (elt)
4497 || BINARY_CLASS_P (elt)
4498 || VL_EXP_CLASS_P (elt)
4499 || EXPRESSION_CLASS_P (elt))
4500 ? TREE_OPERAND (elt, 0) : 0))
4501 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4502 return elt;
4504 for (elt = obj; elt != 0;
4505 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4506 || TREE_CODE (elt) == COND_EXPR)
4507 ? TREE_OPERAND (elt, 1)
4508 : (REFERENCE_CLASS_P (elt)
4509 || UNARY_CLASS_P (elt)
4510 || BINARY_CLASS_P (elt)
4511 || VL_EXP_CLASS_P (elt)
4512 || EXPRESSION_CLASS_P (elt))
4513 ? TREE_OPERAND (elt, 0) : 0))
4514 if (POINTER_TYPE_P (TREE_TYPE (elt))
4515 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4516 == need_type))
4517 return fold_build1 (INDIRECT_REF, need_type, elt);
4519 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4520 survives until RTL generation, there will be an error. */
4521 return exp;
4524 /* TREE_LIST is special because we need to look at TREE_VALUE
4525 and TREE_CHAIN, not TREE_OPERANDS. */
4526 else if (code == TREE_LIST)
4528 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4529 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4530 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4531 return exp;
4533 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4535 else
4536 switch (TREE_CODE_CLASS (code))
4538 case tcc_constant:
4539 case tcc_declaration:
4540 return exp;
4542 case tcc_exceptional:
4543 case tcc_unary:
4544 case tcc_binary:
4545 case tcc_comparison:
4546 case tcc_expression:
4547 case tcc_reference:
4548 case tcc_statement:
4549 switch (TREE_CODE_LENGTH (code))
4551 case 0:
4552 return exp;
4554 case 1:
4555 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4556 if (op0 == TREE_OPERAND (exp, 0))
4557 return exp;
4559 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4560 break;
4562 case 2:
4563 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4564 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4566 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4567 return exp;
4569 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4570 break;
4572 case 3:
4573 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4574 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4575 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4577 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4578 && op2 == TREE_OPERAND (exp, 2))
4579 return exp;
4581 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4582 break;
4584 case 4:
4585 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4586 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4587 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4588 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4590 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4591 && op2 == TREE_OPERAND (exp, 2)
4592 && op3 == TREE_OPERAND (exp, 3))
4593 return exp;
4595 new_tree
4596 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4597 break;
4599 default:
4600 gcc_unreachable ();
4602 break;
4604 case tcc_vl_exp:
4606 int i;
4608 new_tree = NULL_TREE;
4610 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4612 tree op = TREE_OPERAND (exp, i);
4613 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4614 if (new_op != op)
4616 if (!new_tree)
4617 new_tree = copy_node (exp);
4618 TREE_OPERAND (new_tree, i) = new_op;
4622 if (new_tree)
4624 new_tree = fold (new_tree);
4625 if (TREE_CODE (new_tree) == CALL_EXPR)
4626 process_call_operands (new_tree);
4628 else
4629 return exp;
4631 break;
4633 default:
4634 gcc_unreachable ();
4637 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4639 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4640 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4642 return new_tree;
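/* A minimal usage sketch (not from the original file) for the routine above:
   resolve the self-referential size of OBJ's type against OBJ itself, as a
   caller computing the size of a placeholder-dependent type might do.  The
   example_* name is illustrative only.  */

static tree
example_object_size_in_bytes (tree obj)
{
  tree size = TYPE_SIZE_UNIT (TREE_TYPE (obj));
  return size ? substitute_placeholder_in_expr (size, obj) : NULL_TREE;
}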
4646 /* Subroutine of stabilize_reference; this is called for subtrees of
4647 references. Any expression with side-effects must be put in a SAVE_EXPR
4648 to ensure that it is only evaluated once.
4650 We don't put SAVE_EXPR nodes around everything, because assigning very
4651 simple expressions to temporaries causes us to miss good opportunities
4652 for optimizations. Among other things, the opportunity to fold in the
4653 addition of a constant into an addressing mode often gets lost, e.g.
4654 "y[i+1] += x;". In general, we take the approach that we should not make
4655 an assignment unless we are forced into it - i.e., that any non-side effect
4656 operator should be allowed, and that cse should take care of coalescing
4657 multiple utterances of the same expression should that prove fruitful. */
4659 static tree
4660 stabilize_reference_1 (tree e)
4662 tree result;
4663 enum tree_code code = TREE_CODE (e);
4665 /* We cannot ignore const expressions because the expression might be a
4666 reference to a const array whose index contains side-effects. But we
4667 can ignore things that are actually constant or that have already been
4668 handled by this function. */
4670 if (tree_invariant_p (e))
4671 return e;
4673 switch (TREE_CODE_CLASS (code))
4675 case tcc_exceptional:
4676 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4677 have side-effects. */
4678 if (code == STATEMENT_LIST)
4679 return save_expr (e);
4680 /* FALLTHRU */
4681 case tcc_type:
4682 case tcc_declaration:
4683 case tcc_comparison:
4684 case tcc_statement:
4685 case tcc_expression:
4686 case tcc_reference:
4687 case tcc_vl_exp:
4688 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4689 so that it will only be evaluated once. */
4690 /* The reference (r) and comparison (<) classes could be handled as
4691 below, but it is generally faster to only evaluate them once. */
4692 if (TREE_SIDE_EFFECTS (e))
4693 return save_expr (e);
4694 return e;
4696 case tcc_constant:
4697 /* Constants need no processing. In fact, we should never reach
4698 here. */
4699 return e;
4701 case tcc_binary:
4702 /* Division is slow and tends to be compiled with jumps,
4703 especially the division by powers of 2 that is often
4704 found inside of an array reference. So do it just once. */
4705 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4706 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4707 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4708 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4709 return save_expr (e);
4710 /* Recursively stabilize each operand. */
4711 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4712 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4713 break;
4715 case tcc_unary:
4716 /* Recursively stabilize each operand. */
4717 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4718 break;
4720 default:
4721 gcc_unreachable ();
4724 TREE_TYPE (result) = TREE_TYPE (e);
4725 TREE_READONLY (result) = TREE_READONLY (e);
4726 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4727 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4729 return result;
4732 /* Stabilize a reference so that we can use it any number of times
4733 without causing its operands to be evaluated more than once.
4734 Returns the stabilized reference. This works by means of save_expr,
4735 so see the caveats in the comments about save_expr.
4737 Also allows conversion expressions whose operands are references.
4738 Any other kind of expression is returned unchanged. */
4740 tree
4741 stabilize_reference (tree ref)
4743 tree result;
4744 enum tree_code code = TREE_CODE (ref);
4746 switch (code)
4748 case VAR_DECL:
4749 case PARM_DECL:
4750 case RESULT_DECL:
4751 /* No action is needed in this case. */
4752 return ref;
4754 CASE_CONVERT:
4755 case FLOAT_EXPR:
4756 case FIX_TRUNC_EXPR:
4757 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4758 break;
4760 case INDIRECT_REF:
4761 result = build_nt (INDIRECT_REF,
4762 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4763 break;
4765 case COMPONENT_REF:
4766 result = build_nt (COMPONENT_REF,
4767 stabilize_reference (TREE_OPERAND (ref, 0)),
4768 TREE_OPERAND (ref, 1), NULL_TREE);
4769 break;
4771 case BIT_FIELD_REF:
4772 result = build_nt (BIT_FIELD_REF,
4773 stabilize_reference (TREE_OPERAND (ref, 0)),
4774 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4775 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4776 break;
4778 case ARRAY_REF:
4779 result = build_nt (ARRAY_REF,
4780 stabilize_reference (TREE_OPERAND (ref, 0)),
4781 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4782 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4783 break;
4785 case ARRAY_RANGE_REF:
4786 result = build_nt (ARRAY_RANGE_REF,
4787 stabilize_reference (TREE_OPERAND (ref, 0)),
4788 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4789 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4790 break;
4792 case COMPOUND_EXPR:
4793 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4794 it wouldn't be ignored. This matters when dealing with
4795 volatiles. */
4796 return stabilize_reference_1 (ref);
4798 /* If arg isn't a kind of lvalue we recognize, make no change.
4799 Caller should recognize the error for an invalid lvalue. */
4800 default:
4801 return ref;
4803 case ERROR_MARK:
4804 return error_mark_node;
4807 TREE_TYPE (result) = TREE_TYPE (ref);
4808 TREE_READONLY (result) = TREE_READONLY (ref);
4809 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4810 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4811 protected_set_expr_location (result, EXPR_LOCATION (ref));
4813 return result;
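/* A minimal usage sketch (not from the original file) for the routine above:
   reuse an integer-typed lvalue REF on both sides of an assignment without
   evaluating its operands twice.  The example_* name is illustrative only.  */

static tree
example_increment_lvalue (tree ref)
{
  tree stable = stabilize_reference (ref);
  tree one = build_int_cst (TREE_TYPE (stable), 1);
  tree sum = build2 (PLUS_EXPR, TREE_TYPE (stable), stable, one);
  return build2 (MODIFY_EXPR, TREE_TYPE (stable), stable, sum);
}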
4816 /* Low-level constructors for expressions. */
4818 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4819 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4821 void
4822 recompute_tree_invariant_for_addr_expr (tree t)
4824 tree node;
4825 bool tc = true, se = false;
4827 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4829 /* We started out assuming this address is both invariant and constant, but
4830 does not have side effects. Now go down any handled components and see if
4831 any of them involve offsets that are either non-constant or non-invariant.
4832 Also check for side-effects.
4834 ??? Note that this code makes no attempt to deal with the case where
4835 taking the address of something causes a copy due to misalignment. */
4837 #define UPDATE_FLAGS(NODE) \
4838 do { tree _node = (NODE); \
4839 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4840 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4842 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4843 node = TREE_OPERAND (node, 0))
4845 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4846 array reference (probably made temporarily by the G++ front end),
4847 so ignore all the operands. */
4848 if ((TREE_CODE (node) == ARRAY_REF
4849 || TREE_CODE (node) == ARRAY_RANGE_REF)
4850 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4852 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4853 if (TREE_OPERAND (node, 2))
4854 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4855 if (TREE_OPERAND (node, 3))
4856 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4858 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4859 FIELD_DECL, apparently. The G++ front end can put something else
4860 there, at least temporarily. */
4861 else if (TREE_CODE (node) == COMPONENT_REF
4862 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4864 if (TREE_OPERAND (node, 2))
4865 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4869 node = lang_hooks.expr_to_decl (node, &tc, &se);
4871 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4872 the address, since &(*a)->b is a form of addition. If it's a constant, the
4873 address is constant too. If it's a decl, its address is constant if the
4874 decl is static. Everything else is not constant and, furthermore,
4875 taking the address of a volatile variable is not volatile. */
4876 if (TREE_CODE (node) == INDIRECT_REF
4877 || TREE_CODE (node) == MEM_REF)
4878 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4879 else if (CONSTANT_CLASS_P (node))
4881 else if (DECL_P (node))
4882 tc &= (staticp (node) != NULL_TREE);
4883 else
4885 tc = false;
4886 se |= TREE_SIDE_EFFECTS (node);
4890 TREE_CONSTANT (t) = tc;
4891 TREE_SIDE_EFFECTS (t) = se;
4892 #undef UPDATE_FLAGS
4895 /* Build an expression of code CODE, data type TYPE, and operands as
4896 specified. Expressions and reference nodes can be created this way.
4897 Constants, decls, types and misc nodes cannot be.
4899 We define six non-variadic functions, from 0 to 5 arguments. This is
4900 enough for all extant tree codes. */
4902 tree
4903 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4905 tree t;
4907 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4909 t = make_node (code PASS_MEM_STAT);
4910 TREE_TYPE (t) = tt;
4912 return t;
4915 tree
4916 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4918 int length = sizeof (struct tree_exp);
4919 tree t;
4921 record_node_allocation_statistics (code, length);
4923 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4925 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4927 memset (t, 0, sizeof (struct tree_common));
4929 TREE_SET_CODE (t, code);
4931 TREE_TYPE (t) = type;
4932 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4933 TREE_OPERAND (t, 0) = node;
4934 if (node && !TYPE_P (node))
4936 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4937 TREE_READONLY (t) = TREE_READONLY (node);
4940 if (TREE_CODE_CLASS (code) == tcc_statement)
4942 if (code != DEBUG_BEGIN_STMT)
4943 TREE_SIDE_EFFECTS (t) = 1;
4945 else switch (code)
4947 case VA_ARG_EXPR:
4948 /* All of these have side-effects, no matter what their
4949 operands are. */
4950 TREE_SIDE_EFFECTS (t) = 1;
4951 TREE_READONLY (t) = 0;
4952 break;
4954 case INDIRECT_REF:
4955 /* Whether a dereference is readonly has nothing to do with whether
4956 its operand is readonly. */
4957 TREE_READONLY (t) = 0;
4958 break;
4960 case ADDR_EXPR:
4961 if (node)
4962 recompute_tree_invariant_for_addr_expr (t);
4963 break;
4965 default:
4966 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4967 && node && !TYPE_P (node)
4968 && TREE_CONSTANT (node))
4969 TREE_CONSTANT (t) = 1;
4970 if (TREE_CODE_CLASS (code) == tcc_reference
4971 && node && TREE_THIS_VOLATILE (node))
4972 TREE_THIS_VOLATILE (t) = 1;
4973 break;
4976 return t;
4979 #define PROCESS_ARG(N) \
4980 do { \
4981 TREE_OPERAND (t, N) = arg##N; \
4982 if (arg##N && !TYPE_P (arg##N)) \
4984 if (TREE_SIDE_EFFECTS (arg##N)) \
4985 side_effects = 1; \
4986 if (!TREE_READONLY (arg##N) \
4987 && !CONSTANT_CLASS_P (arg##N)) \
4988 (void) (read_only = 0); \
4989 if (!TREE_CONSTANT (arg##N)) \
4990 (void) (constant = 0); \
4992 } while (0)
4994 tree
4995 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4997 bool constant, read_only, side_effects, div_by_zero;
4998 tree t;
5000 gcc_assert (TREE_CODE_LENGTH (code) == 2);
5002 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
5003 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
5004 /* When sizetype precision doesn't match that of pointers
5005 we need to be able to build explicit extensions or truncations
5006 of the offset argument. */
5007 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
5008 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
5009 && TREE_CODE (arg1) == INTEGER_CST);
5011 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
5012 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
5013 && ptrofftype_p (TREE_TYPE (arg1)));
5015 t = make_node (code PASS_MEM_STAT);
5016 TREE_TYPE (t) = tt;
5018 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
5019 result based on those same flags for the arguments. But if the
5020 arguments aren't really even `tree' expressions, we shouldn't be trying
5021 to do this. */
5023 /* Expressions without side effects may be constant if their
5024 arguments are as well. */
5025 constant = (TREE_CODE_CLASS (code) == tcc_comparison
5026 || TREE_CODE_CLASS (code) == tcc_binary);
5027 read_only = 1;
5028 side_effects = TREE_SIDE_EFFECTS (t);
5030 switch (code)
5032 case TRUNC_DIV_EXPR:
5033 case CEIL_DIV_EXPR:
5034 case FLOOR_DIV_EXPR:
5035 case ROUND_DIV_EXPR:
5036 case EXACT_DIV_EXPR:
5037 case CEIL_MOD_EXPR:
5038 case FLOOR_MOD_EXPR:
5039 case ROUND_MOD_EXPR:
5040 case TRUNC_MOD_EXPR:
5041 div_by_zero = integer_zerop (arg1);
5042 break;
5043 default:
5044 div_by_zero = false;
5047 PROCESS_ARG (0);
5048 PROCESS_ARG (1);
5050 TREE_SIDE_EFFECTS (t) = side_effects;
5051 if (code == MEM_REF)
5053 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5055 tree o = TREE_OPERAND (arg0, 0);
5056 TREE_READONLY (t) = TREE_READONLY (o);
5057 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5060 else
5062 TREE_READONLY (t) = read_only;
5063 /* Don't mark X / 0 as constant. */
5064 TREE_CONSTANT (t) = constant && !div_by_zero;
5065 TREE_THIS_VOLATILE (t)
5066 = (TREE_CODE_CLASS (code) == tcc_reference
5067 && arg0 && TREE_THIS_VOLATILE (arg0));
5070 return t;
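/* A minimal sketch (not from the original file) of the POINTER_PLUS_EXPR
   constraint asserted above: the result and first operand are pointers and
   the offset operand has sizetype.  The example_* name is illustrative
   only.  */

static tree
example_pointer_plus_bytes (tree ptr, HOST_WIDE_INT bytes)
{
  tree off = build_int_cst (sizetype, bytes);
  return build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);
}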
5074 tree
5075 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
5076 tree arg2 MEM_STAT_DECL)
5078 bool constant, read_only, side_effects;
5079 tree t;
5081 gcc_assert (TREE_CODE_LENGTH (code) == 3);
5082 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5084 t = make_node (code PASS_MEM_STAT);
5085 TREE_TYPE (t) = tt;
5087 read_only = 1;
5089 /* As a special exception, if COND_EXPR has NULL branches, we
5090 assume that it is a gimple statement and always consider
5091 it to have side effects. */
5092 if (code == COND_EXPR
5093 && tt == void_type_node
5094 && arg1 == NULL_TREE
5095 && arg2 == NULL_TREE)
5096 side_effects = true;
5097 else
5098 side_effects = TREE_SIDE_EFFECTS (t);
5100 PROCESS_ARG (0);
5101 PROCESS_ARG (1);
5102 PROCESS_ARG (2);
5104 if (code == COND_EXPR)
5105 TREE_READONLY (t) = read_only;
5107 TREE_SIDE_EFFECTS (t) = side_effects;
5108 TREE_THIS_VOLATILE (t)
5109 = (TREE_CODE_CLASS (code) == tcc_reference
5110 && arg0 && TREE_THIS_VOLATILE (arg0));
5112 return t;
5115 tree
5116 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
5117 tree arg2, tree arg3 MEM_STAT_DECL)
5119 bool constant, read_only, side_effects;
5120 tree t;
5122 gcc_assert (TREE_CODE_LENGTH (code) == 4);
5124 t = make_node (code PASS_MEM_STAT);
5125 TREE_TYPE (t) = tt;
5127 side_effects = TREE_SIDE_EFFECTS (t);
5129 PROCESS_ARG (0);
5130 PROCESS_ARG (1);
5131 PROCESS_ARG (2);
5132 PROCESS_ARG (3);
5134 TREE_SIDE_EFFECTS (t) = side_effects;
5135 TREE_THIS_VOLATILE (t)
5136 = (TREE_CODE_CLASS (code) == tcc_reference
5137 && arg0 && TREE_THIS_VOLATILE (arg0));
5139 return t;
5142 tree
5143 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5144 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5146 bool constant, read_only, side_effects;
5147 tree t;
5149 gcc_assert (TREE_CODE_LENGTH (code) == 5);
5151 t = make_node (code PASS_MEM_STAT);
5152 TREE_TYPE (t) = tt;
5154 side_effects = TREE_SIDE_EFFECTS (t);
5156 PROCESS_ARG (0);
5157 PROCESS_ARG (1);
5158 PROCESS_ARG (2);
5159 PROCESS_ARG (3);
5160 PROCESS_ARG (4);
5162 TREE_SIDE_EFFECTS (t) = side_effects;
5163 if (code == TARGET_MEM_REF)
5165 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5167 tree o = TREE_OPERAND (arg0, 0);
5168 TREE_READONLY (t) = TREE_READONLY (o);
5169 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5172 else
5173 TREE_THIS_VOLATILE (t)
5174 = (TREE_CODE_CLASS (code) == tcc_reference
5175 && arg0 && TREE_THIS_VOLATILE (arg0));
5177 return t;
5180 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
5181 on the pointer PTR. */
5183 tree
5184 build_simple_mem_ref_loc (location_t loc, tree ptr)
5186 poly_int64 offset = 0;
5187 tree ptype = TREE_TYPE (ptr);
5188 tree tem;
5189 /* For convenience allow addresses that collapse to a simple base
5190 and offset. */
5191 if (TREE_CODE (ptr) == ADDR_EXPR
5192 && (handled_component_p (TREE_OPERAND (ptr, 0))
5193 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5195 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5196 gcc_assert (ptr);
5197 if (TREE_CODE (ptr) == MEM_REF)
5199 offset += mem_ref_offset (ptr).force_shwi ();
5200 ptr = TREE_OPERAND (ptr, 0);
5202 else
5203 ptr = build_fold_addr_expr (ptr);
5204 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5206 tem = build2 (MEM_REF, TREE_TYPE (ptype),
5207 ptr, build_int_cst (ptype, offset));
5208 SET_EXPR_LOCATION (tem, loc);
5209 return tem;
5212 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5214 poly_offset_int
5215 mem_ref_offset (const_tree t)
5217 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5218 SIGNED);
5221 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5222 offsetted by OFFSET units. */
5224 tree
5225 build_invariant_address (tree type, tree base, poly_int64 offset)
5227 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5228 build_fold_addr_expr (base),
5229 build_int_cst (ptr_type_node, offset));
5230 tree addr = build1 (ADDR_EXPR, type, ref);
5231 recompute_tree_invariant_for_addr_expr (addr);
5232 return addr;
5235 /* Similar to build0 through build5 above, except don't specify the TREE_TYPE
5236 and leave the TREE_SIDE_EFFECTS as 0.
5237 It is permissible for arguments to be null,
5238 or even garbage if their values do not matter. */
5240 tree
5241 build_nt (enum tree_code code, ...)
5243 tree t;
5244 int length;
5245 int i;
5246 va_list p;
5248 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5250 va_start (p, code);
5252 t = make_node (code);
5253 length = TREE_CODE_LENGTH (code);
5255 for (i = 0; i < length; i++)
5256 TREE_OPERAND (t, i) = va_arg (p, tree);
5258 va_end (p);
5259 return t;
5262 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5263 tree vec. */
5265 tree
5266 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5268 tree ret, t;
5269 unsigned int ix;
5271 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5272 CALL_EXPR_FN (ret) = fn;
5273 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5274 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5275 CALL_EXPR_ARG (ret, ix) = t;
5276 return ret;
5279 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5280 and data type TYPE.
5281 We do NOT enter this node in any sort of symbol table.
5283 LOC is the location of the decl.
5285 layout_decl is used to set up the decl's storage layout.
5286 Other slots are initialized to 0 or null pointers. */
5288 tree
5289 build_decl (location_t loc, enum tree_code code, tree name,
5290 tree type MEM_STAT_DECL)
5292 tree t;
5294 t = make_node (code PASS_MEM_STAT);
5295 DECL_SOURCE_LOCATION (t) = loc;
5297 /* if (type == error_mark_node)
5298 type = integer_type_node; */
5299 /* That is not done, deliberately, so that having error_mark_node
5300 as the type can suppress useless errors in the use of this variable. */
5302 DECL_NAME (t) = name;
5303 TREE_TYPE (t) = type;
5305 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5306 layout_decl (t, 0);
5308 return t;
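/* A minimal usage sketch (not from the original file) for the routine above:
   create an artificial temporary variable of the given TYPE; layout_decl is
   invoked by build_decl itself for VAR_DECLs.  The name string and the
   example_* identifier are illustrative only.  */

static tree
example_make_artificial_var (location_t loc, tree type)
{
  tree var = build_decl (loc, VAR_DECL, get_identifier ("example_tmp"), type);
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  return var;
}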
5311 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5313 tree
5314 build_debug_expr_decl (tree type)
5316 tree vexpr = make_node (DEBUG_EXPR_DECL);
5317 DECL_ARTIFICIAL (vexpr) = 1;
5318 TREE_TYPE (vexpr) = type;
5319 SET_DECL_MODE (vexpr, TYPE_MODE (type));
5320 return vexpr;
5323 /* Builds and returns function declaration with NAME and TYPE. */
5325 tree
5326 build_fn_decl (const char *name, tree type)
5328 tree id = get_identifier (name);
5329 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5331 DECL_EXTERNAL (decl) = 1;
5332 TREE_PUBLIC (decl) = 1;
5333 DECL_ARTIFICIAL (decl) = 1;
5334 TREE_NOTHROW (decl) = 1;
5336 return decl;
5339 vec<tree, va_gc> *all_translation_units;
5341 /* Builds a new translation-unit decl with name NAME, queues it in the
5342 global list of translation-unit decls and returns it. */
5344 tree
5345 build_translation_unit_decl (tree name)
5347 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5348 name, NULL_TREE);
5349 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5350 vec_safe_push (all_translation_units, tu);
5351 return tu;
5355 /* BLOCK nodes are used to represent the structure of binding contours
5356 and declarations, once those contours have been exited and their contents
5357 compiled. This information is used for outputting debugging info. */
5359 tree
5360 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5362 tree block = make_node (BLOCK);
5364 BLOCK_VARS (block) = vars;
5365 BLOCK_SUBBLOCKS (block) = subblocks;
5366 BLOCK_SUPERCONTEXT (block) = supercontext;
5367 BLOCK_CHAIN (block) = chain;
5368 return block;
5372 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5374 LOC is the location to use in tree T. */
5376 void
5377 protected_set_expr_location (tree t, location_t loc)
5379 if (CAN_HAVE_LOCATION_P (t))
5380 SET_EXPR_LOCATION (t, loc);
5381 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5383 t = expr_single (t);
5384 if (t && CAN_HAVE_LOCATION_P (t))
5385 SET_EXPR_LOCATION (t, loc);
5389 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5390 UNKNOWN_LOCATION. */
5392 void
5393 protected_set_expr_location_if_unset (tree t, location_t loc)
5395 t = expr_single (t);
5396 if (t && !EXPR_HAS_LOCATION (t))
5397 protected_set_expr_location (t, loc);
5400 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5401 of the various TYPE_QUAL values. */
5403 static void
5404 set_type_quals (tree type, int type_quals)
5406 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5407 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5408 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5409 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5410 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5413 /* Returns true iff CAND and BASE have equivalent language-specific
5414 qualifiers. */
5416 bool
5417 check_lang_type (const_tree cand, const_tree base)
5419 if (lang_hooks.types.type_hash_eq == NULL)
5420 return true;
5421 /* type_hash_eq currently only applies to these types. */
5422 if (TREE_CODE (cand) != FUNCTION_TYPE
5423 && TREE_CODE (cand) != METHOD_TYPE)
5424 return true;
5425 return lang_hooks.types.type_hash_eq (cand, base);
5428 /* This function checks to see if TYPE matches the size of one of the built-in
5429 atomic types, and returns that core atomic type. */
5431 static tree
5432 find_atomic_core_type (const_tree type)
5434 tree base_atomic_type;
5436 /* Only handle complete types. */
5437 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5438 return NULL_TREE;
5440 switch (tree_to_uhwi (TYPE_SIZE (type)))
5442 case 8:
5443 base_atomic_type = atomicQI_type_node;
5444 break;
5446 case 16:
5447 base_atomic_type = atomicHI_type_node;
5448 break;
5450 case 32:
5451 base_atomic_type = atomicSI_type_node;
5452 break;
5454 case 64:
5455 base_atomic_type = atomicDI_type_node;
5456 break;
5458 case 128:
5459 base_atomic_type = atomicTI_type_node;
5460 break;
5462 default:
5463 base_atomic_type = NULL_TREE;
5466 return base_atomic_type;
5469 /* Returns true iff unqualified CAND and BASE are equivalent. */
5471 bool
5472 check_base_type (const_tree cand, const_tree base)
5474 if (TYPE_NAME (cand) != TYPE_NAME (base)
5475 /* Apparently this is needed for Objective-C. */
5476 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5477 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5478 TYPE_ATTRIBUTES (base)))
5479 return false;
5480 /* Check alignment. */
5481 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5482 && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5483 return true;
5484 /* Atomic types increase minimal alignment. We must do so as well
5485 or we get duplicated canonical types. See PR88686. */
5486 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5488 /* See if this object can map to a basic atomic type. */
5489 tree atomic_type = find_atomic_core_type (cand);
5490 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5491 return true;
5493 return false;
5496 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5498 bool
5499 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5501 return (TYPE_QUALS (cand) == type_quals
5502 && check_base_type (cand, base)
5503 && check_lang_type (cand, base));
5506 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5508 static bool
5509 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5511 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5512 && TYPE_NAME (cand) == TYPE_NAME (base)
5513 /* Apparently this is needed for Objective-C. */
5514 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5515 /* Check alignment. */
5516 && TYPE_ALIGN (cand) == align
5517 /* Check this is a user-aligned type as build_aligned_type
5518 would create. */
5519 && TYPE_USER_ALIGN (cand)
5520 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5521 TYPE_ATTRIBUTES (base))
5522 && check_lang_type (cand, base));
5525 /* Return a version of the TYPE, qualified as indicated by the
5526 TYPE_QUALS, if one exists. If no qualified version exists yet,
5527 return NULL_TREE. */
5529 tree
5530 get_qualified_type (tree type, int type_quals)
5532 if (TYPE_QUALS (type) == type_quals)
5533 return type;
5535 tree mv = TYPE_MAIN_VARIANT (type);
5536 if (check_qualified_type (mv, type, type_quals))
5537 return mv;
5539 /* Search the chain of variants to see if there is already one there just
5540 like the one we need to have. If so, use that existing one. We must
5541 preserve the TYPE_NAME, since there is code that depends on this. */
5542 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
5543 if (check_qualified_type (*tp, type, type_quals))
5545 /* Put the found variant at the head of the variant list so
5546 frequently searched variants get found faster. The C++ FE
5547 benefits greatly from this. */
5548 tree t = *tp;
5549 *tp = TYPE_NEXT_VARIANT (t);
5550 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
5551 TYPE_NEXT_VARIANT (mv) = t;
5552 return t;
5555 return NULL_TREE;
5558 /* Like get_qualified_type, but creates the type if it does not
5559 exist. This function never returns NULL_TREE. */
5561 tree
5562 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5564 tree t;
5566 /* See if we already have the appropriate qualified variant. */
5567 t = get_qualified_type (type, type_quals);
5569 /* If not, build it. */
5570 if (!t)
5572 t = build_variant_type_copy (type PASS_MEM_STAT);
5573 set_type_quals (t, type_quals);
5575 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5577 /* See if this object can map to a basic atomic type. */
5578 tree atomic_type = find_atomic_core_type (type);
5579 if (atomic_type)
5581 /* Ensure the alignment of this type is compatible with
5582 the required alignment of the atomic type. */
5583 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5584 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5588 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5589 /* Propagate structural equality. */
5590 SET_TYPE_STRUCTURAL_EQUALITY (t);
5591 else if (TYPE_CANONICAL (type) != type)
5592 /* Build the underlying canonical type, since it is different
5593 from TYPE. */
5595 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5596 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5598 else
5599 /* T is its own canonical type. */
5600 TYPE_CANONICAL (t) = t;
5604 return t;
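/* A minimal usage sketch (not from the original file) for the routine above:
   obtain a const-qualified variant of TYPE, reusing a cached variant when
   get_qualified_type finds one.  The example_* name is illustrative only.  */

static tree
example_make_const_variant (tree type)
{
  return build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST);
}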
5607 /* Create a variant of type T with alignment ALIGN. */
5609 tree
5610 build_aligned_type (tree type, unsigned int align)
5612 tree t;
5614 if (TYPE_PACKED (type)
5615 || TYPE_ALIGN (type) == align)
5616 return type;
5618 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5619 if (check_aligned_type (t, type, align))
5620 return t;
5622 t = build_variant_type_copy (type);
5623 SET_TYPE_ALIGN (t, align);
5624 TYPE_USER_ALIGN (t) = 1;
5626 return t;
5629 /* Create a new distinct copy of TYPE. The new type is made its own
5630 MAIN_VARIANT. If TYPE requires structural equality checks, the
5631 resulting type requires structural equality checks; otherwise, its
5632 TYPE_CANONICAL points to itself. */
5634 tree
5635 build_distinct_type_copy (tree type MEM_STAT_DECL)
5637 tree t = copy_node (type PASS_MEM_STAT);
5639 TYPE_POINTER_TO (t) = 0;
5640 TYPE_REFERENCE_TO (t) = 0;
5642 /* Set the canonical type either to a new equivalence class, or
5643 propagate the need for structural equality checks. */
5644 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5645 SET_TYPE_STRUCTURAL_EQUALITY (t);
5646 else
5647 TYPE_CANONICAL (t) = t;
5649 /* Make it its own variant. */
5650 TYPE_MAIN_VARIANT (t) = t;
5651 TYPE_NEXT_VARIANT (t) = 0;
5653 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5654 whose TREE_TYPE is not t. This can also happen in the Ada
5655 frontend when using subtypes. */
5657 return t;
5660 /* Create a new variant of TYPE, equivalent but distinct. This is so
5661 the caller can modify it. TYPE_CANONICAL for the return type will
5662 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5663 are considered equal by the language itself (or that both types
5664 require structural equality checks). */
5666 tree
5667 build_variant_type_copy (tree type MEM_STAT_DECL)
5669 tree t, m = TYPE_MAIN_VARIANT (type);
5671 t = build_distinct_type_copy (type PASS_MEM_STAT);
5673 /* Since we're building a variant, assume that it is a non-semantic
5674 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5675 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5676 /* Type variants have no alias set defined. */
5677 TYPE_ALIAS_SET (t) = -1;
5679 /* Add the new type to the chain of variants of TYPE. */
5680 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5681 TYPE_NEXT_VARIANT (m) = t;
5682 TYPE_MAIN_VARIANT (t) = m;
5684 return t;
5687 /* Return true if the from trees in both tree maps are equal. */
5690 tree_map_base_eq (const void *va, const void *vb)
5692 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5693 *const b = (const struct tree_map_base *) vb;
5694 return (a->from == b->from);
5697 /* Hash a from tree in a tree_map_base. */
5699 unsigned int
5700 tree_map_base_hash (const void *item)
5702 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5705 /* Return true if this tree map structure is marked for garbage collection
5706 purposes. We simply return true if the from tree is marked, so that this
5707 structure goes away when the from tree goes away. */
5710 tree_map_base_marked_p (const void *p)
5712 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5715 /* Hash a from tree in a tree_map. */
5717 unsigned int
5718 tree_map_hash (const void *item)
5720 return (((const struct tree_map *) item)->hash);
5723 /* Hash a from tree in a tree_decl_map. */
5725 unsigned int
5726 tree_decl_map_hash (const void *item)
5728 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5731 /* Return the initialization priority for DECL. */
5733 priority_type
5734 decl_init_priority_lookup (tree decl)
5736 symtab_node *snode = symtab_node::get (decl);
5738 if (!snode)
5739 return DEFAULT_INIT_PRIORITY;
5740 return
5741 snode->get_init_priority ();
5744 /* Return the finalization priority for DECL. */
5746 priority_type
5747 decl_fini_priority_lookup (tree decl)
5749 cgraph_node *node = cgraph_node::get (decl);
5751 if (!node)
5752 return DEFAULT_INIT_PRIORITY;
5753 return
5754 node->get_fini_priority ();
5757 /* Set the initialization priority for DECL to PRIORITY. */
5759 void
5760 decl_init_priority_insert (tree decl, priority_type priority)
5762 struct symtab_node *snode;
5764 if (priority == DEFAULT_INIT_PRIORITY)
5766 snode = symtab_node::get (decl);
5767 if (!snode)
5768 return;
5770 else if (VAR_P (decl))
5771 snode = varpool_node::get_create (decl);
5772 else
5773 snode = cgraph_node::get_create (decl);
5774 snode->set_init_priority (priority);
5777 /* Set the finalization priority for DECL to PRIORITY. */
5779 void
5780 decl_fini_priority_insert (tree decl, priority_type priority)
5782 struct cgraph_node *node;
5784 if (priority == DEFAULT_INIT_PRIORITY)
5786 node = cgraph_node::get (decl);
5787 if (!node)
5788 return;
5790 else
5791 node = cgraph_node::get_create (decl);
5792 node->set_fini_priority (priority);
5795 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5797 static void
5798 print_debug_expr_statistics (void)
5800 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5801 (long) debug_expr_for_decl->size (),
5802 (long) debug_expr_for_decl->elements (),
5803 debug_expr_for_decl->collisions ());
5806 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5808 static void
5809 print_value_expr_statistics (void)
5811 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5812 (long) value_expr_for_decl->size (),
5813 (long) value_expr_for_decl->elements (),
5814 value_expr_for_decl->collisions ());
5817 /* Lookup a debug expression for FROM, and return it if we find one. */
5819 tree
5820 decl_debug_expr_lookup (tree from)
5822 struct tree_decl_map *h, in;
5823 in.base.from = from;
5825 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5826 if (h)
5827 return h->to;
5828 return NULL_TREE;
5831 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5833 void
5834 decl_debug_expr_insert (tree from, tree to)
5836 struct tree_decl_map *h;
5838 h = ggc_alloc<tree_decl_map> ();
5839 h->base.from = from;
5840 h->to = to;
5841 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
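/* A minimal usage sketch (not from the original file) for the pair of
   routines above: return the debug expression recorded for FROM, recording
   FALLBACK first if none exists yet.  The example_* name is illustrative
   only.  */

static tree
example_get_or_record_debug_expr (tree from, tree fallback)
{
  tree dexp = decl_debug_expr_lookup (from);
  if (!dexp)
    {
      decl_debug_expr_insert (from, fallback);
      dexp = fallback;
    }
  return dexp;
}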
5844 /* Lookup a value expression for FROM, and return it if we find one. */
5846 tree
5847 decl_value_expr_lookup (tree from)
5849 struct tree_decl_map *h, in;
5850 in.base.from = from;
5852 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5853 if (h)
5854 return h->to;
5855 return NULL_TREE;
5858 /* Insert a mapping FROM->TO in the value expression hashtable. */
5860 void
5861 decl_value_expr_insert (tree from, tree to)
5863 struct tree_decl_map *h;
5865 h = ggc_alloc<tree_decl_map> ();
5866 h->base.from = from;
5867 h->to = to;
5868 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5871 /* Lookup a vector of debug arguments for FROM, and return it if we
5872 find one. */
5874 vec<tree, va_gc> **
5875 decl_debug_args_lookup (tree from)
5877 struct tree_vec_map *h, in;
5879 if (!DECL_HAS_DEBUG_ARGS_P (from))
5880 return NULL;
5881 gcc_checking_assert (debug_args_for_decl != NULL);
5882 in.base.from = from;
5883 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5884 if (h)
5885 return &h->to;
5886 return NULL;
5889 /* Insert a mapping FROM->empty vector of debug arguments in the debug
5890 arguments hashtable. */
5892 vec<tree, va_gc> **
5893 decl_debug_args_insert (tree from)
5895 struct tree_vec_map *h;
5896 tree_vec_map **loc;
5898 if (DECL_HAS_DEBUG_ARGS_P (from))
5899 return decl_debug_args_lookup (from);
5900 if (debug_args_for_decl == NULL)
5901 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5902 h = ggc_alloc<tree_vec_map> ();
5903 h->base.from = from;
5904 h->to = NULL;
5905 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5906 *loc = h;
5907 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5908 return &h->to;
5911 /* Hashing of types so that we don't make duplicates.
5912 The entry point is `type_hash_canon'. */
5914 /* Generate the default hash code for TYPE. This is designed for
5915 speed, rather than maximum entropy. */
5917 hashval_t
5918 type_hash_canon_hash (tree type)
5920 inchash::hash hstate;
5922 hstate.add_int (TREE_CODE (type));
5924 if (TREE_TYPE (type))
5925 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
5927 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
5928 /* Just the identifier is adequate to distinguish. */
5929 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
5931 switch (TREE_CODE (type))
5933 case METHOD_TYPE:
5934 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
5935 /* FALLTHROUGH. */
5936 case FUNCTION_TYPE:
5937 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
5938 if (TREE_VALUE (t) != error_mark_node)
5939 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
5940 break;
5942 case OFFSET_TYPE:
5943 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
5944 break;
5946 case ARRAY_TYPE:
5948 if (TYPE_DOMAIN (type))
5949 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
5950 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
5952 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
5953 hstate.add_object (typeless);
5956 break;
5958 case INTEGER_TYPE:
5960 tree t = TYPE_MAX_VALUE (type);
5961 if (!t)
5962 t = TYPE_MIN_VALUE (type);
5963 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
5964 hstate.add_object (TREE_INT_CST_ELT (t, i));
5965 break;
5968 case REAL_TYPE:
5969 case FIXED_POINT_TYPE:
5971 unsigned prec = TYPE_PRECISION (type);
5972 hstate.add_object (prec);
5973 break;
5976 case VECTOR_TYPE:
5977 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
5978 break;
5980 default:
5981 break;
5984 return hstate.end ();
5987 /* These are the Hashtable callback functions. */
5989 /* Returns true iff the types are equivalent. */
5991 bool
5992 type_cache_hasher::equal (type_hash *a, type_hash *b)
5994 /* First test the things that are the same for all types. */
5995 if (a->hash != b->hash
5996 || TREE_CODE (a->type) != TREE_CODE (b->type)
5997 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
5998 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
5999 TYPE_ATTRIBUTES (b->type))
6000 || (TREE_CODE (a->type) != COMPLEX_TYPE
6001 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6002 return 0;
6004 /* Be careful about comparing arrays before and after the element type
6005 has been completed; don't compare TYPE_ALIGN unless both types are
6006 complete. */
6007 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6008 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6009 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6010 return 0;
6012 switch (TREE_CODE (a->type))
6014 case VOID_TYPE:
6015 case OPAQUE_TYPE:
6016 case COMPLEX_TYPE:
6017 case POINTER_TYPE:
6018 case REFERENCE_TYPE:
6019 case NULLPTR_TYPE:
6020 return 1;
6022 case VECTOR_TYPE:
6023 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6024 TYPE_VECTOR_SUBPARTS (b->type));
6026 case ENUMERAL_TYPE:
6027 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6028 && !(TYPE_VALUES (a->type)
6029 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6030 && TYPE_VALUES (b->type)
6031 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6032 && type_list_equal (TYPE_VALUES (a->type),
6033 TYPE_VALUES (b->type))))
6034 return 0;
6036 /* fall through */
6038 case INTEGER_TYPE:
6039 case REAL_TYPE:
6040 case BOOLEAN_TYPE:
6041 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6042 return false;
6043 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6044 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6045 TYPE_MAX_VALUE (b->type)))
6046 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6047 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6048 TYPE_MIN_VALUE (b->type))));
6050 case FIXED_POINT_TYPE:
6051 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6053 case OFFSET_TYPE:
6054 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6056 case METHOD_TYPE:
6057 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6058 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6059 || (TYPE_ARG_TYPES (a->type)
6060 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6061 && TYPE_ARG_TYPES (b->type)
6062 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6063 && type_list_equal (TYPE_ARG_TYPES (a->type),
6064 TYPE_ARG_TYPES (b->type)))))
6065 break;
6066 return 0;
6067 case ARRAY_TYPE:
6068 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6069 where the flag should be inherited from the element type
6070 and can change after ARRAY_TYPEs are created; on non-aggregates
6071 compare it and hash it; scalars will never have that flag set
6072 and we need to differentiate between arrays created by different
6073 front-ends or middle-end created arrays. */
6074 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6075 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6076 || (TYPE_TYPELESS_STORAGE (a->type)
6077 == TYPE_TYPELESS_STORAGE (b->type))));
6079 case RECORD_TYPE:
6080 case UNION_TYPE:
6081 case QUAL_UNION_TYPE:
6082 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6083 || (TYPE_FIELDS (a->type)
6084 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6085 && TYPE_FIELDS (b->type)
6086 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6087 && type_list_equal (TYPE_FIELDS (a->type),
6088 TYPE_FIELDS (b->type))));
6090 case FUNCTION_TYPE:
6091 if ((TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6092 && (TYPE_NO_NAMED_ARGS_STDARG_P (a->type)
6093 == TYPE_NO_NAMED_ARGS_STDARG_P (b->type)))
6094 || (TYPE_ARG_TYPES (a->type)
6095 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6096 && TYPE_ARG_TYPES (b->type)
6097 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6098 && type_list_equal (TYPE_ARG_TYPES (a->type),
6099 TYPE_ARG_TYPES (b->type))))
6100 break;
6101 return 0;
6103 default:
6104 return 0;
6107 if (lang_hooks.types.type_hash_eq != NULL)
6108 return lang_hooks.types.type_hash_eq (a->type, b->type);
6110 return 1;
6113 /* Given TYPE, and HASHCODE its hash code, return the canonical
6114 object for an identical type if one already exists.
6115 Otherwise, return TYPE, and record it as the canonical object.
6117 To use this function, first create a type of the sort you want.
6118 Then compute its hash code from the fields of the type that
6119 make it different from other similar types.
6120 Then call this function and use the value. */
6122 tree
6123 type_hash_canon (unsigned int hashcode, tree type)
6125 type_hash in;
6126 type_hash **loc;
6128 /* The hash table only contains main variants, so ensure that's what we're
6129 being passed. */
6130 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6132 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6133 must call that routine before comparing TYPE_ALIGNs. */
6134 layout_type (type);
6136 in.hash = hashcode;
6137 in.type = type;
6139 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6140 if (*loc)
6142 tree t1 = ((type_hash *) *loc)->type;
6143 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6144 && t1 != type);
6145 if (TYPE_UID (type) + 1 == next_type_uid)
6146 --next_type_uid;
6147 /* Free also min/max values and the cache for integer
6148 types. This can't be done in free_node, as LTO frees
6149 those on its own. */
6150 if (TREE_CODE (type) == INTEGER_TYPE)
6152 if (TYPE_MIN_VALUE (type)
6153 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6155 /* Zero is always in TYPE_CACHED_VALUES. */
6156 if (! TYPE_UNSIGNED (type))
6157 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6158 ggc_free (TYPE_MIN_VALUE (type));
6160 if (TYPE_MAX_VALUE (type)
6161 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6163 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6164 ggc_free (TYPE_MAX_VALUE (type));
6166 if (TYPE_CACHED_VALUES_P (type))
6167 ggc_free (TYPE_CACHED_VALUES (type));
6169 free_node (type);
6170 return t1;
6172 else
6174 struct type_hash *h;
6176 h = ggc_alloc<type_hash> ();
6177 h->hash = hashcode;
6178 h->type = type;
6179 *loc = h;
6181 return type;
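/* A minimal usage sketch of the recipe above (illustrative only; the 24-bit
   precision is an arbitrary choice -- compare the real caller
   build_nonstandard_integer_type later in this file):

       tree t = make_node (INTEGER_TYPE);
       TYPE_PRECISION (t) = 24;
       fixup_unsigned_type (t);
       inchash::hash hstate;
       inchash::add_expr (TYPE_MAX_VALUE (t), hstate);
       t = type_hash_canon (hstate.end (), t);

   If an identical type had already been recorded, the freshly made node is
   freed and the existing canonical node is returned; otherwise T itself is
   recorded and returned unchanged.  */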
6185 static void
6186 print_type_hash_statistics (void)
6188 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6189 (long) type_hash_table->size (),
6190 (long) type_hash_table->elements (),
6191 type_hash_table->collisions ());
6194 /* Given two lists of types
6195 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6196 return 1 if the lists contain the same types in the same order.
6197 Also, the TREE_PURPOSEs must match. */
6199 bool
6200 type_list_equal (const_tree l1, const_tree l2)
6202 const_tree t1, t2;
6204 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6205 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6206 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6207 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6208 && (TREE_TYPE (TREE_PURPOSE (t1))
6209 == TREE_TYPE (TREE_PURPOSE (t2))))))
6210 return false;
6212 return t1 == t2;
6215 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6216 given by TYPE. If the argument list accepts variable arguments,
6217 then this function counts only the ordinary arguments. */
6220 type_num_arguments (const_tree fntype)
6222 int i = 0;
6224 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6225 /* If the function does not take a variable number of arguments,
6226 the last element in the list will have type `void'. */
6227 if (VOID_TYPE_P (TREE_VALUE (t)))
6228 break;
6229 else
6230 ++i;
6232 return i;
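/* Worked example (illustrative only): for a prototype built as

       tree fn = build_function_type_list (integer_type_node,
                                           integer_type_node,
                                           double_type_node, NULL_TREE);

   TYPE_ARG_TYPES (fn) is (int, double, void) and type_num_arguments (fn)
   returns 2.  The varargs variant built by build_varargs_function_type_list
   from the same arguments has no terminating void node and also yields 2.  */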
6235 /* Return the type of the function TYPE's argument ARGNO if known.
6236 For vararg functions, where ARGNO refers to one of the variadic
6237 arguments, return null. Otherwise, return void_type_node for an
6238 out-of-bounds ARGNO. */
6240 tree
6241 type_argument_type (const_tree fntype, unsigned argno)
6243 /* Treat zero the same as an out-of-bounds argument number. */
6244 if (!argno)
6245 return void_type_node;
6247 function_args_iterator iter;
6249 tree argtype;
6250 unsigned i = 1;
6251 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6253 /* A vararg function's argument list ends in a null. Otherwise,
6254 an ordinary function's argument list ends with void. Return
6255 null if ARGNO refers to a vararg argument, void_type_node if
6256 it's out of bounds, and the formal argument type otherwise. */
6257 if (!argtype)
6258 break;
6260 if (i == argno || VOID_TYPE_P (argtype))
6261 return argtype;
6263 ++i;
6266 return NULL_TREE;
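/* Continuing the sketch above (illustrative only): for a variadic
   "int f (int, double, ...)" type FNTYPE,

       type_argument_type (fntype, 1) == integer_type_node
       type_argument_type (fntype, 2) == double_type_node
       type_argument_type (fntype, 3) == NULL_TREE      (a variadic argument)
       type_argument_type (fntype, 0) == void_type_node (out of bounds)

   whereas for the non-variadic "int g (int, double)" an ARGNO of 3 yields
   void_type_node instead, since that argument list ends with void.  */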
6269 /* Nonzero if integer constants T1 and T2
6270 represent the same constant value. */
6273 tree_int_cst_equal (const_tree t1, const_tree t2)
6275 if (t1 == t2)
6276 return 1;
6278 if (t1 == 0 || t2 == 0)
6279 return 0;
6281 STRIP_ANY_LOCATION_WRAPPER (t1);
6282 STRIP_ANY_LOCATION_WRAPPER (t2);
6284 if (TREE_CODE (t1) == INTEGER_CST
6285 && TREE_CODE (t2) == INTEGER_CST
6286 && wi::to_widest (t1) == wi::to_widest (t2))
6287 return 1;
6289 return 0;
6292 /* Return true if T is an INTEGER_CST whose numerical value (extended
6293 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6295 bool
6296 tree_fits_shwi_p (const_tree t)
6298 return (t != NULL_TREE
6299 && TREE_CODE (t) == INTEGER_CST
6300 && wi::fits_shwi_p (wi::to_widest (t)));
6303 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6304 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6306 bool
6307 tree_fits_poly_int64_p (const_tree t)
6309 if (t == NULL_TREE)
6310 return false;
6311 if (POLY_INT_CST_P (t))
6313 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6314 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6315 return false;
6316 return true;
6318 return (TREE_CODE (t) == INTEGER_CST
6319 && wi::fits_shwi_p (wi::to_widest (t)));
6322 /* Return true if T is an INTEGER_CST whose numerical value (extended
6323 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6325 bool
6326 tree_fits_uhwi_p (const_tree t)
6328 return (t != NULL_TREE
6329 && TREE_CODE (t) == INTEGER_CST
6330 && wi::fits_uhwi_p (wi::to_widest (t)));
6333 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6334 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6336 bool
6337 tree_fits_poly_uint64_p (const_tree t)
6339 if (t == NULL_TREE)
6340 return false;
6341 if (POLY_INT_CST_P (t))
6343 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6344 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6345 return false;
6346 return true;
6348 return (TREE_CODE (t) == INTEGER_CST
6349 && wi::fits_uhwi_p (wi::to_widest (t)));
6352 /* T is an INTEGER_CST whose numerical value (extended according to
6353 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6354 HOST_WIDE_INT. */
6356 HOST_WIDE_INT
6357 tree_to_shwi (const_tree t)
6359 gcc_assert (tree_fits_shwi_p (t));
6360 return TREE_INT_CST_LOW (t);
6363 /* T is an INTEGER_CST whose numerical value (extended according to
6364 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6365 HOST_WIDE_INT. */
6367 unsigned HOST_WIDE_INT
6368 tree_to_uhwi (const_tree t)
6370 gcc_assert (tree_fits_uhwi_p (t));
6371 return TREE_INT_CST_LOW (t);
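/* The usual calling convention is to test first and extract second
   (a sketch only; SIZE stands for whatever INTEGER_CST is at hand):

       if (tree_fits_uhwi_p (size))
         {
           unsigned HOST_WIDE_INT n = tree_to_uhwi (size);
           ...
         }

   Calling tree_to_uhwi or tree_to_shwi without the matching tree_fits_*_p
   check trips the assertions above whenever the value does not fit.  */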
6374 /* Return the most significant (sign) bit of T. */
6377 tree_int_cst_sign_bit (const_tree t)
6379 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6381 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6384 /* Return an indication of the sign of the integer constant T.
6385 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6386 Note that -1 will never be returned if T's type is unsigned. */
6389 tree_int_cst_sgn (const_tree t)
6391 if (wi::to_wide (t) == 0)
6392 return 0;
6393 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6394 return 1;
6395 else if (wi::neg_p (wi::to_wide (t)))
6396 return -1;
6397 else
6398 return 1;
6401 /* Return the minimum number of bits needed to represent VALUE in a
6402 signed or unsigned type; SGN says which. */
6404 unsigned int
6405 tree_int_cst_min_precision (tree value, signop sgn)
6407 /* If the value is negative, compute its negative minus 1. The latter
6408 adjustment is because the absolute value of the largest negative value
6409 is one larger than the largest positive value. This is equivalent to
6410 a bit-wise negation, so use that operation instead. */
6412 if (tree_int_cst_sgn (value) < 0)
6413 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6415 /* Return the number of bits needed, taking into account the fact
6416 that we need one more bit for a signed than unsigned type.
6417 If value is 0 or -1, the minimum precision is 1 no matter
6418 SGN is SIGNED or UNSIGNED. */
6420 if (integer_zerop (value))
6421 return 1;
6422 else
6423 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
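/* Worked examples: for VALUE 5 the result is 3 bits when SGN is UNSIGNED
   (101) and 4 bits when SGN is SIGNED (0101); for VALUE -3 with SGN SIGNED
   the bit-wise negation gives 2, so the result is floor_log2 (2) + 1 + 1
   = 3 bits (101 in two's complement); for VALUE 0 or -1 the result is 1.  */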
6426 /* Return truthvalue of whether T1 is the same tree structure as T2.
6427 Return 1 if they are the same.
6428 Return 0 if they are understandably different.
6429 Return -1 if either contains tree structure not understood by
6430 this function. */
6433 simple_cst_equal (const_tree t1, const_tree t2)
6435 enum tree_code code1, code2;
6436 int cmp;
6437 int i;
6439 if (t1 == t2)
6440 return 1;
6441 if (t1 == 0 || t2 == 0)
6442 return 0;
6444 /* For location wrappers to be the same, they must be at the same
6445 source location (and wrap the same thing). */
6446 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6448 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6449 return 0;
6450 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6453 code1 = TREE_CODE (t1);
6454 code2 = TREE_CODE (t2);
6456 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6458 if (CONVERT_EXPR_CODE_P (code2)
6459 || code2 == NON_LVALUE_EXPR)
6460 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6461 else
6462 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6465 else if (CONVERT_EXPR_CODE_P (code2)
6466 || code2 == NON_LVALUE_EXPR)
6467 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6469 if (code1 != code2)
6470 return 0;
6472 switch (code1)
6474 case INTEGER_CST:
6475 return wi::to_widest (t1) == wi::to_widest (t2);
6477 case REAL_CST:
6478 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6480 case FIXED_CST:
6481 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6483 case STRING_CST:
6484 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6485 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6486 TREE_STRING_LENGTH (t1)));
6488 case CONSTRUCTOR:
6490 unsigned HOST_WIDE_INT idx;
6491 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6492 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6494 if (vec_safe_length (v1) != vec_safe_length (v2))
6495 return false;
6497 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6498 /* ??? Should we handle also fields here? */
6499 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6500 return false;
6501 return true;
6504 case SAVE_EXPR:
6505 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6507 case CALL_EXPR:
6508 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6509 if (cmp <= 0)
6510 return cmp;
6511 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6512 return 0;
6514 const_tree arg1, arg2;
6515 const_call_expr_arg_iterator iter1, iter2;
6516 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6517 arg2 = first_const_call_expr_arg (t2, &iter2);
6518 arg1 && arg2;
6519 arg1 = next_const_call_expr_arg (&iter1),
6520 arg2 = next_const_call_expr_arg (&iter2))
6522 cmp = simple_cst_equal (arg1, arg2);
6523 if (cmp <= 0)
6524 return cmp;
6526 return arg1 == arg2;
6529 case TARGET_EXPR:
6530 /* Special case: if either target is an unallocated VAR_DECL,
6531 it means that it's going to be unified with whatever the
6532 TARGET_EXPR is really supposed to initialize, so treat it
6533 as being equivalent to anything. */
6534 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6535 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6536 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6537 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6538 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6539 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6540 cmp = 1;
6541 else
6542 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6544 if (cmp <= 0)
6545 return cmp;
6547 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6549 case WITH_CLEANUP_EXPR:
6550 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6551 if (cmp <= 0)
6552 return cmp;
6554 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6556 case COMPONENT_REF:
6557 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6558 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6560 return 0;
6562 case VAR_DECL:
6563 case PARM_DECL:
6564 case CONST_DECL:
6565 case FUNCTION_DECL:
6566 return 0;
6568 default:
6569 if (POLY_INT_CST_P (t1))
6570 /* A false return means maybe_ne rather than known_ne. */
6571 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6572 TYPE_SIGN (TREE_TYPE (t1))),
6573 poly_widest_int::from (poly_int_cst_value (t2),
6574 TYPE_SIGN (TREE_TYPE (t2))));
6575 break;
6578 /* This general rule works for most tree codes. All exceptions should be
6579 handled above. If this is a language-specific tree code, we can't
6580 trust what might be in the operand, so say we don't know
6581 the situation. */
6582 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6583 return -1;
6585 switch (TREE_CODE_CLASS (code1))
6587 case tcc_unary:
6588 case tcc_binary:
6589 case tcc_comparison:
6590 case tcc_expression:
6591 case tcc_reference:
6592 case tcc_statement:
6593 cmp = 1;
6594 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6596 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6597 if (cmp <= 0)
6598 return cmp;
6601 return cmp;
6603 default:
6604 return -1;
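/* Callers normally distinguish all three results; a common shape is
   (sketch only, the handle_* routines are hypothetical placeholders):

       int cmp = simple_cst_equal (a, b);
       if (cmp == 1)
         handle_known_equal ();
       else if (cmp == 0)
         handle_known_different ();
       else
         handle_dont_know ();

   The CALL_EXPR and TARGET_EXPR cases above use the same "cmp <= 0 means
   stop and propagate" idiom internally.  */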
6608 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6609 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6610 than U, respectively. */
6613 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6615 if (tree_int_cst_sgn (t) < 0)
6616 return -1;
6617 else if (!tree_fits_uhwi_p (t))
6618 return 1;
6619 else if (TREE_INT_CST_LOW (t) == u)
6620 return 0;
6621 else if (TREE_INT_CST_LOW (t) < u)
6622 return -1;
6623 else
6624 return 1;
6627 /* Return true if SIZE represents a constant size that is in bounds of
6628 what the middle-end and the backend accept (covering not more than
6629 half of the address space).
6630 When PERR is non-null, set *PERR on failure to the description of
6631 why SIZE is not valid. */
6633 bool
6634 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6636 if (POLY_INT_CST_P (size))
6638 if (TREE_OVERFLOW (size))
6639 return false;
6640 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6641 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6642 return false;
6643 return true;
6646 cst_size_error error;
6647 if (!perr)
6648 perr = &error;
6650 if (TREE_CODE (size) != INTEGER_CST)
6652 *perr = cst_size_not_constant;
6653 return false;
6656 if (TREE_OVERFLOW_P (size))
6658 *perr = cst_size_overflow;
6659 return false;
6662 if (tree_int_cst_sgn (size) < 0)
6664 *perr = cst_size_negative;
6665 return false;
6667 if (!tree_fits_uhwi_p (size)
6668 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6669 < wi::to_widest (size) * 2))
6671 *perr = cst_size_too_big;
6672 return false;
6675 return true;
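/* A sketch of the intended use; only the enumerators come from this
   function, the handling at each case is up to the caller:

       cst_size_error err;
       if (!valid_constant_size_p (size, &err))
         switch (err)
           {
           case cst_size_not_constant: ... break;
           case cst_size_overflow:     ... break;
           case cst_size_negative:     ... break;
           case cst_size_too_big:      ... break;
           }
*/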
6678 /* Return the precision of the type, or for a complex or vector type the
6679 precision of the type of its elements. */
6681 unsigned int
6682 element_precision (const_tree type)
6684 if (!TYPE_P (type))
6685 type = TREE_TYPE (type);
6686 enum tree_code code = TREE_CODE (type);
6687 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6688 type = TREE_TYPE (type);
6690 return TYPE_PRECISION (type);
6693 /* Return true if CODE represents an associative tree code. Otherwise
6694 return false. */
6695 bool
6696 associative_tree_code (enum tree_code code)
6698 switch (code)
6700 case BIT_IOR_EXPR:
6701 case BIT_AND_EXPR:
6702 case BIT_XOR_EXPR:
6703 case PLUS_EXPR:
6704 case MULT_EXPR:
6705 case MIN_EXPR:
6706 case MAX_EXPR:
6707 return true;
6709 default:
6710 break;
6712 return false;
6715 /* Return true if CODE represents a commutative tree code. Otherwise
6716 return false. */
6717 bool
6718 commutative_tree_code (enum tree_code code)
6720 switch (code)
6722 case PLUS_EXPR:
6723 case MULT_EXPR:
6724 case MULT_HIGHPART_EXPR:
6725 case MIN_EXPR:
6726 case MAX_EXPR:
6727 case BIT_IOR_EXPR:
6728 case BIT_XOR_EXPR:
6729 case BIT_AND_EXPR:
6730 case NE_EXPR:
6731 case EQ_EXPR:
6732 case UNORDERED_EXPR:
6733 case ORDERED_EXPR:
6734 case UNEQ_EXPR:
6735 case LTGT_EXPR:
6736 case TRUTH_AND_EXPR:
6737 case TRUTH_XOR_EXPR:
6738 case TRUTH_OR_EXPR:
6739 case WIDEN_MULT_EXPR:
6740 case VEC_WIDEN_MULT_HI_EXPR:
6741 case VEC_WIDEN_MULT_LO_EXPR:
6742 case VEC_WIDEN_MULT_EVEN_EXPR:
6743 case VEC_WIDEN_MULT_ODD_EXPR:
6744 return true;
6746 default:
6747 break;
6749 return false;
6752 /* Return true if CODE represents a ternary tree code for which the
6753 first two operands are commutative. Otherwise return false. */
6754 bool
6755 commutative_ternary_tree_code (enum tree_code code)
6757 switch (code)
6759 case WIDEN_MULT_PLUS_EXPR:
6760 case WIDEN_MULT_MINUS_EXPR:
6761 case DOT_PROD_EXPR:
6762 return true;
6764 default:
6765 break;
6767 return false;
6770 /* Returns true if CODE can overflow. */
6772 bool
6773 operation_can_overflow (enum tree_code code)
6775 switch (code)
6777 case PLUS_EXPR:
6778 case MINUS_EXPR:
6779 case MULT_EXPR:
6780 case LSHIFT_EXPR:
6781 /* Can overflow in various ways. */
6782 return true;
6783 case TRUNC_DIV_EXPR:
6784 case EXACT_DIV_EXPR:
6785 case FLOOR_DIV_EXPR:
6786 case CEIL_DIV_EXPR:
6787 /* For INT_MIN / -1. */
6788 return true;
6789 case NEGATE_EXPR:
6790 case ABS_EXPR:
6791 /* For -INT_MIN. */
6792 return true;
6793 default:
6794 /* These operators cannot overflow. */
6795 return false;
6799 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6800 -ftrapv doesn't generate trapping insns for CODE. */
6802 bool
6803 operation_no_trapping_overflow (tree type, enum tree_code code)
6805 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6807 /* We don't generate instructions that trap on overflow for complex or vector
6808 types. */
6809 if (!INTEGRAL_TYPE_P (type))
6810 return true;
6812 if (!TYPE_OVERFLOW_TRAPS (type))
6813 return true;
6815 switch (code)
6817 case PLUS_EXPR:
6818 case MINUS_EXPR:
6819 case MULT_EXPR:
6820 case NEGATE_EXPR:
6821 case ABS_EXPR:
6822 /* These operators can overflow, and -ftrapv generates trapping code for
6823 these. */
6824 return false;
6825 case TRUNC_DIV_EXPR:
6826 case EXACT_DIV_EXPR:
6827 case FLOOR_DIV_EXPR:
6828 case CEIL_DIV_EXPR:
6829 case LSHIFT_EXPR:
6830 /* These operators can overflow, but -ftrapv does not generate trapping
6831 code for these. */
6832 return true;
6833 default:
6834 /* These operators cannot overflow. */
6835 return true;
6839 /* Constructors for pointer, array and function types.
6840 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6841 constructed by language-dependent code, not here.) */
6843 /* Construct, lay out and return the type of pointers to TO_TYPE with
6844 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6845 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6846 indicate this type can reference all of memory. If such a type has
6847 already been constructed, reuse it. */
6849 tree
6850 build_pointer_type_for_mode (tree to_type, machine_mode mode,
6851 bool can_alias_all)
6853 tree t;
6854 bool could_alias = can_alias_all;
6856 if (to_type == error_mark_node)
6857 return error_mark_node;
6859 if (mode == VOIDmode)
6861 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6862 mode = targetm.addr_space.pointer_mode (as);
6865 /* If the pointed-to type has the may_alias attribute set, force
6866 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6867 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6868 can_alias_all = true;
6870 /* In some cases, languages will have things that aren't a POINTER_TYPE
6871 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6872 In that case, return that type without regard to the rest of our
6873 operands.
6875 ??? This is a kludge, but consistent with the way this function has
6876 always operated and there doesn't seem to be a good way to avoid this
6877 at the moment. */
6878 if (TYPE_POINTER_TO (to_type) != 0
6879 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
6880 return TYPE_POINTER_TO (to_type);
6882 /* First, if we already have a type for pointers to TO_TYPE and it's
6883 the proper mode, use it. */
6884 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
6885 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6886 return t;
6888 t = make_node (POINTER_TYPE);
6890 TREE_TYPE (t) = to_type;
6891 SET_TYPE_MODE (t, mode);
6892 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6893 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
6894 TYPE_POINTER_TO (to_type) = t;
6896 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6897 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6898 SET_TYPE_STRUCTURAL_EQUALITY (t);
6899 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6900 TYPE_CANONICAL (t)
6901 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
6902 mode, false);
6904 /* Lay out the type. This function has many callers that are concerned
6905 with expression-construction, and this simplifies them all. */
6906 layout_type (t);
6908 return t;
6911 /* By default build pointers in ptr_mode. */
6913 tree
6914 build_pointer_type (tree to_type)
6916 return build_pointer_type_for_mode (to_type, VOIDmode, false);
6919 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
6921 tree
6922 build_reference_type_for_mode (tree to_type, machine_mode mode,
6923 bool can_alias_all)
6925 tree t;
6926 bool could_alias = can_alias_all;
6928 if (to_type == error_mark_node)
6929 return error_mark_node;
6931 if (mode == VOIDmode)
6933 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6934 mode = targetm.addr_space.pointer_mode (as);
6937 /* If the pointed-to type has the may_alias attribute set, force
6938 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6939 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6940 can_alias_all = true;
6942 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
6943 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
6944 In that case, return that type without regard to the rest of our
6945 operands.
6947 ??? This is a kludge, but consistent with the way this function has
6948 always operated and there doesn't seem to be a good way to avoid this
6949 at the moment. */
6950 if (TYPE_REFERENCE_TO (to_type) != 0
6951 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
6952 return TYPE_REFERENCE_TO (to_type);
6954 /* First, if we already have a type for references to TO_TYPE and it's
6955 the proper mode, use it. */
6956 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
6957 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6958 return t;
6960 t = make_node (REFERENCE_TYPE);
6962 TREE_TYPE (t) = to_type;
6963 SET_TYPE_MODE (t, mode);
6964 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6965 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
6966 TYPE_REFERENCE_TO (to_type) = t;
6968 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6969 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6970 SET_TYPE_STRUCTURAL_EQUALITY (t);
6971 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6972 TYPE_CANONICAL (t)
6973 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
6974 mode, false);
6976 layout_type (t);
6978 return t;
6982 /* Build the node for the type of references-to-TO_TYPE by default
6983 in ptr_mode. */
6985 tree
6986 build_reference_type (tree to_type)
6988 return build_reference_type_for_mode (to_type, VOIDmode, false);
6991 #define MAX_INT_CACHED_PREC \
6992 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6993 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
6995 static void
6996 clear_nonstandard_integer_type_cache (void)
6998 for (size_t i = 0 ; i < 2 * MAX_INT_CACHED_PREC + 2 ; i++)
7000 nonstandard_integer_type_cache[i] = NULL;
7004 /* Builds a signed or unsigned integer type of precision PRECISION.
7005 Used for C bitfields whose precision does not match that of
7006 built-in target types. */
7007 tree
7008 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7009 int unsignedp)
7011 tree itype, ret;
7013 if (unsignedp)
7014 unsignedp = MAX_INT_CACHED_PREC + 1;
7016 if (precision <= MAX_INT_CACHED_PREC)
7018 itype = nonstandard_integer_type_cache[precision + unsignedp];
7019 if (itype)
7020 return itype;
7023 itype = make_node (INTEGER_TYPE);
7024 TYPE_PRECISION (itype) = precision;
7026 if (unsignedp)
7027 fixup_unsigned_type (itype);
7028 else
7029 fixup_signed_type (itype);
7031 inchash::hash hstate;
7032 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7033 ret = type_hash_canon (hstate.end (), itype);
7034 if (precision <= MAX_INT_CACHED_PREC)
7035 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7037 return ret;
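/* For instance, a C front end lowering the bit-field declaration
   "unsigned int x : 24;" can represent the field's type as (sketch;
   the second argument is UNSIGNEDP):

       tree t = build_nonstandard_integer_type (24, 1);

   Repeated requests for the same precision and signedness hit the cache
   above and return the identical node.  */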
7040 #define MAX_BOOL_CACHED_PREC \
7041 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7042 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7044 /* Builds a boolean type of precision PRECISION.
7045 Used for boolean vectors to choose proper vector element size. */
7046 tree
7047 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7049 tree type;
7051 if (precision <= MAX_BOOL_CACHED_PREC)
7053 type = nonstandard_boolean_type_cache[precision];
7054 if (type)
7055 return type;
7058 type = make_node (BOOLEAN_TYPE);
7059 TYPE_PRECISION (type) = precision;
7060 fixup_signed_type (type);
7062 if (precision <= MAX_BOOL_CACHED_PREC)
7063 nonstandard_boolean_type_cache[precision] = type;
7065 return type;
7068 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7069 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7070 is true, reuse such a type that has already been constructed. */
7072 static tree
7073 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7075 tree itype = make_node (INTEGER_TYPE);
7077 TREE_TYPE (itype) = type;
7079 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7080 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7082 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7083 SET_TYPE_MODE (itype, TYPE_MODE (type));
7084 TYPE_SIZE (itype) = TYPE_SIZE (type);
7085 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7086 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7087 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7088 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7090 if (!shared)
7091 return itype;
7093 if ((TYPE_MIN_VALUE (itype)
7094 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7095 || (TYPE_MAX_VALUE (itype)
7096 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7098 /* Since we cannot reliably merge this type, we need to compare it using
7099 structural equality checks. */
7100 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7101 return itype;
7104 hashval_t hash = type_hash_canon_hash (itype);
7105 itype = type_hash_canon (hash, itype);
7107 return itype;
7110 /* Wrapper around build_range_type_1 with SHARED set to true. */
7112 tree
7113 build_range_type (tree type, tree lowval, tree highval)
7115 return build_range_type_1 (type, lowval, highval, true);
7118 /* Wrapper around build_range_type_1 with SHARED set to false. */
7120 tree
7121 build_nonshared_range_type (tree type, tree lowval, tree highval)
7123 return build_range_type_1 (type, lowval, highval, false);
7126 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7127 MAXVAL should be the maximum value in the domain
7128 (one less than the length of the array).
7130 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7131 We don't enforce this limit, that is up to the caller (e.g. a language front end).
7132 The limit exists because the result is a signed type and we don't handle
7133 sizes that use more than one HOST_WIDE_INT. */
7135 tree
7136 build_index_type (tree maxval)
7138 return build_range_type (sizetype, size_zero_node, maxval);
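/* So the TYPE_DOMAIN for a ten-element array is built as (sketch)

       tree domain = build_index_type (size_int (9));

   i.e. the range [0, 9] over sizetype, which is exactly what
   build_array_type_nelts below does for NELTS == 10.  */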
7141 /* Return true if the debug information for TYPE, a subtype, should be emitted
7142 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7143 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7144 debug info and doesn't reflect the source code. */
7146 bool
7147 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7149 tree base_type = TREE_TYPE (type), low, high;
7151 /* Subrange types have a base type which is an integral type. */
7152 if (!INTEGRAL_TYPE_P (base_type))
7153 return false;
7155 /* Get the real bounds of the subtype. */
7156 if (lang_hooks.types.get_subrange_bounds)
7157 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7158 else
7160 low = TYPE_MIN_VALUE (type);
7161 high = TYPE_MAX_VALUE (type);
7164 /* If the type and its base type have the same representation and the same
7165 name, then the type is not a subrange but a copy of the base type. */
7166 if ((TREE_CODE (base_type) == INTEGER_TYPE
7167 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7168 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7169 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7170 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7171 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7172 return false;
7174 if (lowval)
7175 *lowval = low;
7176 if (highval)
7177 *highval = high;
7178 return true;
7181 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7182 and number of elements specified by the range of values of INDEX_TYPE.
7183 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7184 If SHARED is true, reuse such a type that has already been constructed.
7185 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7187 tree
7188 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7189 bool shared, bool set_canonical)
7191 tree t;
7193 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7195 error ("arrays of functions are not meaningful");
7196 elt_type = integer_type_node;
7199 t = make_node (ARRAY_TYPE);
7200 TREE_TYPE (t) = elt_type;
7201 TYPE_DOMAIN (t) = index_type;
7202 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7203 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7204 layout_type (t);
7206 if (shared)
7208 hashval_t hash = type_hash_canon_hash (t);
7209 t = type_hash_canon (hash, t);
7212 if (TYPE_CANONICAL (t) == t && set_canonical)
7214 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7215 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7216 || in_lto_p)
7217 SET_TYPE_STRUCTURAL_EQUALITY (t);
7218 else if (TYPE_CANONICAL (elt_type) != elt_type
7219 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7220 TYPE_CANONICAL (t)
7221 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7222 index_type
7223 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7224 typeless_storage, shared, set_canonical);
7227 return t;
7230 /* Wrapper around build_array_type_1 with SHARED set to true. */
7232 tree
7233 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7235 return
7236 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7239 /* Wrapper around build_array_type_1 with SHARED set to false. */
7241 tree
7242 build_nonshared_array_type (tree elt_type, tree index_type)
7244 return build_array_type_1 (elt_type, index_type, false, false, true);
7247 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7248 sizetype. */
7250 tree
7251 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7253 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
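/* Putting the previous helpers together, the type "int[10]" can be
   obtained either way (sketch):

       tree a1 = build_array_type (integer_type_node,
                                   build_index_type (size_int (9)));
       tree a2 = build_array_type_nelts (integer_type_node, 10);

   Both requests go through type_hash_canon, so A1 and A2 normally end up
   being the same ARRAY_TYPE node.  */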
7256 /* Recursively examines the array elements of TYPE, until a non-array
7257 element type is found. */
7259 tree
7260 strip_array_types (tree type)
7262 while (TREE_CODE (type) == ARRAY_TYPE)
7263 type = TREE_TYPE (type);
7265 return type;
7268 /* Computes the canonical argument types from the argument type list
7269 ARGTYPES.
7271 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7272 on entry to this function, or if any of the ARGTYPES are
7273 structural.
7275 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7276 true on entry to this function, or if any of the ARGTYPES are
7277 non-canonical.
7279 Returns a canonical argument list, which may be ARGTYPES when the
7280 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7281 true) or would not differ from ARGTYPES. */
7283 static tree
7284 maybe_canonicalize_argtypes (tree argtypes,
7285 bool *any_structural_p,
7286 bool *any_noncanonical_p)
7288 tree arg;
7289 bool any_noncanonical_argtypes_p = false;
7291 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7293 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7294 /* Fail gracefully by stating that the type is structural. */
7295 *any_structural_p = true;
7296 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7297 *any_structural_p = true;
7298 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7299 || TREE_PURPOSE (arg))
7300 /* If the argument has a default argument, we consider it
7301 non-canonical even though the type itself is canonical.
7302 That way, different variants of function and method types
7303 with default arguments will all point to the variant with
7304 no defaults as their canonical type. */
7305 any_noncanonical_argtypes_p = true;
7308 if (*any_structural_p)
7309 return argtypes;
7311 if (any_noncanonical_argtypes_p)
7313 /* Build the canonical list of argument types. */
7314 tree canon_argtypes = NULL_TREE;
7315 bool is_void = false;
7317 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7319 if (arg == void_list_node)
7320 is_void = true;
7321 else
7322 canon_argtypes = tree_cons (NULL_TREE,
7323 TYPE_CANONICAL (TREE_VALUE (arg)),
7324 canon_argtypes);
7327 canon_argtypes = nreverse (canon_argtypes);
7328 if (is_void)
7329 canon_argtypes = chainon (canon_argtypes, void_list_node);
7331 /* There is a non-canonical type. */
7332 *any_noncanonical_p = true;
7333 return canon_argtypes;
7336 /* The canonical argument types are the same as ARGTYPES. */
7337 return argtypes;
7340 /* Construct, lay out and return
7341 the type of functions returning type VALUE_TYPE
7342 given arguments of types ARG_TYPES.
7343 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7344 are data type nodes for the arguments of the function.
7345 NO_NAMED_ARGS_STDARG_P is true if this is a prototyped
7346 variable-arguments function with (...) prototype (no named arguments).
7347 If such a type has already been constructed, reuse it. */
7349 tree
7350 build_function_type (tree value_type, tree arg_types,
7351 bool no_named_args_stdarg_p)
7353 tree t;
7354 inchash::hash hstate;
7355 bool any_structural_p, any_noncanonical_p;
7356 tree canon_argtypes;
7358 gcc_assert (arg_types != error_mark_node);
7360 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7362 error ("function return type cannot be function");
7363 value_type = integer_type_node;
7366 /* Make a node of the sort we want. */
7367 t = make_node (FUNCTION_TYPE);
7368 TREE_TYPE (t) = value_type;
7369 TYPE_ARG_TYPES (t) = arg_types;
7370 if (no_named_args_stdarg_p)
7372 gcc_assert (arg_types == NULL_TREE);
7373 TYPE_NO_NAMED_ARGS_STDARG_P (t) = 1;
7376 /* If we already have such a type, use the old one. */
7377 hashval_t hash = type_hash_canon_hash (t);
7378 t = type_hash_canon (hash, t);
7380 /* Set up the canonical type. */
7381 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7382 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7383 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7384 &any_structural_p,
7385 &any_noncanonical_p);
7386 if (any_structural_p)
7387 SET_TYPE_STRUCTURAL_EQUALITY (t);
7388 else if (any_noncanonical_p)
7389 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7390 canon_argtypes);
7392 if (!COMPLETE_TYPE_P (t))
7393 layout_type (t);
7394 return t;
7397 /* Build a function type. The RETURN_TYPE is the type returned by the
7398 function. If VAARGS is set, no void_type_node is appended to the
7399 list. ARGP must always be terminated by a NULL_TREE. */
7401 static tree
7402 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7404 tree t, args, last;
7406 t = va_arg (argp, tree);
7407 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7408 args = tree_cons (NULL_TREE, t, args);
7410 if (vaargs)
7412 last = args;
7413 if (args != NULL_TREE)
7414 args = nreverse (args);
7415 gcc_assert (last != void_list_node);
7417 else if (args == NULL_TREE)
7418 args = void_list_node;
7419 else
7421 last = args;
7422 args = nreverse (args);
7423 TREE_CHAIN (last) = void_list_node;
7425 args = build_function_type (return_type, args, vaargs && args == NULL_TREE);
7427 return args;
7430 /* Build a function type. The RETURN_TYPE is the type returned by the
7431 function. If additional arguments are provided, they are
7432 additional argument types. The list of argument types must always
7433 be terminated by NULL_TREE. */
7435 tree
7436 build_function_type_list (tree return_type, ...)
7438 tree args;
7439 va_list p;
7441 va_start (p, return_type);
7442 args = build_function_type_list_1 (false, return_type, p);
7443 va_end (p);
7444 return args;
7447 /* Build a variable argument function type. The RETURN_TYPE is the
7448 type returned by the function. If additional arguments are provided,
7449 they are additional argument types. The list of argument types must
7450 always be terminated by NULL_TREE. */
7452 tree
7453 build_varargs_function_type_list (tree return_type, ...)
7455 tree args;
7456 va_list p;
7458 va_start (p, return_type);
7459 args = build_function_type_list_1 (true, return_type, p);
7460 va_end (p);
7462 return args;
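/* For example, the type of "int f (double)" can be built as (sketch)

       tree fn = build_function_type_list (integer_type_node,
                                           double_type_node, NULL_TREE);

   while the same call through build_varargs_function_type_list gives
   "int f (double, ...)", since the terminating void node is omitted for
   the varargs case.  */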
7465 /* Build a function type. RETURN_TYPE is the type returned by the
7466 function; VAARGS indicates whether the function takes varargs. The
7467 function takes N named arguments, the types of which are provided in
7468 ARG_TYPES. */
7470 static tree
7471 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7472 tree *arg_types)
7474 int i;
7475 tree t = vaargs ? NULL_TREE : void_list_node;
7477 for (i = n - 1; i >= 0; i--)
7478 t = tree_cons (NULL_TREE, arg_types[i], t);
7480 return build_function_type (return_type, t, vaargs && n == 0);
7483 /* Build a function type. RETURN_TYPE is the type returned by the
7484 function. The function takes N named arguments, the types of which
7485 are provided in ARG_TYPES. */
7487 tree
7488 build_function_type_array (tree return_type, int n, tree *arg_types)
7490 return build_function_type_array_1 (false, return_type, n, arg_types);
7493 /* Build a variable argument function type. RETURN_TYPE is the type
7494 returned by the function. The function takes N named arguments, the
7495 types of which are provided in ARG_TYPES. */
7497 tree
7498 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7500 return build_function_type_array_1 (true, return_type, n, arg_types);
7503 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7504 and ARGTYPES (a TREE_LIST) are the return type and argument types
7505 for the method. An implicit additional parameter (of type
7506 pointer-to-BASETYPE) is added to the ARGTYPES. */
7508 tree
7509 build_method_type_directly (tree basetype,
7510 tree rettype,
7511 tree argtypes)
7513 tree t;
7514 tree ptype;
7515 bool any_structural_p, any_noncanonical_p;
7516 tree canon_argtypes;
7518 /* Make a node of the sort we want. */
7519 t = make_node (METHOD_TYPE);
7521 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7522 TREE_TYPE (t) = rettype;
7523 ptype = build_pointer_type (basetype);
7525 /* The actual arglist for this function includes a "hidden" argument
7526 which is "this". Put it into the list of argument types. */
7527 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7528 TYPE_ARG_TYPES (t) = argtypes;
7530 /* If we already have such a type, use the old one. */
7531 hashval_t hash = type_hash_canon_hash (t);
7532 t = type_hash_canon (hash, t);
7534 /* Set up the canonical type. */
7535 any_structural_p
7536 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7537 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7538 any_noncanonical_p
7539 = (TYPE_CANONICAL (basetype) != basetype
7540 || TYPE_CANONICAL (rettype) != rettype);
7541 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7542 &any_structural_p,
7543 &any_noncanonical_p);
7544 if (any_structural_p)
7545 SET_TYPE_STRUCTURAL_EQUALITY (t);
7546 else if (any_noncanonical_p)
7547 TYPE_CANONICAL (t)
7548 = build_method_type_directly (TYPE_CANONICAL (basetype),
7549 TYPE_CANONICAL (rettype),
7550 canon_argtypes);
7551 if (!COMPLETE_TYPE_P (t))
7552 layout_type (t);
7554 return t;
7557 /* Construct, lay out and return the type of methods belonging to class
7558 BASETYPE and whose arguments and values are described by TYPE.
7559 If that type exists already, reuse it.
7560 TYPE must be a FUNCTION_TYPE node. */
7562 tree
7563 build_method_type (tree basetype, tree type)
7565 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7567 return build_method_type_directly (basetype,
7568 TREE_TYPE (type),
7569 TYPE_ARG_TYPES (type));
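/* E.g. for a member function "int C::f (double)" the front end passes
   BASETYPE == the RECORD_TYPE for C and TYPE == the FUNCTION_TYPE
   "int (double)"; the resulting METHOD_TYPE has TYPE_ARG_TYPES of
   (C *, double, void), the leading "C *" being the implicit this
   parameter added above.  (Illustrative only.)  */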
7572 /* Construct, lay out and return the type of offsets to a value
7573 of type TYPE, within an object of type BASETYPE.
7574 If a suitable offset type exists already, reuse it. */
7576 tree
7577 build_offset_type (tree basetype, tree type)
7579 tree t;
7581 /* Make a node of the sort we want. */
7582 t = make_node (OFFSET_TYPE);
7584 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7585 TREE_TYPE (t) = type;
7587 /* If we already have such a type, use the old one. */
7588 hashval_t hash = type_hash_canon_hash (t);
7589 t = type_hash_canon (hash, t);
7591 if (!COMPLETE_TYPE_P (t))
7592 layout_type (t);
7594 if (TYPE_CANONICAL (t) == t)
7596 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7597 || TYPE_STRUCTURAL_EQUALITY_P (type))
7598 SET_TYPE_STRUCTURAL_EQUALITY (t);
7599 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
7600 || TYPE_CANONICAL (type) != type)
7601 TYPE_CANONICAL (t)
7602 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
7603 TYPE_CANONICAL (type));
7606 return t;
7609 /* Create a complex type whose components are COMPONENT_TYPE.
7611 If NAMED is true, the type is given a TYPE_NAME. We do not always
7612 do so because this creates a DECL node and thus makes the DECL_UIDs
7613 dependent on the type canonicalization hashtable, which is GC-ed,
7614 so the DECL_UIDs would not be stable wrt garbage collection. */
7616 tree
7617 build_complex_type (tree component_type, bool named)
7619 gcc_assert (INTEGRAL_TYPE_P (component_type)
7620 || SCALAR_FLOAT_TYPE_P (component_type)
7621 || FIXED_POINT_TYPE_P (component_type));
7623 /* Make a node of the sort we want. */
7624 tree probe = make_node (COMPLEX_TYPE);
7626 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
7628 /* If we already have such a type, use the old one. */
7629 hashval_t hash = type_hash_canon_hash (probe);
7630 tree t = type_hash_canon (hash, probe);
7632 if (t == probe)
7634 /* We created a new type. The hash insertion will have laid
7635 out the type. We need to check the canonicalization and
7636 maybe set the name. */
7637 gcc_checking_assert (COMPLETE_TYPE_P (t)
7638 && !TYPE_NAME (t)
7639 && TYPE_CANONICAL (t) == t);
7641 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
7642 SET_TYPE_STRUCTURAL_EQUALITY (t);
7643 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
7644 TYPE_CANONICAL (t)
7645 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
7647 /* We need to create a name, since complex is a fundamental type. */
7648 if (named)
7650 const char *name = NULL;
7652 if (TREE_TYPE (t) == char_type_node)
7653 name = "complex char";
7654 else if (TREE_TYPE (t) == signed_char_type_node)
7655 name = "complex signed char";
7656 else if (TREE_TYPE (t) == unsigned_char_type_node)
7657 name = "complex unsigned char";
7658 else if (TREE_TYPE (t) == short_integer_type_node)
7659 name = "complex short int";
7660 else if (TREE_TYPE (t) == short_unsigned_type_node)
7661 name = "complex short unsigned int";
7662 else if (TREE_TYPE (t) == integer_type_node)
7663 name = "complex int";
7664 else if (TREE_TYPE (t) == unsigned_type_node)
7665 name = "complex unsigned int";
7666 else if (TREE_TYPE (t) == long_integer_type_node)
7667 name = "complex long int";
7668 else if (TREE_TYPE (t) == long_unsigned_type_node)
7669 name = "complex long unsigned int";
7670 else if (TREE_TYPE (t) == long_long_integer_type_node)
7671 name = "complex long long int";
7672 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
7673 name = "complex long long unsigned int";
7675 if (name != NULL)
7676 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
7677 get_identifier (name), t);
7681 return build_qualified_type (t, TYPE_QUALS (component_type));
7684 /* If TYPE is a real or complex floating-point type and the target
7685 does not directly support arithmetic on TYPE then return the wider
7686 type to be used for arithmetic on TYPE. Otherwise, return
7687 NULL_TREE. */
7689 tree
7690 excess_precision_type (tree type)
7692 /* The target can give two different responses to the question of
7693 which excess precision mode it would like depending on whether we
7694 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7696 enum excess_precision_type requested_type
7697 = (flag_excess_precision == EXCESS_PRECISION_FAST
7698 ? EXCESS_PRECISION_TYPE_FAST
7699 : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
7700 ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));
7702 enum flt_eval_method target_flt_eval_method
7703 = targetm.c.excess_precision (requested_type);
7705 /* The target should not ask for unpredictable float evaluation (though
7706 it might advertise that implicitly the evaluation is unpredictable,
7707 but we don't care about that here, it will have been reported
7708 elsewhere). If it does ask for unpredictable evaluation, we have
7709 nothing to do here. */
7710 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7712 /* Nothing to do. The target has asked for all types we know about
7713 to be computed with their native precision and range. */
7714 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7715 return NULL_TREE;
7717 /* The target will promote this type in a target-dependent way, so excess
7718 precision ought to leave it alone. */
7719 if (targetm.promoted_type (type) != NULL_TREE)
7720 return NULL_TREE;
7722 machine_mode float16_type_mode = (float16_type_node
7723 ? TYPE_MODE (float16_type_node)
7724 : VOIDmode);
7725 machine_mode bfloat16_type_mode = (bfloat16_type_node
7726 ? TYPE_MODE (bfloat16_type_node)
7727 : VOIDmode);
7728 machine_mode float_type_mode = TYPE_MODE (float_type_node);
7729 machine_mode double_type_mode = TYPE_MODE (double_type_node);
7731 switch (TREE_CODE (type))
7733 case REAL_TYPE:
7735 machine_mode type_mode = TYPE_MODE (type);
7736 switch (target_flt_eval_method)
7738 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7739 if (type_mode == float16_type_mode
7740 || type_mode == bfloat16_type_mode)
7741 return float_type_node;
7742 break;
7743 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7744 if (type_mode == float16_type_mode
7745 || type_mode == bfloat16_type_mode
7746 || type_mode == float_type_mode)
7747 return double_type_node;
7748 break;
7749 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7750 if (type_mode == float16_type_mode
7751 || type_mode == bfloat16_type_mode
7752 || type_mode == float_type_mode
7753 || type_mode == double_type_mode)
7754 return long_double_type_node;
7755 break;
7756 default:
7757 gcc_unreachable ();
7759 break;
7761 case COMPLEX_TYPE:
7763 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7764 return NULL_TREE;
7765 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7766 switch (target_flt_eval_method)
7768 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7769 if (type_mode == float16_type_mode
7770 || type_mode == bfloat16_type_mode)
7771 return complex_float_type_node;
7772 break;
7773 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7774 if (type_mode == float16_type_mode
7775 || type_mode == bfloat16_type_mode
7776 || type_mode == float_type_mode)
7777 return complex_double_type_node;
7778 break;
7779 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7780 if (type_mode == float16_type_mode
7781 || type_mode == bfloat16_type_mode
7782 || type_mode == float_type_mode
7783 || type_mode == double_type_mode)
7784 return complex_long_double_type_node;
7785 break;
7786 default:
7787 gcc_unreachable ();
7789 break;
7791 default:
7792 break;
7795 return NULL_TREE;
7798 /* Return OP, stripped of any conversions to wider types as much as is safe.
7799 Converting the value back to OP's type makes a value equivalent to OP.
7801 If FOR_TYPE is nonzero, we return a value which, if converted to
7802 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7804 OP must have integer, real or enumeral type. Pointers are not allowed!
7806 There are some cases where the obvious value we could return
7807 would regenerate to OP if converted to OP's type,
7808 but would not extend like OP to wider types.
7809 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7810 For example, if OP is (unsigned short)(signed char)-1,
7811 we avoid returning (signed char)-1 if FOR_TYPE is int,
7812 even though extending that to an unsigned short would regenerate OP,
7813 since the result of extending (signed char)-1 to (int)
7814 is different from (int) OP. */
7816 tree
7817 get_unwidened (tree op, tree for_type)
7819 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7820 tree type = TREE_TYPE (op);
7821 unsigned final_prec
7822 = TYPE_PRECISION (for_type != 0 ? for_type : type);
7823 int uns
7824 = (for_type != 0 && for_type != type
7825 && final_prec > TYPE_PRECISION (type)
7826 && TYPE_UNSIGNED (type));
7827 tree win = op;
7829 while (CONVERT_EXPR_P (op))
7831 int bitschange;
7833 /* TYPE_PRECISION on vector types has different meaning
7834 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7835 so avoid them here. */
7836 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
7837 break;
7839 bitschange = TYPE_PRECISION (TREE_TYPE (op))
7840 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
7842 /* Truncations are many-one so cannot be removed,
7843 unless we are later going to truncate down even further. */
7844 if (bitschange < 0
7845 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
7846 break;
7848 /* See what's inside this conversion. If we decide to strip it,
7849 we will set WIN. */
7850 op = TREE_OPERAND (op, 0);
7852 /* If we have not stripped any zero-extensions (uns is 0),
7853 we can strip any kind of extension.
7854 If we have previously stripped a zero-extension,
7855 only zero-extensions can safely be stripped.
7856 Any extension can be stripped if the bits it would produce
7857 are all going to be discarded later by truncating to FOR_TYPE. */
7859 if (bitschange > 0)
7861 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
7862 win = op;
7863 /* TYPE_UNSIGNED says whether this is a zero-extension.
7864 Let's avoid computing it if it does not affect WIN
7865 and if UNS will not be needed again. */
7866 if ((uns
7867 || CONVERT_EXPR_P (op))
7868 && TYPE_UNSIGNED (TREE_TYPE (op)))
7870 uns = 1;
7871 win = op;
7876 /* If we finally reach a constant see if it fits in sth smaller and
7877 in that case convert it. */
7878 if (TREE_CODE (win) == INTEGER_CST)
7880 tree wtype = TREE_TYPE (win);
7881 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
7882 if (for_type)
7883 prec = MAX (prec, final_prec);
7884 if (prec < TYPE_PRECISION (wtype))
7886 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
7887 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
7888 win = fold_convert (t, win);
7892 return win;
7895 /* Return OP or a simpler expression for a narrower value
7896 which can be sign-extended or zero-extended to give back OP.
7897 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
7898 or 0 if the value should be sign-extended. */
7900 tree
7901 get_narrower (tree op, int *unsignedp_ptr)
7903 int uns = 0;
7904 int first = 1;
7905 tree win = op;
7906 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
7908 if (TREE_CODE (op) == COMPOUND_EXPR)
7911 op = TREE_OPERAND (op, 1);
7912 while (TREE_CODE (op) == COMPOUND_EXPR);
7913 tree ret = get_narrower (op, unsignedp_ptr);
7914 if (ret == op)
7915 return win;
7916 auto_vec <tree, 16> v;
7917 unsigned int i;
7918 for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
7919 op = TREE_OPERAND (op, 1))
7920 v.safe_push (op);
7921 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
7922 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
7923 TREE_TYPE (ret), TREE_OPERAND (op, 0),
7924 ret);
7925 return ret;
7927 while (TREE_CODE (op) == NOP_EXPR)
7929 int bitschange
7930 = (TYPE_PRECISION (TREE_TYPE (op))
7931 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
7933 /* Truncations are many-one so cannot be removed. */
7934 if (bitschange < 0)
7935 break;
7937 /* See what's inside this conversion. If we decide to strip it,
7938 we will set WIN. */
7940 if (bitschange > 0)
7942 op = TREE_OPERAND (op, 0);
7943 /* An extension: the outermost one can be stripped,
7944 but remember whether it is zero or sign extension. */
7945 if (first)
7946 uns = TYPE_UNSIGNED (TREE_TYPE (op));
7947 /* Otherwise, if a sign extension has been stripped,
7948 only sign extensions can now be stripped;
7949 if a zero extension has been stripped, only zero-extensions. */
7950 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
7951 break;
7952 first = 0;
7954 else /* bitschange == 0 */
7956 /* A change in nominal type can always be stripped, but we must
7957 preserve the unsignedness. */
7958 if (first)
7959 uns = TYPE_UNSIGNED (TREE_TYPE (op));
7960 first = 0;
7961 op = TREE_OPERAND (op, 0);
7962 /* Keep trying to narrow, but don't assign op to win if it
7963 would turn an integral type into something else. */
7964 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
7965 continue;
7968 win = op;
7971 if (TREE_CODE (op) == COMPONENT_REF
7972 /* Since type_for_size always gives an integer type. */
7973 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
7974 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
7975 /* Ensure field is laid out already. */
7976 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
7977 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
7979 unsigned HOST_WIDE_INT innerprec
7980 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
7981 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
7982 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
7983 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
7985 /* We can get this structure field in a narrower type that fits it,
7986 but the resulting extension to its nominal type (a fullword type)
7987 must satisfy the same conditions as for other extensions.
7989 Do this only for fields that are aligned (not bit-fields),
7990 because when bit-field insns are used there is no
7991 advantage in doing this. */
7993 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
7994 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
7995 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
7996 && type != 0)
7998 if (first)
7999 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8000 win = fold_convert (type, op);
8004 *unsignedp_ptr = uns;
8005 return win;
8008 /* Return true if integer constant C has a value that is permissible
8009 for TYPE, an integral type. */
8011 bool
8012 int_fits_type_p (const_tree c, const_tree type)
8014 tree type_low_bound, type_high_bound;
8015 bool ok_for_low_bound, ok_for_high_bound;
8016 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8018 /* Non-standard boolean types can have arbitrary precision but various
8019 transformations assume that they can only take values 0 and +/-1. */
8020 if (TREE_CODE (type) == BOOLEAN_TYPE)
8021 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8023 retry:
8024 type_low_bound = TYPE_MIN_VALUE (type);
8025 type_high_bound = TYPE_MAX_VALUE (type);
8027 /* If at least one bound of the type is a constant integer, we can check
8028 ourselves and maybe make a decision. If no such decision is possible, but
8029 this type is a subtype, try checking against that. Otherwise, use
8030 fits_to_tree_p, which checks against the precision.
8032 Compute the status for each possibly constant bound, and return if we see
8033 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8034 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8035 for "constant known to fit". */
8037 /* Check if c >= type_low_bound. */
8038 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8040 if (tree_int_cst_lt (c, type_low_bound))
8041 return false;
8042 ok_for_low_bound = true;
8044 else
8045 ok_for_low_bound = false;
8047 /* Check if c <= type_high_bound. */
8048 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8050 if (tree_int_cst_lt (type_high_bound, c))
8051 return false;
8052 ok_for_high_bound = true;
8054 else
8055 ok_for_high_bound = false;
8057 /* If the constant fits both bounds, the result is known. */
8058 if (ok_for_low_bound && ok_for_high_bound)
8059 return true;
8061 /* Perform some generic filtering which may allow making a decision
8062 even if the bounds are not constant. First, negative integers
8063 never fit in unsigned types. */
8064 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8065 return false;
8067 /* Second, narrower types always fit in wider ones. */
8068 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8069 return true;
8071 /* Third, unsigned integers with top bit set never fit signed types. */
8072 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8074 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8075 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8077 /* When a tree_cst is converted to a wide-int, the precision
8078 is taken from the type. However, if the precision of the
8079 mode underneath the type is smaller than that, it is
8080 possible that the value will not fit. The test below
8081 fails if any bit is set between the sign bit of the
8082 underlying mode and the top bit of the type. */
8083 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8084 return false;
8086 else if (wi::neg_p (wi::to_wide (c)))
8087 return false;
8090 /* If we haven't been able to decide at this point, there is nothing more we
8091 can check ourselves here. Look at the base type if we have one and it
8092 has the same precision. */
8093 if (TREE_CODE (type) == INTEGER_TYPE
8094 && TREE_TYPE (type) != 0
8095 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8097 type = TREE_TYPE (type);
8098 goto retry;
8101 /* Or to fits_to_tree_p, if nothing else. */
8102 return wi::fits_to_tree_p (wi::to_wide (c), type);
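/* Illustrative sketch, not part of the original source: a typical use of
   int_fits_type_p is to guard a constant narrowing.  The helper below is
   hypothetical; C is assumed to be an INTEGER_CST and TYPE an integral
   type, both built elsewhere.  */

static tree
example_narrow_constant (tree c, tree type)
{
  /* Fold the conversion only when the value is known to be representable
     in TYPE; otherwise keep the original constant.  */
  if (TREE_CODE (c) == INTEGER_CST && int_fits_type_p (c, type))
    return fold_convert (type, c);
  return c;
}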
8105 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8106 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8107 represented (assuming two's-complement arithmetic) within the bit
8108 precision of the type are returned instead. */
8110 void
8111 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8113 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8114 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8115 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8116 else
8118 if (TYPE_UNSIGNED (type))
8119 mpz_set_ui (min, 0);
8120 else
8122 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8123 wi::to_mpz (mn, min, SIGNED);
8127 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8128 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8129 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8130 else
8132 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8133 wi::to_mpz (mn, max, TYPE_SIGN (type));
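/* Illustrative sketch, not part of the original source: callers own the GMP
   values and must initialize and clear them around the call.  The helper
   below is hypothetical.  */

static bool
example_value_in_static_bounds (const_tree type, const mpz_t val)
{
  mpz_t lo, hi;
  bool in_range;

  mpz_init (lo);
  mpz_init (hi);
  get_type_static_bounds (type, lo, hi);
  /* VAL fits if lo <= val <= hi under the type's static bounds.  */
  in_range = mpz_cmp (lo, val) <= 0 && mpz_cmp (val, hi) <= 0;
  mpz_clear (lo);
  mpz_clear (hi);
  return in_range;
}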
8137 /* Return true if VAR is an automatic variable. */
8139 bool
8140 auto_var_p (const_tree var)
8142 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8143 || TREE_CODE (var) == PARM_DECL)
8144 && ! TREE_STATIC (var))
8145 || TREE_CODE (var) == RESULT_DECL);
8148 /* Return true if VAR is an automatic variable defined in function FN. */
8150 bool
8151 auto_var_in_fn_p (const_tree var, const_tree fn)
8153 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8154 && (auto_var_p (var)
8155 || TREE_CODE (var) == LABEL_DECL));
8158 /* Subprogram of the following function. Called by walk_tree.
8160 Return *TP if it is an automatic variable or parameter of the
8161 function passed in as DATA. */
8163 static tree
8164 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8166 tree fn = (tree) data;
8168 if (TYPE_P (*tp))
8169 *walk_subtrees = 0;
8171 else if (DECL_P (*tp)
8172 && auto_var_in_fn_p (*tp, fn))
8173 return *tp;
8175 return NULL_TREE;
8178 /* Returns true if T is, contains, or refers to a type with variable
8179 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8180 arguments, but not the return type. If FN is nonzero, only return
8181 true if a modifier of the type or position of FN is a variable or
8182 parameter inside FN.
8184 This concept is more general than that of C99 'variably modified types':
8185 in C99, a struct type is never variably modified because a VLA may not
8186 appear as a structure member. However, in GNU C, code like:
8188 struct S { int i[f()]; };
8190 is valid, and other languages may define similar constructs. */
8192 bool
8193 variably_modified_type_p (tree type, tree fn)
8195 tree t;
8197 /* Test if T is either variable (if FN is zero) or an expression containing
8198 a variable in FN. If TYPE isn't gimplified, return true also if
8199 gimplify_one_sizepos would gimplify the expression into a local
8200 variable. */
8201 #define RETURN_TRUE_IF_VAR(T) \
8202 do { tree _t = (T); \
8203 if (_t != NULL_TREE \
8204 && _t != error_mark_node \
8205 && !CONSTANT_CLASS_P (_t) \
8206 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8207 && (!fn \
8208 || (!TYPE_SIZES_GIMPLIFIED (type) \
8209 && (TREE_CODE (_t) != VAR_DECL \
8210 && !CONTAINS_PLACEHOLDER_P (_t))) \
8211 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8212 return true; } while (0)
8214 if (type == error_mark_node)
8215 return false;
8217 /* If TYPE itself has variable size, it is variably modified. */
8218 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8219 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8221 switch (TREE_CODE (type))
8223 case POINTER_TYPE:
8224 case REFERENCE_TYPE:
8225 case VECTOR_TYPE:
8226 /* Ada can have pointer types referring to themselves indirectly. */
8227 if (TREE_VISITED (type))
8228 return false;
8229 TREE_VISITED (type) = true;
8230 if (variably_modified_type_p (TREE_TYPE (type), fn))
8232 TREE_VISITED (type) = false;
8233 return true;
8235 TREE_VISITED (type) = false;
8236 break;
8238 case FUNCTION_TYPE:
8239 case METHOD_TYPE:
8240 /* If TYPE is a function type, it is variably modified if the
8241 return type is variably modified. */
8242 if (variably_modified_type_p (TREE_TYPE (type), fn))
8243 return true;
8244 break;
8246 case INTEGER_TYPE:
8247 case REAL_TYPE:
8248 case FIXED_POINT_TYPE:
8249 case ENUMERAL_TYPE:
8250 case BOOLEAN_TYPE:
8251 /* Scalar types are variably modified if their end points
8252 aren't constant. */
8253 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8254 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8255 break;
8257 case RECORD_TYPE:
8258 case UNION_TYPE:
8259 case QUAL_UNION_TYPE:
8260 /* We can't see if any of the fields are variably-modified by the
8261 definition we normally use, since that would produce infinite
8262 recursion via pointers. */
8263 /* This is variably modified if some field's type is. */
8264 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8265 if (TREE_CODE (t) == FIELD_DECL)
8267 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8268 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8269 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8271 /* If the type is a qualified union, then the DECL_QUALIFIER
8272 of fields can also be an expression containing a variable. */
8273 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8274 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8276 /* If the field is a qualified union, then it's only a container
8277 for what's inside so we look into it. That's necessary in LTO
8278 mode because the sizes of the field tested above have been set
8279 to PLACEHOLDER_EXPRs by free_lang_data. */
8280 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
8281 && variably_modified_type_p (TREE_TYPE (t), fn))
8282 return true;
8284 break;
8286 case ARRAY_TYPE:
8287 /* Do not call ourselves to avoid infinite recursion. This is
8288 variably modified if the element type is. */
8289 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8290 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8291 break;
8293 default:
8294 break;
8297 /* The current language may have other cases to check, but in general,
8298 all other types are not variably modified. */
8299 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8301 #undef RETURN_TRUE_IF_VAR
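/* Illustrative sketch, not part of the original source: a front end might
   use the predicate above to decide whether a declaration's size must be
   computed at run time, as for a C99 VLA such as `int a[n]'.  DECL and FN
   below are hypothetical nodes built elsewhere.  */

static bool
example_needs_runtime_size (tree decl, tree fn)
{
  /* Passing FN restricts the check to sizes depending on variables or
     parameters declared inside FN.  */
  return variably_modified_type_p (TREE_TYPE (decl), fn);
}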
8304 /* Given a DECL or TYPE, return the scope in which it was declared, or
8305 NULL_TREE if there is no containing scope. */
8307 tree
8308 get_containing_scope (const_tree t)
8310 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8313 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8315 const_tree
8316 get_ultimate_context (const_tree decl)
8318 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8320 if (TREE_CODE (decl) == BLOCK)
8321 decl = BLOCK_SUPERCONTEXT (decl);
8322 else
8323 decl = get_containing_scope (decl);
8325 return decl;
8328 /* Return the innermost context enclosing DECL that is
8329 a FUNCTION_DECL, or zero if none. */
8331 tree
8332 decl_function_context (const_tree decl)
8334 tree context;
8336 if (TREE_CODE (decl) == ERROR_MARK)
8337 return 0;
8339 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8340 where we look up the function at runtime. Such functions always take
8341 a first argument of type 'pointer to real context'.
8343 C++ should really be fixed to use DECL_CONTEXT for the real context,
8344 and use something else for the "virtual context". */
8345 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8346 context
8347 = TYPE_MAIN_VARIANT
8348 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8349 else
8350 context = DECL_CONTEXT (decl);
8352 while (context && TREE_CODE (context) != FUNCTION_DECL)
8354 if (TREE_CODE (context) == BLOCK)
8355 context = BLOCK_SUPERCONTEXT (context);
8356 else
8357 context = get_containing_scope (context);
8360 return context;
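/* Illustrative sketch, not part of the original source: one common query is
   whether DECL belongs to some enclosing function other than the one being
   compiled, i.e. whether it is a nested-function reference.  DECL below is
   a hypothetical declaration node.  */

static bool
example_is_nonlocal_ref (tree decl)
{
  tree ctx = decl_function_context (decl);
  return ctx != NULL_TREE && ctx != current_function_decl;
}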
8363 /* Return the innermost context enclosing DECL that is
8364 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8365 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8367 tree
8368 decl_type_context (const_tree decl)
8370 tree context = DECL_CONTEXT (decl);
8372 while (context)
8373 switch (TREE_CODE (context))
8375 case NAMESPACE_DECL:
8376 case TRANSLATION_UNIT_DECL:
8377 return NULL_TREE;
8379 case RECORD_TYPE:
8380 case UNION_TYPE:
8381 case QUAL_UNION_TYPE:
8382 return context;
8384 case TYPE_DECL:
8385 case FUNCTION_DECL:
8386 context = DECL_CONTEXT (context);
8387 break;
8389 case BLOCK:
8390 context = BLOCK_SUPERCONTEXT (context);
8391 break;
8393 default:
8394 gcc_unreachable ();
8397 return NULL_TREE;
8400 /* CALL is a CALL_EXPR. Return the declaration for the function
8401 called, or NULL_TREE if the called function cannot be
8402 determined. */
8404 tree
8405 get_callee_fndecl (const_tree call)
8407 tree addr;
8409 if (call == error_mark_node)
8410 return error_mark_node;
8412 /* It's invalid to call this function with anything but a
8413 CALL_EXPR. */
8414 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8416 /* The first operand to the CALL is the address of the function
8417 called. */
8418 addr = CALL_EXPR_FN (call);
8420 /* If there is no function, return early. */
8421 if (addr == NULL_TREE)
8422 return NULL_TREE;
8424 STRIP_NOPS (addr);
8426 /* If this is a readonly function pointer, extract its initial value. */
8427 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8428 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8429 && DECL_INITIAL (addr))
8430 addr = DECL_INITIAL (addr);
8432 /* If the address is just `&f' for some function `f', then we know
8433 that `f' is being called. */
8434 if (TREE_CODE (addr) == ADDR_EXPR
8435 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8436 return TREE_OPERAND (addr, 0);
8438 /* We couldn't figure out what was being called. */
8439 return NULL_TREE;
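/* Illustrative sketch, not part of the original source: a common pattern is
   to look up the callee and then test whether it is a specific built-in.
   CALL below is a hypothetical CALL_EXPR obtained elsewhere.  */

static bool
example_calls_memcpy_p (tree call)
{
  tree fndecl = get_callee_fndecl (call);
  return fndecl != NULL_TREE && fndecl_built_in_p (fndecl, BUILT_IN_MEMCPY);
}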
8442 /* Return true when CALL's arguments and return value match those of FNDECL,
8443 a decl of a builtin function. */
8445 static bool
8446 tree_builtin_call_types_compatible_p (const_tree call, tree fndecl)
8448 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
8450 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8451 if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
8452 fndecl = decl;
8454 bool gimple_form = (cfun && (cfun->curr_properties & PROP_gimple)) != 0;
8455 if (gimple_form
8456 ? !useless_type_conversion_p (TREE_TYPE (call),
8457 TREE_TYPE (TREE_TYPE (fndecl)))
8458 : (TYPE_MAIN_VARIANT (TREE_TYPE (call))
8459 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl)))))
8460 return false;
8462 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8463 unsigned nargs = call_expr_nargs (call);
8464 for (unsigned i = 0; i < nargs; ++i, targs = TREE_CHAIN (targs))
8466 /* Variadic args follow. */
8467 if (!targs)
8468 return true;
8469 tree arg = CALL_EXPR_ARG (call, i);
8470 tree type = TREE_VALUE (targs);
8471 if (gimple_form
8472 ? !useless_type_conversion_p (type, TREE_TYPE (arg))
8473 : TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (arg)))
8475 /* For pointer arguments be more forgiving, e.g. due to
8476 FILE * vs. fileptr_type_node, or say char * vs. const char *
8477 differences etc. */
8478 if (!gimple_form
8479 && POINTER_TYPE_P (type)
8480 && POINTER_TYPE_P (TREE_TYPE (arg))
8481 && tree_nop_conversion_p (type, TREE_TYPE (arg)))
8482 continue;
8483 /* char/short integral arguments are promoted to int
8484 by several frontends if targetm.calls.promote_prototypes
8485 is true. Allow such promotion too. */
8486 if (INTEGRAL_TYPE_P (type)
8487 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
8488 && INTEGRAL_TYPE_P (TREE_TYPE (arg))
8489 && !TYPE_UNSIGNED (TREE_TYPE (arg))
8490 && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
8491 && (gimple_form
8492 ? useless_type_conversion_p (integer_type_node,
8493 TREE_TYPE (arg))
8494 : tree_nop_conversion_p (integer_type_node,
8495 TREE_TYPE (arg))))
8496 continue;
8497 return false;
8500 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
8501 return false;
8502 return true;
8505 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8506 return the associated function code, otherwise return CFN_LAST. */
8508 combined_fn
8509 get_call_combined_fn (const_tree call)
8511 /* It's invalid to call this function with anything but a CALL_EXPR. */
8512 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8514 if (!CALL_EXPR_FN (call))
8515 return as_combined_fn (CALL_EXPR_IFN (call));
8517 tree fndecl = get_callee_fndecl (call);
8518 if (fndecl
8519 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
8520 && tree_builtin_call_types_compatible_p (call, fndecl))
8521 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8523 return CFN_LAST;
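/* Illustrative sketch, not part of the original source: combined_fn lets a
   caller treat built-in and internal functions uniformly.  CALL below is a
   hypothetical CALL_EXPR.  */

static bool
example_is_sqrt_builtin_call (tree call)
{
  /* CFN_BUILT_IN_SQRT covers the library built-in; an internal-function
     square root would instead report CFN_SQRT.  */
  return get_call_combined_fn (call) == CFN_BUILT_IN_SQRT;
}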
8526 /* Comparator of indices based on tree_node_counts. */
8528 static int
8529 tree_nodes_cmp (const void *p1, const void *p2)
8531 const unsigned *n1 = (const unsigned *)p1;
8532 const unsigned *n2 = (const unsigned *)p2;
8534 return tree_node_counts[*n1] - tree_node_counts[*n2];
8537 /* Comparator of indices based on tree_code_counts. */
8539 static int
8540 tree_codes_cmp (const void *p1, const void *p2)
8542 const unsigned *n1 = (const unsigned *)p1;
8543 const unsigned *n2 = (const unsigned *)p2;
8545 return tree_code_counts[*n1] - tree_code_counts[*n2];
8548 #define TREE_MEM_USAGE_SPACES 40
8550 /* Print debugging information about tree nodes generated during the compile,
8551 and any language-specific information. */
8553 void
8554 dump_tree_statistics (void)
8556 if (GATHER_STATISTICS)
8558 uint64_t total_nodes, total_bytes;
8559 fprintf (stderr, "\nKind Nodes Bytes\n");
8560 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8561 total_nodes = total_bytes = 0;
8564 auto_vec<unsigned> indices (all_kinds);
8565 for (unsigned i = 0; i < all_kinds; i++)
8566 indices.quick_push (i);
8567 indices.qsort (tree_nodes_cmp);
8569 for (unsigned i = 0; i < (int) all_kinds; i++)
8571 unsigned j = indices[i];
8572 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8573 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8574 SIZE_AMOUNT (tree_node_sizes[j]));
8575 total_nodes += tree_node_counts[j];
8576 total_bytes += tree_node_sizes[j];
8578 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8579 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8580 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8581 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8585 fprintf (stderr, "Code Nodes\n");
8586 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8588 auto_vec<unsigned> indices (MAX_TREE_CODES);
8589 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8590 indices.quick_push (i);
8591 indices.qsort (tree_codes_cmp);
8593 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8595 unsigned j = indices[i];
8596 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8597 get_tree_code_name ((enum tree_code) j),
8598 SIZE_AMOUNT (tree_code_counts[j]));
8600 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8601 fprintf (stderr, "\n");
8602 ssanames_print_statistics ();
8603 fprintf (stderr, "\n");
8604 phinodes_print_statistics ();
8605 fprintf (stderr, "\n");
8608 else
8609 fprintf (stderr, "(No per-node statistics)\n");
8611 print_type_hash_statistics ();
8612 print_debug_expr_statistics ();
8613 print_value_expr_statistics ();
8614 lang_hooks.print_statistics ();
8617 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8619 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8621 unsigned
8622 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8624 /* This relies on the raw feedback's top 4 bits being zero. */
8625 #define FEEDBACK(X) ((X) * 0x04c11db7)
8626 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8627 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8628 static const unsigned syndromes[16] =
8630 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8631 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8632 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8633 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8635 #undef FEEDBACK
8636 #undef SYNDROME
8638 value <<= (32 - bytes * 8);
8639 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8641 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8643 chksum = (chksum << 4) ^ feedback;
8646 return chksum;
8649 /* Generate a crc32 of a string. */
8651 unsigned
8652 crc32_string (unsigned chksum, const char *string)
8654 do
8655 chksum = crc32_byte (chksum, *string);
8656 while (*string++);
8657 return chksum;
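/* Illustrative sketch, not part of the original source: the CRC helpers are
   chained, threading the running checksum through successive calls.  The
   values below are hypothetical.  */

static unsigned
example_checksum (const char *name)
{
  unsigned chksum = crc32_string (0, name);
  /* Mix in all four bytes of a 32-bit value.  */
  chksum = crc32_unsigned_n (chksum, 0xdeadbeef, 4);
  return chksum;
}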
8660 /* P is a string that will be used in a symbol. Mask out any characters
8661 that are not valid in that context. */
8663 void
8664 clean_symbol_name (char *p)
8666 for (; *p; p++)
8667 if (! (ISALNUM (*p)
8668 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8669 || *p == '$'
8670 #endif
8671 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8672 || *p == '.'
8673 #endif
8675 *p = '_';
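/* Illustrative sketch, not part of the original source: clean_symbol_name
   mutates its buffer in place, so callers pass a writable copy.  The
   literal below is hypothetical.  */

static void
example_clean_symbol (void)
{
  char buf[] = "foo-bar+baz";
  /* '-' and '+' are not valid symbol characters and become '_'.  */
  clean_symbol_name (buf);
}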
8678 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8680 /* Create a unique anonymous identifier. The identifier is still a
8681 valid assembly label. */
8683 tree
8684 make_anon_name ()
8686 const char *fmt =
8687 #if !defined (NO_DOT_IN_LABEL)
8688 "."
8689 #elif !defined (NO_DOLLAR_IN_LABEL)
8690 "$"
8691 #else
8692 "_"
8693 #endif
8694 "_anon_%d";
8696 char buf[24];
8697 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8698 gcc_checking_assert (len < int (sizeof (buf)));
8700 tree id = get_identifier_with_length (buf, len);
8701 IDENTIFIER_ANON_P (id) = true;
8703 return id;
8706 /* Generate a name for a special-purpose function.
8707 The generated name may need to be unique across the whole link.
8708 Changes to this function may also require corresponding changes to
8709 xstrdup_mask_random.
8710 TYPE is some string to identify the purpose of this function to the
8711 linker or collect2; it must start with an uppercase letter,
8712 one of:
8713 I - for constructors
8714 D - for destructors
8715 N - for C++ anonymous namespaces
8716 F - for DWARF unwind frame information. */
8718 tree
8719 get_file_function_name (const char *type)
8721 char *buf;
8722 const char *p;
8723 char *q;
8725 /* If we already have a name we know to be unique, just use that. */
8726 if (first_global_object_name)
8727 p = q = ASTRDUP (first_global_object_name);
8728 /* If the target is handling the constructors/destructors, they
8729 will be local to this file and the name is only necessary for
8730 debugging purposes.
8731 We also assign sub_I and sub_D suffixes to constructors called from
8732 the global static constructors. These are always local. */
8733 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8734 || (startswith (type, "sub_")
8735 && (type[4] == 'I' || type[4] == 'D')))
8737 const char *file = main_input_filename;
8738 if (! file)
8739 file = LOCATION_FILE (input_location);
8740 /* Just use the file's basename, because the full pathname
8741 might be quite long. */
8742 p = q = ASTRDUP (lbasename (file));
8744 else
8746 /* Otherwise, the name must be unique across the entire link.
8747 We don't have anything that we know to be unique to this translation
8748 unit, so use what we do have and throw in some randomness. */
8749 unsigned len;
8750 const char *name = weak_global_object_name;
8751 const char *file = main_input_filename;
8753 if (! name)
8754 name = "";
8755 if (! file)
8756 file = LOCATION_FILE (input_location);
8758 len = strlen (file);
8759 q = (char *) alloca (9 + 19 + len + 1);
8760 memcpy (q, file, len + 1);
8762 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8763 crc32_string (0, name), get_random_seed (false));
8765 p = q;
8768 clean_symbol_name (q);
8769 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8770 + strlen (type));
8772 /* Set up the name of the file-level functions we may need.
8773 Use a global object (which is already required to be unique over
8774 the program) rather than the file name (which imposes extra
8775 constraints). */
8776 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8778 return get_identifier (buf);
8781 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8783 /* Complain that the tree code of NODE does not match the expected 0
8784 terminated list of trailing codes. The trailing code list can be
8785 empty, for a more vague error message. FILE, LINE, and FUNCTION
8786 are of the caller. */
8788 void
8789 tree_check_failed (const_tree node, const char *file,
8790 int line, const char *function, ...)
8792 va_list args;
8793 const char *buffer;
8794 unsigned length = 0;
8795 enum tree_code code;
8797 va_start (args, function);
8798 while ((code = (enum tree_code) va_arg (args, int)))
8799 length += 4 + strlen (get_tree_code_name (code));
8800 va_end (args);
8801 if (length)
8803 char *tmp;
8804 va_start (args, function);
8805 length += strlen ("expected ");
8806 buffer = tmp = (char *) alloca (length);
8807 length = 0;
8808 while ((code = (enum tree_code) va_arg (args, int)))
8810 const char *prefix = length ? " or " : "expected ";
8812 strcpy (tmp + length, prefix);
8813 length += strlen (prefix);
8814 strcpy (tmp + length, get_tree_code_name (code));
8815 length += strlen (get_tree_code_name (code));
8817 va_end (args);
8819 else
8820 buffer = "unexpected node";
8822 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8823 buffer, get_tree_code_name (TREE_CODE (node)),
8824 function, trim_filename (file), line);
8827 /* Complain that the tree code of NODE does match the expected 0
8828 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8829 the caller. */
8831 void
8832 tree_not_check_failed (const_tree node, const char *file,
8833 int line, const char *function, ...)
8835 va_list args;
8836 char *buffer;
8837 unsigned length = 0;
8838 enum tree_code code;
8840 va_start (args, function);
8841 while ((code = (enum tree_code) va_arg (args, int)))
8842 length += 4 + strlen (get_tree_code_name (code));
8843 va_end (args);
8844 va_start (args, function);
8845 buffer = (char *) alloca (length);
8846 length = 0;
8847 while ((code = (enum tree_code) va_arg (args, int)))
8849 if (length)
8851 strcpy (buffer + length, " or ");
8852 length += 4;
8854 strcpy (buffer + length, get_tree_code_name (code));
8855 length += strlen (get_tree_code_name (code));
8857 va_end (args);
8859 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8860 buffer, get_tree_code_name (TREE_CODE (node)),
8861 function, trim_filename (file), line);
8864 /* Similar to tree_check_failed, except that we check for a class of tree
8865 code, given in CL. */
8867 void
8868 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8869 const char *file, int line, const char *function)
8871 internal_error
8872 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8873 TREE_CODE_CLASS_STRING (cl),
8874 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8875 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8878 /* Similar to tree_check_failed, except that instead of specifying a
8879 dozen codes, use the knowledge that they're all sequential. */
8881 void
8882 tree_range_check_failed (const_tree node, const char *file, int line,
8883 const char *function, enum tree_code c1,
8884 enum tree_code c2)
8886 char *buffer;
8887 unsigned length = 0;
8888 unsigned int c;
8890 for (c = c1; c <= c2; ++c)
8891 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
8893 length += strlen ("expected ");
8894 buffer = (char *) alloca (length);
8895 length = 0;
8897 for (c = c1; c <= c2; ++c)
8899 const char *prefix = length ? " or " : "expected ";
8901 strcpy (buffer + length, prefix);
8902 length += strlen (prefix);
8903 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
8904 length += strlen (get_tree_code_name ((enum tree_code) c));
8907 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8908 buffer, get_tree_code_name (TREE_CODE (node)),
8909 function, trim_filename (file), line);
8913 /* Similar to tree_check_failed, except that we check that a tree does
8914 not belong to the specified class, given in CL. */
8916 void
8917 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
8918 const char *file, int line, const char *function)
8920 internal_error
8921 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8922 TREE_CODE_CLASS_STRING (cl),
8923 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8924 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8928 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
8930 void
8931 omp_clause_check_failed (const_tree node, const char *file, int line,
8932 const char *function, enum omp_clause_code code)
8934 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
8935 "in %s, at %s:%d",
8936 omp_clause_code_name[code],
8937 get_tree_code_name (TREE_CODE (node)),
8938 function, trim_filename (file), line);
8942 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
8944 void
8945 omp_clause_range_check_failed (const_tree node, const char *file, int line,
8946 const char *function, enum omp_clause_code c1,
8947 enum omp_clause_code c2)
8949 char *buffer;
8950 unsigned length = 0;
8951 unsigned int c;
8953 for (c = c1; c <= c2; ++c)
8954 length += 4 + strlen (omp_clause_code_name[c]);
8956 length += strlen ("expected ");
8957 buffer = (char *) alloca (length);
8958 length = 0;
8960 for (c = c1; c <= c2; ++c)
8962 const char *prefix = length ? " or " : "expected ";
8964 strcpy (buffer + length, prefix);
8965 length += strlen (prefix);
8966 strcpy (buffer + length, omp_clause_code_name[c]);
8967 length += strlen (omp_clause_code_name[c]);
8970 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8971 buffer, omp_clause_code_name[TREE_CODE (node)],
8972 function, trim_filename (file), line);
8976 #undef DEFTREESTRUCT
8977 #define DEFTREESTRUCT(VAL, NAME) NAME,
8979 static const char *ts_enum_names[] = {
8980 #include "treestruct.def"
8982 #undef DEFTREESTRUCT
8984 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
8986 /* Similar to tree_class_check_failed, except that we check for
8987 whether CODE contains the tree structure identified by EN. */
8989 void
8990 tree_contains_struct_check_failed (const_tree node,
8991 const enum tree_node_structure_enum en,
8992 const char *file, int line,
8993 const char *function)
8995 internal_error
8996 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
8997 TS_ENUM_NAME (en),
8998 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9002 /* Similar to above, except that the check is for the bounds of a
9003 tree_int_cst's (dynamically sized) vector of elements. */
9005 void
9006 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9007 const char *function)
9009 internal_error
9010 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9011 "at %s:%d",
9012 idx + 1, len, function, trim_filename (file), line);
9015 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9016 (dynamically sized) vector. */
9018 void
9019 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9020 const char *function)
9022 internal_error
9023 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9024 idx + 1, len, function, trim_filename (file), line);
9027 /* Similar to above, except that the check is for the bounds of the operand
9028 vector of an expression node EXP. */
9030 void
9031 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9032 int line, const char *function)
9034 enum tree_code code = TREE_CODE (exp);
9035 internal_error
9036 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9037 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9038 function, trim_filename (file), line);
9041 /* Similar to above, except that the check is for the number of
9042 operands of an OMP_CLAUSE node. */
9044 void
9045 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9046 int line, const char *function)
9048 internal_error
9049 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9050 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9051 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9052 trim_filename (file), line);
9054 #endif /* ENABLE_TREE_CHECKING */
9056 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9057 and mapped to the machine mode MODE. Initialize its fields and build
9058 the information necessary for debugging output. */
9060 static tree
9061 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9063 tree t;
9064 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9066 t = make_node (VECTOR_TYPE);
9067 TREE_TYPE (t) = mv_innertype;
9068 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9069 SET_TYPE_MODE (t, mode);
9071 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9072 SET_TYPE_STRUCTURAL_EQUALITY (t);
9073 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9074 || mode != VOIDmode)
9075 && !VECTOR_BOOLEAN_TYPE_P (t))
9076 TYPE_CANONICAL (t)
9077 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9079 layout_type (t);
9081 hashval_t hash = type_hash_canon_hash (t);
9082 t = type_hash_canon (hash, t);
9084 /* We have built a main variant, based on the main variant of the
9085 inner type. Use it to build the variant we return. */
9086 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9087 && TREE_TYPE (t) != innertype)
9088 return build_type_attribute_qual_variant (t,
9089 TYPE_ATTRIBUTES (innertype),
9090 TYPE_QUALS (innertype));
9092 return t;
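/* Illustrative sketch, not part of the original source: make_vector_type is
   static, so callers reach it through wrappers such as build_vector_type.  */

static tree
example_v4si_type (void)
{
  /* A vector of four 32-bit signed integers; the machine mode is chosen
     during layout.  */
  return build_vector_type (intSI_type_node, 4);
}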
9095 static tree
9096 make_or_reuse_type (unsigned size, int unsignedp)
9098 int i;
9100 if (size == INT_TYPE_SIZE)
9101 return unsignedp ? unsigned_type_node : integer_type_node;
9102 if (size == CHAR_TYPE_SIZE)
9103 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9104 if (size == SHORT_TYPE_SIZE)
9105 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9106 if (size == LONG_TYPE_SIZE)
9107 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9108 if (size == LONG_LONG_TYPE_SIZE)
9109 return (unsignedp ? long_long_unsigned_type_node
9110 : long_long_integer_type_node);
9112 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9113 if (size == int_n_data[i].bitsize
9114 && int_n_enabled_p[i])
9115 return (unsignedp ? int_n_trees[i].unsigned_type
9116 : int_n_trees[i].signed_type);
9118 if (unsignedp)
9119 return make_unsigned_type (size);
9120 else
9121 return make_signed_type (size);
9124 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9126 static tree
9127 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9129 if (satp)
9131 if (size == SHORT_FRACT_TYPE_SIZE)
9132 return unsignedp ? sat_unsigned_short_fract_type_node
9133 : sat_short_fract_type_node;
9134 if (size == FRACT_TYPE_SIZE)
9135 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9136 if (size == LONG_FRACT_TYPE_SIZE)
9137 return unsignedp ? sat_unsigned_long_fract_type_node
9138 : sat_long_fract_type_node;
9139 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9140 return unsignedp ? sat_unsigned_long_long_fract_type_node
9141 : sat_long_long_fract_type_node;
9143 else
9145 if (size == SHORT_FRACT_TYPE_SIZE)
9146 return unsignedp ? unsigned_short_fract_type_node
9147 : short_fract_type_node;
9148 if (size == FRACT_TYPE_SIZE)
9149 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9150 if (size == LONG_FRACT_TYPE_SIZE)
9151 return unsignedp ? unsigned_long_fract_type_node
9152 : long_fract_type_node;
9153 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9154 return unsignedp ? unsigned_long_long_fract_type_node
9155 : long_long_fract_type_node;
9158 return make_fract_type (size, unsignedp, satp);
9161 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9163 static tree
9164 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9166 if (satp)
9168 if (size == SHORT_ACCUM_TYPE_SIZE)
9169 return unsignedp ? sat_unsigned_short_accum_type_node
9170 : sat_short_accum_type_node;
9171 if (size == ACCUM_TYPE_SIZE)
9172 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9173 if (size == LONG_ACCUM_TYPE_SIZE)
9174 return unsignedp ? sat_unsigned_long_accum_type_node
9175 : sat_long_accum_type_node;
9176 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9177 return unsignedp ? sat_unsigned_long_long_accum_type_node
9178 : sat_long_long_accum_type_node;
9180 else
9182 if (size == SHORT_ACCUM_TYPE_SIZE)
9183 return unsignedp ? unsigned_short_accum_type_node
9184 : short_accum_type_node;
9185 if (size == ACCUM_TYPE_SIZE)
9186 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9187 if (size == LONG_ACCUM_TYPE_SIZE)
9188 return unsignedp ? unsigned_long_accum_type_node
9189 : long_accum_type_node;
9190 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9191 return unsignedp ? unsigned_long_long_accum_type_node
9192 : long_long_accum_type_node;
9195 return make_accum_type (size, unsignedp, satp);
9199 /* Create an atomic variant node for TYPE. This routine is called
9200 during initialization of data types to create the 5 basic atomic
9201 types. The generic build_variant_type function requires these to
9202 already be set up in order to function properly, so cannot be
9203 called from there. If ALIGN is non-zero, then ensure alignment is
9204 overridden to this value. */
9206 static tree
9207 build_atomic_base (tree type, unsigned int align)
9209 tree t;
9211 /* Make sure it's not already registered. */
9212 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9213 return t;
9215 t = build_variant_type_copy (type);
9216 set_type_quals (t, TYPE_QUAL_ATOMIC);
9218 if (align)
9219 SET_TYPE_ALIGN (t, align);
9221 return t;
9224 /* Information about the _FloatN and _FloatNx types. This must be in
9225 the same order as the corresponding TI_* enum values. */
9226 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9228 { 16, false },
9229 { 32, false },
9230 { 64, false },
9231 { 128, false },
9232 { 32, true },
9233 { 64, true },
9234 { 128, true },
9238 /* Create nodes for all integer types (and error_mark_node) using the sizes
9239 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9241 void
9242 build_common_tree_nodes (bool signed_char)
9244 int i;
9246 error_mark_node = make_node (ERROR_MARK);
9247 TREE_TYPE (error_mark_node) = error_mark_node;
9249 initialize_sizetypes ();
9251 /* Define both `signed char' and `unsigned char'. */
9252 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9253 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9254 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9255 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9257 /* Define `char', which is like either `signed char' or `unsigned char'
9258 but not the same as either. */
9259 char_type_node
9260 = (signed_char
9261 ? make_signed_type (CHAR_TYPE_SIZE)
9262 : make_unsigned_type (CHAR_TYPE_SIZE));
9263 TYPE_STRING_FLAG (char_type_node) = 1;
9265 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9266 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9267 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9268 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9269 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9270 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9271 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9272 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9274 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9276 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9277 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9279 if (int_n_enabled_p[i])
9281 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9282 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9286 /* Define a boolean type. This type only represents boolean values but
9287 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9288 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9289 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9290 TYPE_PRECISION (boolean_type_node) = 1;
9291 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9293 /* Define what type to use for size_t. */
9294 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9295 size_type_node = unsigned_type_node;
9296 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9297 size_type_node = long_unsigned_type_node;
9298 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9299 size_type_node = long_long_unsigned_type_node;
9300 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9301 size_type_node = short_unsigned_type_node;
9302 else
9304 int i;
9306 size_type_node = NULL_TREE;
9307 for (i = 0; i < NUM_INT_N_ENTS; i++)
9308 if (int_n_enabled_p[i])
9310 char name[50], altname[50];
9311 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9312 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9314 if (strcmp (name, SIZE_TYPE) == 0
9315 || strcmp (altname, SIZE_TYPE) == 0)
9317 size_type_node = int_n_trees[i].unsigned_type;
9320 if (size_type_node == NULL_TREE)
9321 gcc_unreachable ();
9324 /* Define what type to use for ptrdiff_t. */
9325 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9326 ptrdiff_type_node = integer_type_node;
9327 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9328 ptrdiff_type_node = long_integer_type_node;
9329 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9330 ptrdiff_type_node = long_long_integer_type_node;
9331 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9332 ptrdiff_type_node = short_integer_type_node;
9333 else
9335 ptrdiff_type_node = NULL_TREE;
9336 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9337 if (int_n_enabled_p[i])
9339 char name[50], altname[50];
9340 sprintf (name, "__int%d", int_n_data[i].bitsize);
9341 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9343 if (strcmp (name, PTRDIFF_TYPE) == 0
9344 || strcmp (altname, PTRDIFF_TYPE) == 0)
9345 ptrdiff_type_node = int_n_trees[i].signed_type;
9347 if (ptrdiff_type_node == NULL_TREE)
9348 gcc_unreachable ();
9351 /* Fill in the rest of the sized types. Reuse existing type nodes
9352 when possible. */
9353 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9354 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9355 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9356 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9357 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9359 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9360 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9361 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9362 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9363 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9365 /* Don't call build_qualified_type for atomics. That routine does
9366 special processing for atomics, and until they are initialized
9367 it's better not to make that call.
9369 Check to see if there is a target override for atomic types. */
9371 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9372 targetm.atomic_align_for_mode (QImode));
9373 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9374 targetm.atomic_align_for_mode (HImode));
9375 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9376 targetm.atomic_align_for_mode (SImode));
9377 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9378 targetm.atomic_align_for_mode (DImode));
9379 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9380 targetm.atomic_align_for_mode (TImode));
9382 access_public_node = get_identifier ("public");
9383 access_protected_node = get_identifier ("protected");
9384 access_private_node = get_identifier ("private");
9386 /* Define these next since types below may use them. */
9387 integer_zero_node = build_int_cst (integer_type_node, 0);
9388 integer_one_node = build_int_cst (integer_type_node, 1);
9389 integer_three_node = build_int_cst (integer_type_node, 3);
9390 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9392 size_zero_node = size_int (0);
9393 size_one_node = size_int (1);
9394 bitsize_zero_node = bitsize_int (0);
9395 bitsize_one_node = bitsize_int (1);
9396 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9398 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9399 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9401 void_type_node = make_node (VOID_TYPE);
9402 layout_type (void_type_node);
9404 /* We are not going to have real types in C with less than byte alignment,
9405 so we might as well not have any types that claim to have it. */
9406 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9407 TYPE_USER_ALIGN (void_type_node) = 0;
9409 void_node = make_node (VOID_CST);
9410 TREE_TYPE (void_node) = void_type_node;
9412 void_list_node = build_tree_list (NULL_TREE, void_type_node);
9414 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9415 layout_type (TREE_TYPE (null_pointer_node));
9417 ptr_type_node = build_pointer_type (void_type_node);
9418 const_ptr_type_node
9419 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9420 for (unsigned i = 0; i < ARRAY_SIZE (builtin_structptr_types); ++i)
9421 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9423 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9425 float_type_node = make_node (REAL_TYPE);
9426 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9427 layout_type (float_type_node);
9429 double_type_node = make_node (REAL_TYPE);
9430 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9431 layout_type (double_type_node);
9433 long_double_type_node = make_node (REAL_TYPE);
9434 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9435 layout_type (long_double_type_node);
9437 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9439 int n = floatn_nx_types[i].n;
9440 bool extended = floatn_nx_types[i].extended;
9441 scalar_float_mode mode;
9442 if (!targetm.floatn_mode (n, extended).exists (&mode))
9443 continue;
9444 int precision = GET_MODE_PRECISION (mode);
9445 /* Work around the rs6000 KFmode having precision 113 not
9446 128. */
9447 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9448 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9449 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9450 if (!extended)
9451 gcc_assert (min_precision == n);
9452 if (precision < min_precision)
9453 precision = min_precision;
9454 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9455 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9456 layout_type (FLOATN_NX_TYPE_NODE (i));
9457 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9459 float128t_type_node = float128_type_node;
9460 #ifdef HAVE_BFmode
9461 if (REAL_MODE_FORMAT (BFmode) == &arm_bfloat_half_format
9462 && targetm.scalar_mode_supported_p (BFmode)
9463 && targetm.libgcc_floating_mode_supported_p (BFmode))
9465 bfloat16_type_node = make_node (REAL_TYPE);
9466 TYPE_PRECISION (bfloat16_type_node) = GET_MODE_PRECISION (BFmode);
9467 layout_type (bfloat16_type_node);
9468 SET_TYPE_MODE (bfloat16_type_node, BFmode);
9470 #endif
9472 float_ptr_type_node = build_pointer_type (float_type_node);
9473 double_ptr_type_node = build_pointer_type (double_type_node);
9474 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9475 integer_ptr_type_node = build_pointer_type (integer_type_node);
9477 /* Fixed size integer types. */
9478 uint16_type_node = make_or_reuse_type (16, 1);
9479 uint32_type_node = make_or_reuse_type (32, 1);
9480 uint64_type_node = make_or_reuse_type (64, 1);
9481 if (targetm.scalar_mode_supported_p (TImode))
9482 uint128_type_node = make_or_reuse_type (128, 1);
9484 /* Decimal float types. */
9485 if (targetm.decimal_float_supported_p ())
9487 dfloat32_type_node = make_node (REAL_TYPE);
9488 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9489 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9490 layout_type (dfloat32_type_node);
9492 dfloat64_type_node = make_node (REAL_TYPE);
9493 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9494 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9495 layout_type (dfloat64_type_node);
9497 dfloat128_type_node = make_node (REAL_TYPE);
9498 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9499 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9500 layout_type (dfloat128_type_node);
9503 complex_integer_type_node = build_complex_type (integer_type_node, true);
9504 complex_float_type_node = build_complex_type (float_type_node, true);
9505 complex_double_type_node = build_complex_type (double_type_node, true);
9506 complex_long_double_type_node = build_complex_type (long_double_type_node,
9507 true);
9509 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9511 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9512 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9513 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9516 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9517 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9518 sat_ ## KIND ## _type_node = \
9519 make_sat_signed_ ## KIND ## _type (SIZE); \
9520 sat_unsigned_ ## KIND ## _type_node = \
9521 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9522 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9523 unsigned_ ## KIND ## _type_node = \
9524 make_unsigned_ ## KIND ## _type (SIZE);
9526 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9527 sat_ ## WIDTH ## KIND ## _type_node = \
9528 make_sat_signed_ ## KIND ## _type (SIZE); \
9529 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9530 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9531 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9532 unsigned_ ## WIDTH ## KIND ## _type_node = \
9533 make_unsigned_ ## KIND ## _type (SIZE);
9535 /* Make fixed-point type nodes based on four different widths. */
9536 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9537 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9538 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9539 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9540 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9542 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9543 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9544 NAME ## _type_node = \
9545 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9546 u ## NAME ## _type_node = \
9547 make_or_reuse_unsigned_ ## KIND ## _type \
9548 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9549 sat_ ## NAME ## _type_node = \
9550 make_or_reuse_sat_signed_ ## KIND ## _type \
9551 (GET_MODE_BITSIZE (MODE ## mode)); \
9552 sat_u ## NAME ## _type_node = \
9553 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9554 (GET_MODE_BITSIZE (U ## MODE ## mode));
9556 /* Fixed-point type and mode nodes. */
9557 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9558 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9559 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9560 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9561 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9562 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9563 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9564 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9565 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9566 MAKE_FIXED_MODE_NODE (accum, da, DA)
9567 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9570 tree t = targetm.build_builtin_va_list ();
9572 /* Many back-ends define record types without setting TYPE_NAME.
9573 If we copied the record type here, we'd keep the original
9574 record type without a name. This breaks name mangling. So,
9575 don't copy record types and let c_common_nodes_and_builtins()
9576 declare the type to be __builtin_va_list. */
9577 if (TREE_CODE (t) != RECORD_TYPE)
9578 t = build_variant_type_copy (t);
9580 va_list_type_node = t;
9583 /* SCEV analyzer global shared trees. */
9584 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9585 TREE_TYPE (chrec_dont_know) = void_type_node;
9586 chrec_known = make_node (SCEV_KNOWN);
9587 TREE_TYPE (chrec_known) = void_type_node;
9590 /* Modify DECL for given flags.
9591 TM_PURE attribute is set only on types, so the function will modify
9592 DECL's type when ECF_TM_PURE is used. */
9594 void
9595 set_call_expr_flags (tree decl, int flags)
9597 if (flags & ECF_NOTHROW)
9598 TREE_NOTHROW (decl) = 1;
9599 if (flags & ECF_CONST)
9600 TREE_READONLY (decl) = 1;
9601 if (flags & ECF_PURE)
9602 DECL_PURE_P (decl) = 1;
9603 if (flags & ECF_LOOPING_CONST_OR_PURE)
9604 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9605 if (flags & ECF_NOVOPS)
9606 DECL_IS_NOVOPS (decl) = 1;
9607 if (flags & ECF_NORETURN)
9608 TREE_THIS_VOLATILE (decl) = 1;
9609 if (flags & ECF_MALLOC)
9610 DECL_IS_MALLOC (decl) = 1;
9611 if (flags & ECF_RETURNS_TWICE)
9612 DECL_IS_RETURNS_TWICE (decl) = 1;
9613 if (flags & ECF_LEAF)
9614 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9615 NULL, DECL_ATTRIBUTES (decl));
9616 if (flags & ECF_COLD)
9617 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9618 NULL, DECL_ATTRIBUTES (decl));
9619 if (flags & ECF_RET1)
9620 DECL_ATTRIBUTES (decl)
9621 = tree_cons (get_identifier ("fn spec"),
9622 build_tree_list (NULL_TREE, build_string (2, "1 ")),
9623 DECL_ATTRIBUTES (decl));
9624 if ((flags & ECF_TM_PURE) && flag_tm)
9625 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9626 /* Looping const or pure is implied by noreturn.
9627 There is currently no way to declare looping const or looping pure alone. */
9628 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9629 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
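/* Illustrative sketch, not part of the original source: ECF_* flags are
   normally combined when describing a declaration.  DECL below is a
   hypothetical FUNCTION_DECL built elsewhere.  */

static void
example_mark_noreturn_leaf (tree decl)
{
  /* Never returns, never throws, and makes no calls back into the
     current translation unit.  */
  set_call_expr_flags (decl, ECF_NORETURN | ECF_NOTHROW | ECF_LEAF);
}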
9633 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9635 static void
9636 local_define_builtin (const char *name, tree type, enum built_in_function code,
9637 const char *library_name, int ecf_flags)
9639 tree decl;
9641 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9642 library_name, NULL_TREE);
9643 set_call_expr_flags (decl, ecf_flags);
9645 set_builtin_decl (code, decl, true);
9648 /* Call this function after instantiating all builtins that the language
9649 front end cares about. This will build the rest of the builtins
9650 and internal functions that are relied upon by the tree optimizers and
9651 the middle-end. */
9653 void
9654 build_common_builtin_nodes (void)
9656 tree tmp, ftype;
9657 int ecf_flags;
9659 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
9661 ftype = build_function_type_list (void_type_node,
9662 ptr_type_node,
9663 ptr_type_node,
9664 integer_type_node,
9665 NULL_TREE);
9666 local_define_builtin ("__builtin_clear_padding", ftype,
9667 BUILT_IN_CLEAR_PADDING,
9668 "__builtin_clear_padding",
9669 ECF_LEAF | ECF_NOTHROW);
9672 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9673 || !builtin_decl_explicit_p (BUILT_IN_TRAP)
9674 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9676 ftype = build_function_type (void_type_node, void_list_node);
9677 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9678 local_define_builtin ("__builtin_unreachable", ftype,
9679 BUILT_IN_UNREACHABLE,
9680 "__builtin_unreachable",
9681 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9682 | ECF_CONST | ECF_COLD);
9683 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9684 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9685 "abort",
9686 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9687 if (!builtin_decl_explicit_p (BUILT_IN_TRAP))
9688 local_define_builtin ("__builtin_trap", ftype, BUILT_IN_TRAP,
9689 "__builtin_trap",
9690 ECF_NORETURN | ECF_NOTHROW | ECF_LEAF | ECF_COLD);
9693 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9694 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9696 ftype = build_function_type_list (ptr_type_node,
9697 ptr_type_node, const_ptr_type_node,
9698 size_type_node, NULL_TREE);
9700 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9701 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9702 "memcpy", ECF_NOTHROW | ECF_LEAF);
9703 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9704 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9705 "memmove", ECF_NOTHROW | ECF_LEAF);
9708 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9710 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9711 const_ptr_type_node, size_type_node,
9712 NULL_TREE);
9713 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9714 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9717 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9719 ftype = build_function_type_list (ptr_type_node,
9720 ptr_type_node, integer_type_node,
9721 size_type_node, NULL_TREE);
9722 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9723 "memset", ECF_NOTHROW | ECF_LEAF);
9726 /* If we're checking the stack, `alloca' can throw. */
9727 const int alloca_flags
9728 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9730 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9732 ftype = build_function_type_list (ptr_type_node,
9733 size_type_node, NULL_TREE);
9734 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9735 "alloca", alloca_flags);
9738 ftype = build_function_type_list (ptr_type_node, size_type_node,
9739 size_type_node, NULL_TREE);
9740 local_define_builtin ("__builtin_alloca_with_align", ftype,
9741 BUILT_IN_ALLOCA_WITH_ALIGN,
9742 "__builtin_alloca_with_align",
9743 alloca_flags);
9745 ftype = build_function_type_list (ptr_type_node, size_type_node,
9746 size_type_node, size_type_node, NULL_TREE);
9747 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9748 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9749 "__builtin_alloca_with_align_and_max",
9750 alloca_flags);
9752 ftype = build_function_type_list (void_type_node,
9753 ptr_type_node, ptr_type_node,
9754 ptr_type_node, NULL_TREE);
9755 local_define_builtin ("__builtin_init_trampoline", ftype,
9756 BUILT_IN_INIT_TRAMPOLINE,
9757 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9758 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9759 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9760 "__builtin_init_heap_trampoline",
9761 ECF_NOTHROW | ECF_LEAF);
9762 local_define_builtin ("__builtin_init_descriptor", ftype,
9763 BUILT_IN_INIT_DESCRIPTOR,
9764 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9766 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9767 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9768 BUILT_IN_ADJUST_TRAMPOLINE,
9769 "__builtin_adjust_trampoline",
9770 ECF_CONST | ECF_NOTHROW);
9771 local_define_builtin ("__builtin_adjust_descriptor", ftype,
9772 BUILT_IN_ADJUST_DESCRIPTOR,
9773 "__builtin_adjust_descriptor",
9774 ECF_CONST | ECF_NOTHROW);
9776 ftype = build_function_type_list (void_type_node,
9777 ptr_type_node, ptr_type_node, NULL_TREE);
9778 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
9779 local_define_builtin ("__builtin___clear_cache", ftype,
9780 BUILT_IN_CLEAR_CACHE,
9781 "__clear_cache",
9782 ECF_NOTHROW);
9784 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9785 BUILT_IN_NONLOCAL_GOTO,
9786 "__builtin_nonlocal_goto",
9787 ECF_NORETURN | ECF_NOTHROW);
9789 ftype = build_function_type_list (void_type_node,
9790 ptr_type_node, ptr_type_node, NULL_TREE);
9791 local_define_builtin ("__builtin_setjmp_setup", ftype,
9792 BUILT_IN_SETJMP_SETUP,
9793 "__builtin_setjmp_setup", ECF_NOTHROW);
9795 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9796 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9797 BUILT_IN_SETJMP_RECEIVER,
9798 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9800 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9801 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9802 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9804 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9805 local_define_builtin ("__builtin_stack_restore", ftype,
9806 BUILT_IN_STACK_RESTORE,
9807 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9809 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9810 const_ptr_type_node, size_type_node,
9811 NULL_TREE);
9812 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
9813 "__builtin_memcmp_eq",
9814 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9816 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
9817 "__builtin_strncmp_eq",
9818 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9820 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
9821 "__builtin_strcmp_eq",
9822 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9824 /* If there's a possibility that we might use the ARM EABI, build the
9825 alternate __cxa_end_cleanup node used to resume from C++. */
9826 if (targetm.arm_eabi_unwinder)
9828 ftype = build_function_type_list (void_type_node, NULL_TREE);
9829 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9830 BUILT_IN_CXA_END_CLEANUP,
9831 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9834 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9835 local_define_builtin ("__builtin_unwind_resume", ftype,
9836 BUILT_IN_UNWIND_RESUME,
9837 ((targetm_common.except_unwind_info (&global_options)
9838 == UI_SJLJ)
9839 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9840 ECF_NORETURN);
9842 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9844 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9845 NULL_TREE);
9846 local_define_builtin ("__builtin_return_address", ftype,
9847 BUILT_IN_RETURN_ADDRESS,
9848 "__builtin_return_address",
9849 ECF_NOTHROW);
9852 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9853 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9855 ftype = build_function_type_list (void_type_node, ptr_type_node,
9856 ptr_type_node, NULL_TREE);
9857 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
9858 local_define_builtin ("__cyg_profile_func_enter", ftype,
9859 BUILT_IN_PROFILE_FUNC_ENTER,
9860 "__cyg_profile_func_enter", 0);
9861 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9862 local_define_builtin ("__cyg_profile_func_exit", ftype,
9863 BUILT_IN_PROFILE_FUNC_EXIT,
9864 "__cyg_profile_func_exit", 0);
9867 /* The exception object and filter values from the runtime. The argument
9868 must be zero before exception lowering, i.e. from the front end. After
9869 exception lowering, it will be the region number for the exception
9870 landing pad. These functions are PURE instead of CONST to prevent
9871 them from being hoisted past the exception edge that will initialize
9872 its value in the landing pad. */
9873 ftype = build_function_type_list (ptr_type_node,
9874 integer_type_node, NULL_TREE);
9875 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
9876 /* Only use TM_PURE if we have TM language support. */
9877 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
9878 ecf_flags |= ECF_TM_PURE;
9879 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
9880 "__builtin_eh_pointer", ecf_flags);
9882 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
9883 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
9884 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
9885 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9887 ftype = build_function_type_list (void_type_node,
9888 integer_type_node, integer_type_node,
9889 NULL_TREE);
9890 local_define_builtin ("__builtin_eh_copy_values", ftype,
9891 BUILT_IN_EH_COPY_VALUES,
9892 "__builtin_eh_copy_values", ECF_NOTHROW);
9894 /* Complex multiplication and division. These are handled as builtins
9895 rather than optabs because emit_library_call_value doesn't support
9896 complex. Further, we can do slightly better at folding these
9897 beasties if the real and imaginary parts of the arguments are separate. */
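/* For example, for SCmode this registers __mulsc3 and __divsc3 (or
__gnu_mulsc3 and __gnu_divsc3 when the target uses the __gnu_ libfunc
prefix). */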
9899 int mode;
9901 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
9903 char mode_name_buf[4], *q;
9904 const char *p;
9905 enum built_in_function mcode, dcode;
9906 tree type, inner_type;
9907 const char *prefix = "__";
9909 if (targetm.libfunc_gnu_prefix)
9910 prefix = "__gnu_";
9912 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
9913 if (type == NULL)
9914 continue;
9915 inner_type = TREE_TYPE (type);
9917 ftype = build_function_type_list (type, inner_type, inner_type,
9918 inner_type, inner_type, NULL_TREE);
9920 mcode = ((enum built_in_function)
9921 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9922 dcode = ((enum built_in_function)
9923 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9925 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
9926 *q = TOLOWER (*p);
9927 *q = '\0';
9929 /* For -ftrapping-math these should throw from a former
9930 -fnon-call-exception stmt. */
9931 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
9932 NULL);
9933 local_define_builtin (built_in_names[mcode], ftype, mcode,
9934 built_in_names[mcode],
9935 ECF_CONST | ECF_LEAF);
9937 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
9938 NULL);
9939 local_define_builtin (built_in_names[dcode], ftype, dcode,
9940 built_in_names[dcode],
9941 ECF_CONST | ECF_LEAF);
9945 init_internal_fns ();
9948 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
9949 better way.
9951 If we requested a pointer to a vector, build up the pointers that
9952 we stripped off while looking for the inner type. Similarly for
9953 return values from functions.
9955 The argument TYPE is the top of the chain, and BOTTOM is the
9956 new type which we will point to. */
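/* For example, if TYPE is float * and BOTTOM is a V4SF vector type, the
result is V4SF *, with the pointer's mode, attributes and qualifiers
preserved. */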
9958 tree
9959 reconstruct_complex_type (tree type, tree bottom)
9961 tree inner, outer;
9963 if (TREE_CODE (type) == POINTER_TYPE)
9965 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9966 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
9967 TYPE_REF_CAN_ALIAS_ALL (type));
9969 else if (TREE_CODE (type) == REFERENCE_TYPE)
9971 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9972 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
9973 TYPE_REF_CAN_ALIAS_ALL (type));
9975 else if (TREE_CODE (type) == ARRAY_TYPE)
9977 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9978 outer = build_array_type (inner, TYPE_DOMAIN (type));
9980 else if (TREE_CODE (type) == FUNCTION_TYPE)
9982 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9983 outer = build_function_type (inner, TYPE_ARG_TYPES (type),
9984 TYPE_NO_NAMED_ARGS_STDARG_P (type));
9986 else if (TREE_CODE (type) == METHOD_TYPE)
9988 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9989 /* The build_method_type_directly() routine prepends 'this' to the argument
9990 list, so we must compensate by getting rid of it. */
9991 outer
9992 = build_method_type_directly
9993 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
9994 inner,
9995 TREE_CHAIN (TYPE_ARG_TYPES (type)));
9997 else if (TREE_CODE (type) == OFFSET_TYPE)
9999 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10000 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10002 else
10003 return bottom;
10005 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10006 TYPE_QUALS (type));
10009 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10010 the inner type. */
10011 tree
10012 build_vector_type_for_mode (tree innertype, machine_mode mode)
10014 poly_int64 nunits;
10015 unsigned int bitsize;
10017 switch (GET_MODE_CLASS (mode))
10019 case MODE_VECTOR_BOOL:
10020 case MODE_VECTOR_INT:
10021 case MODE_VECTOR_FLOAT:
10022 case MODE_VECTOR_FRACT:
10023 case MODE_VECTOR_UFRACT:
10024 case MODE_VECTOR_ACCUM:
10025 case MODE_VECTOR_UACCUM:
10026 nunits = GET_MODE_NUNITS (mode);
10027 break;
10029 case MODE_INT:
10030 /* Check that there are no leftover bits. */
10031 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10032 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10033 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10034 break;
10036 default:
10037 gcc_unreachable ();
10040 return make_vector_type (innertype, nunits, mode);
10043 /* Similarly, but takes the inner type and number of units, which must be
10044 a power of two. */
10046 tree
10047 build_vector_type (tree innertype, poly_int64 nunits)
10049 return make_vector_type (innertype, nunits, VOIDmode);
10052 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10054 tree
10055 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10057 gcc_assert (mask_mode != BLKmode);
10059 unsigned HOST_WIDE_INT esize;
10060 if (VECTOR_MODE_P (mask_mode))
10062 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
10063 esize = vector_element_size (vsize, nunits);
10065 else
10066 esize = 1;
10068 tree bool_type = build_nonstandard_boolean_type (esize);
10070 return make_vector_type (bool_type, nunits, mask_mode);
10073 /* Build a vector type that holds one boolean result for each element of
10074 vector type VECTYPE. The public interface for this operation is
10075 truth_type_for. */
10077 static tree
10078 build_truth_vector_type_for (tree vectype)
10080 machine_mode vector_mode = TYPE_MODE (vectype);
10081 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10083 machine_mode mask_mode;
10084 if (VECTOR_MODE_P (vector_mode)
10085 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10086 return build_truth_vector_type_for_mode (nunits, mask_mode);
10088 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10089 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10090 tree bool_type = build_nonstandard_boolean_type (esize);
10092 return make_vector_type (bool_type, nunits, VOIDmode);
10095 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10096 set. */
10098 tree
10099 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10101 tree t = make_vector_type (innertype, nunits, VOIDmode);
10102 tree cand;
10103 /* We always build the non-opaque variant before the opaque one,
10104 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10105 cand = TYPE_NEXT_VARIANT (t);
10106 if (cand
10107 && TYPE_VECTOR_OPAQUE (cand)
10108 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10109 return cand;
10110 /* Otherwise build a variant type and make sure to queue it after
10111 the non-opaque type. */
10112 cand = build_distinct_type_copy (t);
10113 TYPE_VECTOR_OPAQUE (cand) = true;
10114 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10115 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10116 TYPE_NEXT_VARIANT (t) = cand;
10117 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10118 return cand;
10121 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10123 static poly_wide_int
10124 vector_cst_int_elt (const_tree t, unsigned int i)
10126 /* First handle elements that are directly encoded. */
10127 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10128 if (i < encoded_nelts)
10129 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
10131 /* Identify the pattern that contains element I and work out the index of
10132 the last encoded element for that pattern. */
10133 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10134 unsigned int pattern = i % npatterns;
10135 unsigned int count = i / npatterns;
10136 unsigned int final_i = encoded_nelts - npatterns + pattern;
10138 /* If there are no steps, the final encoded value is the right one. */
10139 if (!VECTOR_CST_STEPPED_P (t))
10140 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10142 /* Otherwise work out the value from the last two encoded elements. */
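/* For example, in a stepped constant { 1, 11, 2, 12, 3, 13, ... } there are
2 patterns and 3 encoded elements per pattern; for I == 6, PATTERN == 0 and
COUNT == 3, the last two encoded values of that pattern are 2 and 3, so
diff == 1 and the result is 3 + (COUNT - 2) * diff == 4. */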
10143 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10144 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10145 poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
10146 return wi::to_poly_wide (v2) + (count - 2) * diff;
10149 /* Return the value of element I of VECTOR_CST T. */
10151 tree
10152 vector_cst_elt (const_tree t, unsigned int i)
10154 /* First handle elements that are directly encoded. */
10155 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10156 if (i < encoded_nelts)
10157 return VECTOR_CST_ENCODED_ELT (t, i);
10159 /* If there are no steps, the final encoded value is the right one. */
10160 if (!VECTOR_CST_STEPPED_P (t))
10162 /* Identify the pattern that contains element I and work out the index of
10163 the last encoded element for that pattern. */
10164 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10165 unsigned int pattern = i % npatterns;
10166 unsigned int final_i = encoded_nelts - npatterns + pattern;
10167 return VECTOR_CST_ENCODED_ELT (t, final_i);
10170 /* Otherwise work out the value from the last two encoded elements. */
10171 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10172 vector_cst_int_elt (t, i));
10175 /* Given an initializer INIT, return TRUE if INIT is zero or some
10176 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10177 null, set *NONZERO if and only if INIT is known not to be all
10178 zeros. A return value of false combined with *NONZERO set to
10179 false implies that INIT may, but need not, be all zeros. Other
10180 combinations indicate definitive answers. */
10182 bool
10183 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10185 bool dummy;
10186 if (!nonzero)
10187 nonzero = &dummy;
10189 /* Conservatively clear NONZERO and set it only if INIT is definitely
10190 not all zero. */
10191 *nonzero = false;
10193 STRIP_NOPS (init);
10195 unsigned HOST_WIDE_INT off = 0;
10197 switch (TREE_CODE (init))
10199 case INTEGER_CST:
10200 if (integer_zerop (init))
10201 return true;
10203 *nonzero = true;
10204 return false;
10206 case REAL_CST:
10207 /* ??? Note that this is not correct for C4X float formats. There,
10208 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10209 negative exponent. */
10210 if (real_zerop (init)
10211 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10212 return true;
10214 *nonzero = true;
10215 return false;
10217 case FIXED_CST:
10218 if (fixed_zerop (init))
10219 return true;
10221 *nonzero = true;
10222 return false;
10224 case COMPLEX_CST:
10225 if (integer_zerop (init)
10226 || (real_zerop (init)
10227 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10228 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
10229 return true;
10231 *nonzero = true;
10232 return false;
10234 case VECTOR_CST:
10235 if (VECTOR_CST_NPATTERNS (init) == 1
10236 && VECTOR_CST_DUPLICATE_P (init)
10237 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
10238 return true;
10240 *nonzero = true;
10241 return false;
10243 case CONSTRUCTOR:
10245 if (TREE_CLOBBER_P (init))
10246 return false;
10248 unsigned HOST_WIDE_INT idx;
10249 tree elt;
10251 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10252 if (!initializer_zerop (elt, nonzero))
10253 return false;
10255 return true;
10258 case MEM_REF:
10260 tree arg = TREE_OPERAND (init, 0);
10261 if (TREE_CODE (arg) != ADDR_EXPR)
10262 return false;
10263 tree offset = TREE_OPERAND (init, 1);
10264 if (TREE_CODE (offset) != INTEGER_CST
10265 || !tree_fits_uhwi_p (offset))
10266 return false;
10267 off = tree_to_uhwi (offset);
10268 if (INT_MAX < off)
10269 return false;
10270 arg = TREE_OPERAND (arg, 0);
10271 if (TREE_CODE (arg) != STRING_CST)
10272 return false;
10273 init = arg;
10275 /* Fall through. */
10277 case STRING_CST:
10279 gcc_assert (off <= INT_MAX);
10281 int i = off;
10282 int n = TREE_STRING_LENGTH (init);
10283 if (n <= i)
10284 return false;
10286 /* We need to loop through all elements to handle cases like
10287 "\0" and "\0foobar". */
10288 for (i = 0; i < n; ++i)
10289 if (TREE_STRING_POINTER (init)[i] != '\0')
10291 *nonzero = true;
10292 return false;
10295 return true;
10298 default:
10299 return false;
10303 /* Return true if EXPR is an initializer expression in which every element
10304 is a constant that is numerically equal to 0 or 1. The elements do not
10305 need to be equal to each other. */
10307 bool
10308 initializer_each_zero_or_onep (const_tree expr)
10310 STRIP_ANY_LOCATION_WRAPPER (expr);
10312 switch (TREE_CODE (expr))
10314 case INTEGER_CST:
10315 return integer_zerop (expr) || integer_onep (expr);
10317 case REAL_CST:
10318 return real_zerop (expr) || real_onep (expr);
10320 case VECTOR_CST:
10322 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10323 if (VECTOR_CST_STEPPED_P (expr)
10324 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10325 return false;
10327 for (unsigned int i = 0; i < nelts; ++i)
10329 tree elt = vector_cst_elt (expr, i);
10330 if (!initializer_each_zero_or_onep (elt))
10331 return false;
10334 return true;
10337 default:
10338 return false;
10342 /* Check whether vector VEC consists of all equal elements and
10343 whether the number of elements corresponds to the type of VEC.
10344 The function returns the first element of the vector
10345 or NULL_TREE if the vector is not uniform. */
10346 tree
10347 uniform_vector_p (const_tree vec)
10349 tree first, t;
10350 unsigned HOST_WIDE_INT i, nelts;
10352 if (vec == NULL_TREE)
10353 return NULL_TREE;
10355 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10357 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10358 return TREE_OPERAND (vec, 0);
10360 else if (TREE_CODE (vec) == VECTOR_CST)
10362 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10363 return VECTOR_CST_ENCODED_ELT (vec, 0);
10364 return NULL_TREE;
10367 else if (TREE_CODE (vec) == CONSTRUCTOR
10368 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10370 first = error_mark_node;
10372 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10374 if (i == 0)
10376 first = t;
10377 continue;
10379 if (!operand_equal_p (first, t, 0))
10380 return NULL_TREE;
10382 if (i != nelts)
10383 return NULL_TREE;
10385 if (TREE_CODE (first) == CONSTRUCTOR || TREE_CODE (first) == VECTOR_CST)
10386 return uniform_vector_p (first);
10387 return first;
10390 return NULL_TREE;
10393 /* If the argument is INTEGER_CST, return it. If the argument is vector
10394 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10395 return NULL_TREE.
10396 Look through location wrappers. */
10398 tree
10399 uniform_integer_cst_p (tree t)
10401 STRIP_ANY_LOCATION_WRAPPER (t);
10403 if (TREE_CODE (t) == INTEGER_CST)
10404 return t;
10406 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10408 t = uniform_vector_p (t);
10409 if (t && TREE_CODE (t) == INTEGER_CST)
10410 return t;
10413 return NULL_TREE;
10416 /* Check whether T is a constant or a constant vector in which each element
10417 E satisfies ~E + 1 == pow2; if so return ~E, otherwise return NULL_TREE. */
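/* For example, with 8-bit elements, E == 0xf0 gives ~E == 0x0f and
~E + 1 == 0x10, a power of two, so 0x0f is returned (in the corresponding
unsigned type). */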
10419 tree
10420 bitmask_inv_cst_vector_p (tree t)
10423 tree_code code = TREE_CODE (t);
10424 tree type = TREE_TYPE (t);
10426 if (!INTEGRAL_TYPE_P (type)
10427 && !VECTOR_INTEGER_TYPE_P (type))
10428 return NULL_TREE;
10430 unsigned HOST_WIDE_INT nelts = 1;
10431 tree cst;
10432 unsigned int idx = 0;
10433 bool uniform = uniform_integer_cst_p (t);
10434 tree newtype = unsigned_type_for (type);
10435 tree_vector_builder builder;
10436 if (code == INTEGER_CST)
10437 cst = t;
10438 else
10440 if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
10441 return NULL_TREE;
10443 cst = vector_cst_elt (t, 0);
10444 builder.new_vector (newtype, nelts, 1);
10447 tree ty = unsigned_type_for (TREE_TYPE (cst));
10451 if (idx > 0)
10452 cst = vector_cst_elt (t, idx);
10453 wide_int icst = wi::to_wide (cst);
10454 wide_int inv = wi::bit_not (icst);
10455 icst = wi::add (1, inv);
10456 if (wi::popcount (icst) != 1)
10457 return NULL_TREE;
10459 tree newcst = wide_int_to_tree (ty, inv);
10461 if (uniform)
10462 return build_uniform_cst (newtype, newcst);
10464 builder.quick_push (newcst);
10466 while (++idx < nelts);
10468 return builder.build ();
10471 /* If VECTOR_CST T has a single nonzero element, return the index of that
10472 element, otherwise return -1. */
10475 single_nonzero_element (const_tree t)
10477 unsigned HOST_WIDE_INT nelts;
10478 unsigned int repeat_nelts;
10479 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
10480 repeat_nelts = nelts;
10481 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
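/* Each pattern has one leading element followed by a single value that
repeats for the rest of the (possibly variable-length) vector, so only
the first NPATTERNS elements can hold a non-repeating nonzero value. */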
10483 nelts = vector_cst_encoded_nelts (t);
10484 repeat_nelts = VECTOR_CST_NPATTERNS (t);
10486 else
10487 return -1;
10489 int res = -1;
10490 for (unsigned int i = 0; i < nelts; ++i)
10492 tree elt = vector_cst_elt (t, i);
10493 if (!integer_zerop (elt) && !real_zerop (elt))
10495 if (res >= 0 || i >= repeat_nelts)
10496 return -1;
10497 res = i;
10500 return res;
10503 /* Build an empty statement at location LOC. */
10505 tree
10506 build_empty_stmt (location_t loc)
10508 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10509 SET_EXPR_LOCATION (t, loc);
10510 return t;
10514 /* Build an OMP clause with code CODE. LOC is the location of the
10515 clause. */
10517 tree
10518 build_omp_clause (location_t loc, enum omp_clause_code code)
10520 tree t;
10521 int size, length;
10523 length = omp_clause_num_ops[code];
10524 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10526 record_node_allocation_statistics (OMP_CLAUSE, size);
10528 t = (tree) ggc_internal_alloc (size);
10529 memset (t, 0, size);
10530 TREE_SET_CODE (t, OMP_CLAUSE);
10531 OMP_CLAUSE_SET_CODE (t, code);
10532 OMP_CLAUSE_LOCATION (t) = loc;
10534 return t;
10537 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10538 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10539 Except for the CODE and operand count field, other storage for the
10540 object is initialized to zeros. */
10542 tree
10543 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10545 tree t;
10546 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
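/* struct tree_exp already provides storage for one operand in its trailing
operand array, hence only LEN - 1 additional slots here. */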
10548 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10549 gcc_assert (len >= 1);
10551 record_node_allocation_statistics (code, length);
10553 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10555 TREE_SET_CODE (t, code);
10557 /* Can't use TREE_OPERAND to store the length because if checking is
10558 enabled, it will try to check the length before we store it. :-P */
10559 t->exp.operands[0] = build_int_cst (sizetype, len);
10561 return t;
10564 /* Helper function for build_call_* functions; build a CALL_EXPR with
10565 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10566 the argument slots. */
10568 static tree
10569 build_call_1 (tree return_type, tree fn, int nargs)
10571 tree t;
10573 t = build_vl_exp (CALL_EXPR, nargs + 3);
10574 TREE_TYPE (t) = return_type;
10575 CALL_EXPR_FN (t) = fn;
10576 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10578 return t;
10581 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10582 FN and a null static chain slot. NARGS is the number of call arguments
10583 which are specified as "..." arguments. */
10585 tree
10586 build_call_nary (tree return_type, tree fn, int nargs, ...)
10588 tree ret;
10589 va_list args;
10590 va_start (args, nargs);
10591 ret = build_call_valist (return_type, fn, nargs, args);
10592 va_end (args);
10593 return ret;
10596 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10597 FN and a null static chain slot. NARGS is the number of call arguments
10598 which are specified as a va_list ARGS. */
10600 tree
10601 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10603 tree t;
10604 int i;
10606 t = build_call_1 (return_type, fn, nargs);
10607 for (i = 0; i < nargs; i++)
10608 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10609 process_call_operands (t);
10610 return t;
10613 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10614 FN and a null static chain slot. NARGS is the number of call arguments
10615 which are specified as a tree array ARGS. */
10617 tree
10618 build_call_array_loc (location_t loc, tree return_type, tree fn,
10619 int nargs, const tree *args)
10621 tree t;
10622 int i;
10624 t = build_call_1 (return_type, fn, nargs);
10625 for (i = 0; i < nargs; i++)
10626 CALL_EXPR_ARG (t, i) = args[i];
10627 process_call_operands (t);
10628 SET_EXPR_LOCATION (t, loc);
10629 return t;
10632 /* Like build_call_array, but takes a vec. */
10634 tree
10635 build_call_vec (tree return_type, tree fn, const vec<tree, va_gc> *args)
10637 tree ret, t;
10638 unsigned int ix;
10640 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10641 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10642 CALL_EXPR_ARG (ret, ix) = t;
10643 process_call_operands (ret);
10644 return ret;
10647 /* Conveniently construct a function call expression. FNDECL names the
10648 function to be called and N arguments are passed in the array
10649 ARGARRAY. */
10651 tree
10652 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10654 tree fntype = TREE_TYPE (fndecl);
10655 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10657 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10660 /* Conveniently construct a function call expression. FNDECL names the
10661 function to be called and the arguments are passed in the vector
10662 VEC. */
10664 tree
10665 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10667 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10668 vec_safe_address (vec));
10672 /* Conveniently construct a function call expression. FNDECL names the
10673 function to be called, N is the number of arguments, and the "..."
10674 parameters are the argument expressions. */
10676 tree
10677 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10679 va_list ap;
10680 tree *argarray = XALLOCAVEC (tree, n);
10681 int i;
10683 va_start (ap, n);
10684 for (i = 0; i < n; i++)
10685 argarray[i] = va_arg (ap, tree);
10686 va_end (ap);
10687 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10690 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10691 varargs macros aren't supported by all bootstrap compilers. */
10693 tree
10694 build_call_expr (tree fndecl, int n, ...)
10696 va_list ap;
10697 tree *argarray = XALLOCAVEC (tree, n);
10698 int i;
10700 va_start (ap, n);
10701 for (i = 0; i < n; i++)
10702 argarray[i] = va_arg (ap, tree);
10703 va_end (ap);
10704 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10707 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10708 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10709 It will get gimplified later into an ordinary internal function. */
10711 tree
10712 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10713 tree type, int n, const tree *args)
10715 tree t = build_call_1 (type, NULL_TREE, n);
10716 for (int i = 0; i < n; ++i)
10717 CALL_EXPR_ARG (t, i) = args[i];
10718 SET_EXPR_LOCATION (t, loc);
10719 CALL_EXPR_IFN (t) = ifn;
10720 process_call_operands (t);
10721 return t;
10724 /* Build internal call expression. This is just like CALL_EXPR, except
10725 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10726 internal function. */
10728 tree
10729 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10730 tree type, int n, ...)
10732 va_list ap;
10733 tree *argarray = XALLOCAVEC (tree, n);
10734 int i;
10736 va_start (ap, n);
10737 for (i = 0; i < n; i++)
10738 argarray[i] = va_arg (ap, tree);
10739 va_end (ap);
10740 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10743 /* Return a function call to FN, if the target is guaranteed to support it,
10744 or null otherwise.
10746 N is the number of arguments, passed in the "...", and TYPE is the
10747 type of the return value. */
10749 tree
10750 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10751 int n, ...)
10753 va_list ap;
10754 tree *argarray = XALLOCAVEC (tree, n);
10755 int i;
10757 va_start (ap, n);
10758 for (i = 0; i < n; i++)
10759 argarray[i] = va_arg (ap, tree);
10760 va_end (ap);
10761 if (internal_fn_p (fn))
10763 internal_fn ifn = as_internal_fn (fn);
10764 if (direct_internal_fn_p (ifn))
10766 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10767 if (!direct_internal_fn_supported_p (ifn, types,
10768 OPTIMIZE_FOR_BOTH))
10769 return NULL_TREE;
10771 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10773 else
10775 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10776 if (!fndecl)
10777 return NULL_TREE;
10778 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10782 /* Return a function call to the appropriate builtin alloca variant.
10784 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10785 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10786 bound for SIZE in case it is not a fixed value. */
10788 tree
10789 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10791 if (max_size >= 0)
10793 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10794 return
10795 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10797 else if (align > 0)
10799 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10800 return build_call_expr (t, 2, size, size_int (align));
10802 else
10804 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10805 return build_call_expr (t, 1, size);
10809 /* The built-in decl to use to mark code points believed to be unreachable.
10810 Typically __builtin_unreachable, but __builtin_trap if
10811 -fsanitize=unreachable -fsanitize-trap=unreachable. If only
10812 -fsanitize=unreachable, we rely on sanopt to replace calls with the
10813 appropriate ubsan function. When building a call directly, use
10814 {gimple_,}build_builtin_unreachable instead. */
10816 tree
10817 builtin_decl_unreachable ()
10819 enum built_in_function fncode = BUILT_IN_UNREACHABLE;
10821 if (sanitize_flags_p (SANITIZE_UNREACHABLE)
10822 ? (flag_sanitize_trap & SANITIZE_UNREACHABLE)
10823 : flag_unreachable_traps)
10824 fncode = BUILT_IN_TRAP;
10825 /* For non-trapping sanitize, we will rewrite __builtin_unreachable () later,
10826 in the sanopt pass. */
10828 return builtin_decl_explicit (fncode);
10831 /* Build a call to __builtin_unreachable, possibly rewritten by
10832 -fsanitize=unreachable. Use this rather than the above when practical. */
10834 tree
10835 build_builtin_unreachable (location_t loc)
10837 tree data = NULL_TREE;
10838 tree fn = sanitize_unreachable_fn (&data, loc);
10839 return build_call_expr_loc (loc, fn, data != NULL_TREE, data);
10842 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10843 if SIZE == -1) and return a tree node representing a char * pointer to
10844 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10845 the STRING_CST value is the LEN bytes at STR (the representation
10846 of the string, which may be wide). Otherwise it's all zeros. */
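/* For example, build_string_literal (6, "hello") yields the equivalent of
&"hello"[0], with element type const char. */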
10848 tree
10849 build_string_literal (unsigned len, const char *str /* = NULL */,
10850 tree eltype /* = char_type_node */,
10851 unsigned HOST_WIDE_INT size /* = -1 */)
10853 tree t = build_string (len, str);
10854 /* Set the maximum valid index based on the string length or SIZE. */
10855 unsigned HOST_WIDE_INT maxidx
10856 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
10858 tree index = build_index_type (size_int (maxidx));
10859 eltype = build_type_variant (eltype, 1, 0);
10860 tree type = build_array_type (eltype, index);
10861 TREE_TYPE (t) = type;
10862 TREE_CONSTANT (t) = 1;
10863 TREE_READONLY (t) = 1;
10864 TREE_STATIC (t) = 1;
10866 type = build_pointer_type (eltype);
10867 t = build1 (ADDR_EXPR, type,
10868 build4 (ARRAY_REF, eltype,
10869 t, integer_zero_node, NULL_TREE, NULL_TREE));
10870 return t;
10875 /* Return true if T (assumed to be a DECL) must be assigned a memory
10876 location. */
10878 bool
10879 needs_to_live_in_memory (const_tree t)
10881 return (TREE_ADDRESSABLE (t)
10882 || is_global_var (t)
10883 || (TREE_CODE (t) == RESULT_DECL
10884 && !DECL_BY_REFERENCE (t)
10885 && aggregate_value_p (t, current_function_decl)));
10888 /* Return the value of the constant X, sign-extended. */
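/* For example, for an 8-bit type a constant with value 0xf0 is sign-extended
and returned as (HOST_WIDE_INT) -16. */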
10890 HOST_WIDE_INT
10891 int_cst_value (const_tree x)
10893 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10894 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10896 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10897 gcc_assert (cst_and_fits_in_hwi (x));
10899 if (bits < HOST_BITS_PER_WIDE_INT)
10901 bool negative = ((val >> (bits - 1)) & 1) != 0;
10902 if (negative)
10903 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
10904 else
10905 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
10908 return val;
10911 /* If TYPE is an integral or pointer type, return an integer type with
10912 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10913 if TYPE is already an integer type of signedness UNSIGNEDP.
10914 If TYPE is a floating-point type, return an integer type with the same
10915 bitsize and with the signedness given by UNSIGNEDP; this is useful
10916 when doing bit-level operations on a floating-point value. */
10918 tree
10919 signed_or_unsigned_type_for (int unsignedp, tree type)
10921 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
10922 return type;
10924 if (TREE_CODE (type) == VECTOR_TYPE)
10926 tree inner = TREE_TYPE (type);
10927 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10928 if (!inner2)
10929 return NULL_TREE;
10930 if (inner == inner2)
10931 return type;
10932 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10935 if (TREE_CODE (type) == COMPLEX_TYPE)
10937 tree inner = TREE_TYPE (type);
10938 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10939 if (!inner2)
10940 return NULL_TREE;
10941 if (inner == inner2)
10942 return type;
10943 return build_complex_type (inner2);
10946 unsigned int bits;
10947 if (INTEGRAL_TYPE_P (type)
10948 || POINTER_TYPE_P (type)
10949 || TREE_CODE (type) == OFFSET_TYPE)
10950 bits = TYPE_PRECISION (type);
10951 else if (TREE_CODE (type) == REAL_TYPE)
10952 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
10953 else
10954 return NULL_TREE;
10956 return build_nonstandard_integer_type (bits, unsignedp);
10959 /* If TYPE is an integral or pointer type, return an integer type with
10960 the same precision which is unsigned, or itself if TYPE is already an
10961 unsigned integer type. If TYPE is a floating-point type, return an
10962 unsigned integer type with the same bitsize as TYPE. */
10964 tree
10965 unsigned_type_for (tree type)
10967 return signed_or_unsigned_type_for (1, type);
10970 /* If TYPE is an integral or pointer type, return an integer type with
10971 the same precision which is signed, or itself if TYPE is already a
10972 signed integer type. If TYPE is a floating-point type, return a
10973 signed integer type with the same bitsize as TYPE. */
10975 tree
10976 signed_type_for (tree type)
10978 return signed_or_unsigned_type_for (0, type);
10981 /* - For VECTOR_TYPEs:
10982 - The truth type must be a VECTOR_BOOLEAN_TYPE.
10983 - The number of elements must match (known_eq).
10984 - The mode returned by targetm.vectorize.get_mask_mode for TYPE's mode
10985 must exist and be exactly the mode of the truth type.
10986 - Otherwise, the truth type must be a BOOLEAN_TYPE
10987 or useless_type_conversion_p to BOOLEAN_TYPE. */
10988 bool
10989 is_truth_type_for (tree type, tree truth_type)
10991 machine_mode mask_mode = TYPE_MODE (truth_type);
10992 machine_mode vmode = TYPE_MODE (type);
10993 machine_mode tmask_mode;
10995 if (TREE_CODE (type) == VECTOR_TYPE)
10997 if (VECTOR_BOOLEAN_TYPE_P (truth_type)
10998 && known_eq (TYPE_VECTOR_SUBPARTS (type),
10999 TYPE_VECTOR_SUBPARTS (truth_type))
11000 && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
11001 && tmask_mode == mask_mode)
11002 return true;
11004 return false;
11007 return useless_type_conversion_p (boolean_type_node, truth_type);
11010 /* If TYPE is a vector type, return the corresponding boolean vector type
11011 (one boolean result per subpart). Otherwise return boolean_type_node. */
11013 tree
11014 truth_type_for (tree type)
11016 if (TREE_CODE (type) == VECTOR_TYPE)
11018 if (VECTOR_BOOLEAN_TYPE_P (type))
11019 return type;
11020 return build_truth_vector_type_for (type);
11022 else
11023 return boolean_type_node;
11026 /* Returns the largest value obtainable by casting something in INNER type to
11027 OUTER type. */
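/* For example, casting from a 32-bit unsigned INNER type to a 16-bit signed
OUTER type selects prec == 15 below, giving an upper bound of 32767. */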
11029 tree
11030 upper_bound_in_type (tree outer, tree inner)
11032 unsigned int det = 0;
11033 unsigned oprec = TYPE_PRECISION (outer);
11034 unsigned iprec = TYPE_PRECISION (inner);
11035 unsigned prec;
11037 /* Compute a unique number for every combination. */
11038 det |= (oprec > iprec) ? 4 : 0;
11039 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11040 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11042 /* Determine the exponent to use. */
11043 switch (det)
11045 case 0:
11046 case 1:
11047 /* oprec <= iprec, outer: signed, inner: don't care. */
11048 prec = oprec - 1;
11049 break;
11050 case 2:
11051 case 3:
11052 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11053 prec = oprec;
11054 break;
11055 case 4:
11056 /* oprec > iprec, outer: signed, inner: signed. */
11057 prec = iprec - 1;
11058 break;
11059 case 5:
11060 /* oprec > iprec, outer: signed, inner: unsigned. */
11061 prec = iprec;
11062 break;
11063 case 6:
11064 /* oprec > iprec, outer: unsigned, inner: signed. */
11065 prec = oprec;
11066 break;
11067 case 7:
11068 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11069 prec = iprec;
11070 break;
11071 default:
11072 gcc_unreachable ();
11075 return wide_int_to_tree (outer,
11076 wi::mask (prec, false, TYPE_PRECISION (outer)));
11079 /* Returns the smallest value obtainable by casting something in INNER type to
11080 OUTER type. */
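/* For example, widening a signed 8-bit INNER type to a signed 32-bit OUTER
type uses prec == 8, giving a lower bound of -128. */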
11082 tree
11083 lower_bound_in_type (tree outer, tree inner)
11085 unsigned oprec = TYPE_PRECISION (outer);
11086 unsigned iprec = TYPE_PRECISION (inner);
11088 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11089 and obtain 0. */
11090 if (TYPE_UNSIGNED (outer)
11091 /* If we are widening something of an unsigned type, OUTER type
11092 contains all values of INNER type. In particular, both INNER
11093 and OUTER types have zero in common. */
11094 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11095 return build_int_cst (outer, 0);
11096 else
11098 /* If we are widening a signed type to another signed type, we
11099 want to obtain -2^^(iprec-1). If we are keeping the
11100 precision or narrowing to a signed type, we want to obtain
11101 -2^(oprec-1). */
11102 unsigned prec = oprec > iprec ? iprec : oprec;
11103 return wide_int_to_tree (outer,
11104 wi::mask (prec - 1, true,
11105 TYPE_PRECISION (outer)));
11109 /* Return nonzero if two operands that are suitable for PHI nodes are
11110 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11111 SSA_NAME or invariant. Note that this is strictly an optimization.
11112 That is, callers of this function can directly call operand_equal_p
11113 and get the same result, only slower. */
11116 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11118 if (arg0 == arg1)
11119 return 1;
11120 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11121 return 0;
11122 return operand_equal_p (arg0, arg1, 0);
11125 /* Returns the number of zeros at the end of the binary representation of X. */
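/* For example, for X == 24 (binary 11000) the result is 3. */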
11127 tree
11128 num_ending_zeros (const_tree x)
11130 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11134 #define WALK_SUBTREE(NODE) \
11135 do \
11137 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11138 if (result) \
11139 return result; \
11141 while (0)
11143 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11144 to be walked whenever a type is seen in the tree. The rest of the operands
11145 and the return value are as for walk_tree. */
11147 static tree
11148 walk_type_fields (tree type, walk_tree_fn func, void *data,
11149 hash_set<tree> *pset, walk_tree_lh lh)
11151 tree result = NULL_TREE;
11153 switch (TREE_CODE (type))
11155 case POINTER_TYPE:
11156 case REFERENCE_TYPE:
11157 case VECTOR_TYPE:
11158 /* We have to worry about mutually recursive pointers. These can't
11159 be written in C. They can in Ada. It's pathological, but
11160 there's an ACATS test (c38102a) that checks it. Deal with this
11161 by checking if we're pointing to another pointer, that one
11162 points to another pointer, that one does too, and we have no htab.
11163 If so, get a hash table. We check three levels deep to avoid
11164 the cost of the hash table if we don't need one. */
11165 if (POINTER_TYPE_P (TREE_TYPE (type))
11166 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11167 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11168 && !pset)
11170 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11171 func, data);
11172 if (result)
11173 return result;
11175 break;
11178 /* fall through */
11180 case COMPLEX_TYPE:
11181 WALK_SUBTREE (TREE_TYPE (type));
11182 break;
11184 case METHOD_TYPE:
11185 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11187 /* Fall through. */
11189 case FUNCTION_TYPE:
11190 WALK_SUBTREE (TREE_TYPE (type));
11192 tree arg;
11194 /* We never want to walk into default arguments. */
11195 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11196 WALK_SUBTREE (TREE_VALUE (arg));
11198 break;
11200 case ARRAY_TYPE:
11201 /* Don't follow this node's type if it is a pointer, for fear that
11202 we'll have infinite recursion. If we have a PSET, then we
11203 need not fear. */
11204 if (pset
11205 || (!POINTER_TYPE_P (TREE_TYPE (type))
11206 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11207 WALK_SUBTREE (TREE_TYPE (type));
11208 WALK_SUBTREE (TYPE_DOMAIN (type));
11209 break;
11211 case OFFSET_TYPE:
11212 WALK_SUBTREE (TREE_TYPE (type));
11213 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11214 break;
11216 default:
11217 break;
11220 return NULL_TREE;
11223 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11224 called with the DATA and the address of each sub-tree. If FUNC returns a
11225 non-NULL value, the traversal is stopped, and the value returned by FUNC
11226 is returned. If PSET is non-NULL it is used to record the nodes visited,
11227 and to avoid visiting a node more than once. */
11229 tree
11230 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11231 hash_set<tree> *pset, walk_tree_lh lh)
11233 enum tree_code code;
11234 int walk_subtrees;
11235 tree result;
11237 #define WALK_SUBTREE_TAIL(NODE) \
11238 do \
11240 tp = & (NODE); \
11241 goto tail_recurse; \
11243 while (0)
11245 tail_recurse:
11246 /* Skip empty subtrees. */
11247 if (!*tp)
11248 return NULL_TREE;
11250 /* Don't walk the same tree twice, if the user has requested
11251 that we avoid doing so. */
11252 if (pset && pset->add (*tp))
11253 return NULL_TREE;
11255 /* Call the function. */
11256 walk_subtrees = 1;
11257 result = (*func) (tp, &walk_subtrees, data);
11259 /* If we found something, return it. */
11260 if (result)
11261 return result;
11263 code = TREE_CODE (*tp);
11265 /* Even if we didn't, FUNC may have decided that there was nothing
11266 interesting below this point in the tree. */
11267 if (!walk_subtrees)
11269 /* But we still need to check our siblings. */
11270 if (code == TREE_LIST)
11271 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11272 else if (code == OMP_CLAUSE)
11273 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11274 else
11275 return NULL_TREE;
11278 if (lh)
11280 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11281 if (result || !walk_subtrees)
11282 return result;
11285 switch (code)
11287 case ERROR_MARK:
11288 case IDENTIFIER_NODE:
11289 case INTEGER_CST:
11290 case REAL_CST:
11291 case FIXED_CST:
11292 case STRING_CST:
11293 case BLOCK:
11294 case PLACEHOLDER_EXPR:
11295 case SSA_NAME:
11296 case FIELD_DECL:
11297 case RESULT_DECL:
11298 /* None of these have subtrees other than those already walked
11299 above. */
11300 break;
11302 case TREE_LIST:
11303 WALK_SUBTREE (TREE_VALUE (*tp));
11304 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11306 case TREE_VEC:
11308 int len = TREE_VEC_LENGTH (*tp);
11310 if (len == 0)
11311 break;
11313 /* Walk all elements but the first. */
11314 while (--len)
11315 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11317 /* Now walk the first one as a tail call. */
11318 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11321 case VECTOR_CST:
11323 unsigned len = vector_cst_encoded_nelts (*tp);
11324 if (len == 0)
11325 break;
11326 /* Walk all elements but the first. */
11327 while (--len)
11328 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
11329 /* Now walk the first one as a tail call. */
11330 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
11333 case COMPLEX_CST:
11334 WALK_SUBTREE (TREE_REALPART (*tp));
11335 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11337 case CONSTRUCTOR:
11339 unsigned HOST_WIDE_INT idx;
11340 constructor_elt *ce;
11342 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11343 idx++)
11344 WALK_SUBTREE (ce->value);
11346 break;
11348 case SAVE_EXPR:
11349 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11351 case BIND_EXPR:
11353 tree decl;
11354 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11356 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11357 into declarations that are just mentioned, rather than
11358 declared; they don't really belong to this part of the tree.
11359 And, we can see cycles: the initializer for a declaration
11360 can refer to the declaration itself. */
11361 WALK_SUBTREE (DECL_INITIAL (decl));
11362 WALK_SUBTREE (DECL_SIZE (decl));
11363 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11365 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11368 case STATEMENT_LIST:
11370 tree_stmt_iterator i;
11371 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11372 WALK_SUBTREE (*tsi_stmt_ptr (i));
11374 break;
11376 case OMP_CLAUSE:
11378 int len = omp_clause_num_ops[OMP_CLAUSE_CODE (*tp)];
11379 for (int i = 0; i < len; i++)
11380 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11381 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11384 case TARGET_EXPR:
11386 int i, len;
11388 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11389 But, we only want to walk once. */
11390 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11391 for (i = 0; i < len; ++i)
11392 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11393 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11396 case DECL_EXPR:
11397 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11398 defining. We only want to walk into these fields of a type in this
11399 case and not in the general case of a mere reference to the type.
11401 The criterion is as follows: if the field can be an expression, it
11402 must be walked only here. This should be in keeping with the fields
11403 that are directly gimplified in gimplify_type_sizes in order for the
11404 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11405 variable-sized types.
11407 Note that DECLs get walked as part of processing the BIND_EXPR. */
11408 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11410 /* Call the function for the decl so e.g. copy_tree_body_r can
11411 replace it with the remapped one. */
11412 result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
11413 if (result || !walk_subtrees)
11414 return result;
11416 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11417 if (TREE_CODE (*type_p) == ERROR_MARK)
11418 return NULL_TREE;
11420 /* Call the function for the type. See if it returns anything or
11421 doesn't want us to continue. If we are to continue, walk both
11422 the normal fields and those for the declaration case. */
11423 result = (*func) (type_p, &walk_subtrees, data);
11424 if (result || !walk_subtrees)
11425 return result;
11427 /* But do not walk a pointed-to type since it may itself need to
11428 be walked in the declaration case if it isn't anonymous. */
11429 if (!POINTER_TYPE_P (*type_p))
11431 result = walk_type_fields (*type_p, func, data, pset, lh);
11432 if (result)
11433 return result;
11436 /* If this is a record type, also walk the fields. */
11437 if (RECORD_OR_UNION_TYPE_P (*type_p))
11439 tree field;
11441 for (field = TYPE_FIELDS (*type_p); field;
11442 field = DECL_CHAIN (field))
11444 /* We'd like to look at the type of the field, but we can
11445 easily get infinite recursion. So assume it's pointed
11446 to elsewhere in the tree. Also, ignore things that
11447 aren't fields. */
11448 if (TREE_CODE (field) != FIELD_DECL)
11449 continue;
11451 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11452 WALK_SUBTREE (DECL_SIZE (field));
11453 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11454 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11455 WALK_SUBTREE (DECL_QUALIFIER (field));
11459 /* Same for scalar types. */
11460 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11461 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11462 || TREE_CODE (*type_p) == INTEGER_TYPE
11463 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11464 || TREE_CODE (*type_p) == REAL_TYPE)
11466 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11467 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11470 WALK_SUBTREE (TYPE_SIZE (*type_p));
11471 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11473 /* FALLTHRU */
11475 default:
11476 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11478 int i, len;
11480 /* Walk over all the sub-trees of this operand. */
11481 len = TREE_OPERAND_LENGTH (*tp);
11483 /* Go through the subtrees. We need to do this in forward order so
11484 that the scope of a FOR_EXPR is handled properly. */
11485 if (len)
11487 for (i = 0; i < len - 1; ++i)
11488 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11489 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11492 /* If this is a type, walk the needed fields in the type. */
11493 else if (TYPE_P (*tp))
11494 return walk_type_fields (*tp, func, data, pset, lh);
11495 break;
11498 /* We didn't find what we were looking for. */
11499 return NULL_TREE;
11501 #undef WALK_SUBTREE_TAIL
11503 #undef WALK_SUBTREE
11505 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11507 tree
11508 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11509 walk_tree_lh lh)
11511 tree result;
11513 hash_set<tree> pset;
11514 result = walk_tree_1 (tp, func, data, &pset, lh);
11515 return result;
11519 tree
11520 tree_block (tree t)
11522 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11524 if (IS_EXPR_CODE_CLASS (c))
11525 return LOCATION_BLOCK (t->exp.locus);
11526 gcc_unreachable ();
11527 return NULL;
11530 void
11531 tree_set_block (tree t, tree b)
11533 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11535 if (IS_EXPR_CODE_CLASS (c))
11537 t->exp.locus = set_block (t->exp.locus, b);
11539 else
11540 gcc_unreachable ();
11543 /* Create a nameless artificial label and put it in the current
11544 function context. The label has a location of LOC. Returns the
11545 newly created label. */
11547 tree
11548 create_artificial_label (location_t loc)
11550 tree lab = build_decl (loc,
11551 LABEL_DECL, NULL_TREE, void_type_node);
11553 DECL_ARTIFICIAL (lab) = 1;
11554 DECL_IGNORED_P (lab) = 1;
11555 DECL_CONTEXT (lab) = current_function_decl;
11556 return lab;
11559 /* Given a tree, try to return a useful variable name that we can use
11560 to prefix a temporary that is being assigned the value of the tree.
11561 I.e. given <temp> = &A, return A. */
11563 const char *
11564 get_name (tree t)
11566 tree stripped_decl;
11568 stripped_decl = t;
11569 STRIP_NOPS (stripped_decl);
11570 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11571 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11572 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11574 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11575 if (!name)
11576 return NULL;
11577 return IDENTIFIER_POINTER (name);
11579 else
11581 switch (TREE_CODE (stripped_decl))
11583 case ADDR_EXPR:
11584 return get_name (TREE_OPERAND (stripped_decl, 0));
11585 default:
11586 return NULL;
11591 /* Return true if the function type FNTYPE has a variable argument list. */
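/* For example, 'int f (int, ...)' is a stdarg function, while 'int f (void)'
and an old-style unprototyped 'int f ()' are not. */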
11593 bool
11594 stdarg_p (const_tree fntype)
11596 function_args_iterator args_iter;
11597 tree n = NULL_TREE, t;
11599 if (!fntype)
11600 return false;
11602 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11603 return true;
11605 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11607 n = t;
11610 return n != NULL_TREE && n != void_type_node;
11613 /* Return true if the function type FNTYPE has a prototype. */
11615 bool
11616 prototype_p (const_tree fntype)
11618 tree t;
11620 gcc_assert (fntype != NULL_TREE);
11622 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11623 return true;
11625 t = TYPE_ARG_TYPES (fntype);
11626 return (t != NULL_TREE);
11629 /* If BLOCK is inlined from an __attribute__((__artificial__))
11630 routine, return a pointer to the location from which it has been
11631 called. */
11632 location_t *
11633 block_nonartificial_location (tree block)
11635 location_t *ret = NULL;
11637 while (block && TREE_CODE (block) == BLOCK
11638 && BLOCK_ABSTRACT_ORIGIN (block))
11640 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11641 if (TREE_CODE (ao) == FUNCTION_DECL)
11643 /* If AO is an artificial inline, point RET to the
11644 call site locus at which it has been inlined and continue
11645 the loop, in case AO's caller is also an artificial
11646 inline. */
11647 if (DECL_DECLARED_INLINE_P (ao)
11648 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11649 ret = &BLOCK_SOURCE_LOCATION (block);
11650 else
11651 break;
11653 else if (TREE_CODE (ao) != BLOCK)
11654 break;
11656 block = BLOCK_SUPERCONTEXT (block);
11658 return ret;
11662 /* If EXP is inlined from an __attribute__((__artificial__))
11663 function, return the location of the original call expression. */
11665 location_t
11666 tree_nonartificial_location (tree exp)
11668 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11670 if (loc)
11671 return *loc;
11672 else
11673 return EXPR_LOCATION (exp);
11676 /* Return the location into which EXP has been inlined. Analogous
11677 to tree_nonartificial_location() above but not limited to artificial
11678 functions declared inline. If SYSTEM_HEADER is true, return
11679 the macro expansion point of the location if it's in a system header. */
11681 location_t
11682 tree_inlined_location (tree exp, bool system_header /* = true */)
11684 location_t loc = UNKNOWN_LOCATION;
11686 tree block = TREE_BLOCK (exp);
11688 while (block && TREE_CODE (block) == BLOCK
11689 && BLOCK_ABSTRACT_ORIGIN (block))
11691 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11692 if (TREE_CODE (ao) == FUNCTION_DECL)
11693 loc = BLOCK_SOURCE_LOCATION (block);
11694 else if (TREE_CODE (ao) != BLOCK)
11695 break;
11697 block = BLOCK_SUPERCONTEXT (block);
11700 if (loc == UNKNOWN_LOCATION)
11702 loc = EXPR_LOCATION (exp);
11703 if (system_header)
11704 /* Only consider macro expansion when the block traversal failed
11705 to find a location. Otherwise it's not relevant. */
11706 return expansion_point_location_if_in_system_header (loc);
11709 return loc;
11712 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11713 nodes. */
11715 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11717 hashval_t
11718 cl_option_hasher::hash (tree x)
11720 const_tree const t = x;
11722 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11723 return cl_optimization_hash (TREE_OPTIMIZATION (t));
11724 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11725 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11726 else
11727 gcc_unreachable ();
11730 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11731 TARGET_OPTION tree node) is the same as that given by *Y, which is a
11732 node of the same kind. */
11734 bool
11735 cl_option_hasher::equal (tree x, tree y)
11737 const_tree const xt = x;
11738 const_tree const yt = y;
11740 if (TREE_CODE (xt) != TREE_CODE (yt))
11741 return 0;
11743 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11744 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11745 TREE_OPTIMIZATION (yt));
11746 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11747 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11748 TREE_TARGET_OPTION (yt));
11749 else
11750 gcc_unreachable ();
11753 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11755 tree
11756 build_optimization_node (struct gcc_options *opts,
11757 struct gcc_options *opts_set)
11759 tree t;
11761 /* Use the cache of optimization nodes. */
11763 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11764 opts, opts_set);
11766 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11767 t = *slot;
11768 if (!t)
11770 /* Insert this one into the hash table. */
11771 t = cl_optimization_node;
11772 *slot = t;
11774 /* Make a new node for next time round. */
11775 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11778 return t;
11781 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11783 tree
11784 build_target_option_node (struct gcc_options *opts,
11785 struct gcc_options *opts_set)
11787 tree t;
11789 /* Use the cache of target option nodes. */
11791 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11792 opts, opts_set);
11794 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11795 t = *slot;
11796 if (!t)
11798 /* Insert this one into the hash table. */
11799 t = cl_target_option_node;
11800 *slot = t;
11802 /* Make a new node for next time round. */
11803 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11806 return t;
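/* Both builders above hash-cons their results: the scratch node
   (cl_optimization_node or cl_target_option_node) is filled in, looked up
   in cl_option_hash_table, and becomes the canonical node only when no
   equal node already exists, so identical option sets share one
   pointer-identical tree.  */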
11809 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11810 so that they aren't saved during PCH writing. */
11812 void
11813 prepare_target_option_nodes_for_pch (void)
11815 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11816 for (; iter != cl_option_hash_table->end (); ++iter)
11817 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11818 TREE_TARGET_GLOBALS (*iter) = NULL;
11821 /* Determine the "ultimate origin" of a block. */
11823 tree
11824 block_ultimate_origin (const_tree block)
11826 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11828 if (origin == NULL_TREE)
11829 return NULL_TREE;
11830 else
11832 gcc_checking_assert ((DECL_P (origin)
11833 && DECL_ORIGIN (origin) == origin)
11834 || BLOCK_ORIGIN (origin) == origin);
11835 return origin;
11839 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11840 no instruction. */
11842 bool
11843 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11845 /* Do not strip casts into or out of differing address spaces. */
11846 if (POINTER_TYPE_P (outer_type)
11847 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11849 if (!POINTER_TYPE_P (inner_type)
11850 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11851 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11852 return false;
11854 else if (POINTER_TYPE_P (inner_type)
11855 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11857 /* We already know that outer_type is not a pointer with
11858 a non-generic address space. */
11859 return false;
11862 /* Use precision rather than machine mode when we can, which gives
11863 the correct answer even for submode (bit-field) types. */
11864 if ((INTEGRAL_TYPE_P (outer_type)
11865 || POINTER_TYPE_P (outer_type)
11866 || TREE_CODE (outer_type) == OFFSET_TYPE)
11867 && (INTEGRAL_TYPE_P (inner_type)
11868 || POINTER_TYPE_P (inner_type)
11869 || TREE_CODE (inner_type) == OFFSET_TYPE))
11870 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11872 /* Otherwise fall back on comparing machine modes (e.g. for
11873 aggregate types, floats). */
11874 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
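/* For example, a conversion between "int" and "unsigned int" (equal
   precision) is a nop, a conversion from "short" to "int" is not, and
   conversions into or out of a non-generic address space are never
   considered nops.  */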
11877 /* Return true iff conversion in EXP generates no instruction. Mark
11878 it inline so that we fully inline into the stripping functions even
11879 though we have two uses of this function. */
11881 static inline bool
11882 tree_nop_conversion (const_tree exp)
11884 tree outer_type, inner_type;
11886 if (location_wrapper_p (exp))
11887 return true;
11888 if (!CONVERT_EXPR_P (exp)
11889 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11890 return false;
11892 outer_type = TREE_TYPE (exp);
11893 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11894 if (!inner_type || inner_type == error_mark_node)
11895 return false;
11897 return tree_nop_conversion_p (outer_type, inner_type);
11900 /* Return true iff conversion in EXP generates no instruction. Don't
11901 consider conversions changing the signedness. */
11903 static bool
11904 tree_sign_nop_conversion (const_tree exp)
11906 tree outer_type, inner_type;
11908 if (!tree_nop_conversion (exp))
11909 return false;
11911 outer_type = TREE_TYPE (exp);
11912 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11914 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11915 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11918 /* Strip conversions from EXP according to tree_nop_conversion and
11919 return the resulting expression. */
11921 tree
11922 tree_strip_nop_conversions (tree exp)
11924 while (tree_nop_conversion (exp))
11925 exp = TREE_OPERAND (exp, 0);
11926 return exp;
11929 /* Strip conversions from EXP according to tree_sign_nop_conversion
11930 and return the resulting expression. */
11932 tree
11933 tree_strip_sign_nop_conversions (tree exp)
11935 while (tree_sign_nop_conversion (exp))
11936 exp = TREE_OPERAND (exp, 0);
11937 return exp;
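/* For example (hypothetical variable), with "x" of type int the
   expression (int)(unsigned)x is reduced to "x" by
   tree_strip_nop_conversions, while tree_strip_sign_nop_conversions
   leaves it alone because the outermost conversion changes signedness.  */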
11940 /* Avoid any floating point extensions from EXP. */
11941 tree
11942 strip_float_extensions (tree exp)
11944 tree sub, expt, subt;
11946 /* For a floating point constant, look up the narrowest type that can hold
11947 it properly and handle it like (type)(narrowest_type)constant.
11948 This way we can optimize, for instance, a=a*2.0 where "a" is a float
11949 but 2.0 is a double constant. */
11950 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11952 REAL_VALUE_TYPE orig;
11953 tree type = NULL;
11955 orig = TREE_REAL_CST (exp);
11956 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11957 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11958 type = float_type_node;
11959 else if (TYPE_PRECISION (TREE_TYPE (exp))
11960 > TYPE_PRECISION (double_type_node)
11961 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11962 type = double_type_node;
11963 if (type)
11964 return build_real_truncate (type, orig);
11967 if (!CONVERT_EXPR_P (exp))
11968 return exp;
11970 sub = TREE_OPERAND (exp, 0);
11971 subt = TREE_TYPE (sub);
11972 expt = TREE_TYPE (exp);
11974 if (!FLOAT_TYPE_P (subt))
11975 return exp;
11977 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11978 return exp;
11980 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11981 return exp;
11983 return strip_float_extensions (sub);
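/* For example (hypothetical operands): for "(double) f" with "f" of type
   float, the float operand "f" is returned, and a double REAL_CST such as
   2.0 that truncates exactly to float is rebuilt as a float constant.  */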
11986 /* Strip out all handled components that produce invariant
11987 offsets. */
11989 const_tree
11990 strip_invariant_refs (const_tree op)
11992 while (handled_component_p (op))
11994 switch (TREE_CODE (op))
11996 case ARRAY_REF:
11997 case ARRAY_RANGE_REF:
11998 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11999 || TREE_OPERAND (op, 2) != NULL_TREE
12000 || TREE_OPERAND (op, 3) != NULL_TREE)
12001 return NULL;
12002 break;
12004 case COMPONENT_REF:
12005 if (TREE_OPERAND (op, 2) != NULL_TREE)
12006 return NULL;
12007 break;
12009 default:;
12011 op = TREE_OPERAND (op, 0);
12014 return op;
12017 /* Strip handled components with zero offset from OP. */
12019 tree
12020 strip_zero_offset_components (tree op)
12022 while (TREE_CODE (op) == COMPONENT_REF
12023 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (op, 1)))
12024 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (op, 1))))
12025 op = TREE_OPERAND (op, 0);
12026 return op;
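/* For example (hypothetical aggregate), for a reference s.first.x where
   both "first" and "x" sit at offset zero within their containing
   records, the underlying VAR_DECL "s" is returned.  */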
12029 static GTY(()) tree gcc_eh_personality_decl;
12031 /* Return the GCC personality function decl. */
12033 tree
12034 lhd_gcc_personality (void)
12036 if (!gcc_eh_personality_decl)
12037 gcc_eh_personality_decl = build_personality_function ("gcc");
12038 return gcc_eh_personality_decl;
12041 /* TARGET is a call target of GIMPLE call statement
12042 (obtained by gimple_call_fn). Return true if it is
12043 OBJ_TYPE_REF representing a virtual call of a C++ method.
12044 (As opposed to OBJ_TYPE_REF representing objc calls
12045 through a cast where middle-end devirtualization machinery
12046 can't apply.) FOR_DUMP_P is true when being called from
12047 the dump routines. */
12049 bool
12050 virtual_method_call_p (const_tree target, bool for_dump_p)
12052 if (TREE_CODE (target) != OBJ_TYPE_REF)
12053 return false;
12054 tree t = TREE_TYPE (target);
12055 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12056 t = TREE_TYPE (t);
12057 if (TREE_CODE (t) == FUNCTION_TYPE)
12058 return false;
12059 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12060 /* If we do not have BINFO associated, it means that type was built
12061 without devirtualization enabled. Do not consider this a virtual
12062 call. */
12063 if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
12064 return false;
12065 return true;
12068 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12070 static tree
12071 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12073 unsigned int i;
12074 tree base_binfo, b;
12076 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12077 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12078 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12079 return base_binfo;
12080 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12081 return b;
12082 return NULL;
12085 /* Try to find a base info of BINFO that would have its field decl at offset
12086 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12087 found, return it, otherwise return NULL_TREE. */
12089 tree
12090 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12092 tree type = BINFO_TYPE (binfo);
12094 while (true)
12096 HOST_WIDE_INT pos, size;
12097 tree fld;
12098 int i;
12100 if (types_same_for_odr (type, expected_type))
12101 return binfo;
12102 if (maybe_lt (offset, 0))
12103 return NULL_TREE;
12105 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12107 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12108 continue;
12110 pos = int_bit_position (fld);
12111 size = tree_to_uhwi (DECL_SIZE (fld));
12112 if (known_in_range_p (offset, pos, size))
12113 break;
12115 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12116 return NULL_TREE;
12118 /* Offset 0 indicates the primary base, whose vtable contents are
12119 represented in the binfo for the derived class. */
12120 else if (maybe_ne (offset, 0))
12122 tree found_binfo = NULL, base_binfo;
12123 /* Offsets in BINFO are in bytes relative to the whole structure
12124 while POS is in bits relative to the containing field. */
12125 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12126 / BITS_PER_UNIT);
12128 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12129 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12130 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12132 found_binfo = base_binfo;
12133 break;
12135 if (found_binfo)
12136 binfo = found_binfo;
12137 else
12138 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12139 binfo_offset);
12142 type = TREE_TYPE (fld);
12143 offset -= pos;
12147 /* Returns true if X is a typedef decl. */
12149 bool
12150 is_typedef_decl (const_tree x)
12152 return (x && TREE_CODE (x) == TYPE_DECL
12153 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12156 /* Returns true iff TYPE is a type variant created for a typedef. */
12158 bool
12159 typedef_variant_p (const_tree type)
12161 return is_typedef_decl (TYPE_NAME (type));
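/* For example, after "typedef int myint;" the TYPE_DECL for "myint" has
   DECL_ORIGINAL_TYPE pointing to "int", so is_typedef_decl holds for that
   decl and typedef_variant_p holds for the variant of "int" whose
   TYPE_NAME is that decl.  */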
12164 /* PR 84195: Replace control characters in "unescaped" with their
12165 escaped equivalents. Allow newlines if -fmessage-length has
12166 been set to a non-zero value. This is done here, rather than
12167 where the attribute is recorded, as the message length can
12168 change between these two locations. */
12170 void
12171 escaped_string::escape (const char *unescaped)
12173 char *escaped;
12174 size_t i, new_i, len;
12176 if (m_owned)
12177 free (m_str);
12179 m_str = const_cast<char *> (unescaped);
12180 m_owned = false;
12182 if (unescaped == NULL || *unescaped == 0)
12183 return;
12185 len = strlen (unescaped);
12186 escaped = NULL;
12187 new_i = 0;
12189 for (i = 0; i < len; i++)
12191 char c = unescaped[i];
12193 if (!ISCNTRL (c))
12195 if (escaped)
12196 escaped[new_i++] = c;
12197 continue;
12200 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
12202 if (escaped == NULL)
12204 /* We only allocate space for a new string if we
12205 actually encounter a control character that
12206 needs replacing. */
12207 escaped = (char *) xmalloc (len * 2 + 1);
12208 strncpy (escaped, unescaped, i);
12209 new_i = i;
12212 escaped[new_i++] = '\\';
12214 switch (c)
12216 case '\a': escaped[new_i++] = 'a'; break;
12217 case '\b': escaped[new_i++] = 'b'; break;
12218 case '\f': escaped[new_i++] = 'f'; break;
12219 case '\n': escaped[new_i++] = 'n'; break;
12220 case '\r': escaped[new_i++] = 'r'; break;
12221 case '\t': escaped[new_i++] = 't'; break;
12222 case '\v': escaped[new_i++] = 'v'; break;
12223 default: escaped[new_i++] = '?'; break;
12226 else if (escaped)
12227 escaped[new_i++] = c;
12230 if (escaped)
12232 escaped[new_i] = 0;
12233 m_str = escaped;
12234 m_owned = true;
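/* For example, an attribute message containing a literal tab character is
   rewritten with a "\t" escape sequence; strings with no control
   characters are returned unchanged without allocating a copy.  */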
12238 /* Warn about a use of an identifier which was marked deprecated. Returns
12239 whether a warning was given. */
12241 bool
12242 warn_deprecated_use (tree node, tree attr)
12244 escaped_string msg;
12246 if (node == 0 || !warn_deprecated_decl)
12247 return false;
12249 if (!attr)
12251 if (DECL_P (node))
12252 attr = DECL_ATTRIBUTES (node);
12253 else if (TYPE_P (node))
12255 tree decl = TYPE_STUB_DECL (node);
12256 if (decl)
12257 attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12258 else if ((decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node)))
12259 != NULL_TREE)
12261 node = TREE_TYPE (decl);
12262 attr = TYPE_ATTRIBUTES (node);
12267 if (attr)
12268 attr = lookup_attribute ("deprecated", attr);
12270 if (attr)
12271 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12273 bool w = false;
12274 if (DECL_P (node))
12276 auto_diagnostic_group d;
12277 if (msg)
12278 w = warning (OPT_Wdeprecated_declarations,
12279 "%qD is deprecated: %s", node, (const char *) msg);
12280 else
12281 w = warning (OPT_Wdeprecated_declarations,
12282 "%qD is deprecated", node);
12283 if (w)
12284 inform (DECL_SOURCE_LOCATION (node), "declared here");
12286 else if (TYPE_P (node))
12288 tree what = NULL_TREE;
12289 tree decl = TYPE_STUB_DECL (node);
12291 if (TYPE_NAME (node))
12293 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12294 what = TYPE_NAME (node);
12295 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12296 && DECL_NAME (TYPE_NAME (node)))
12297 what = DECL_NAME (TYPE_NAME (node));
12300 auto_diagnostic_group d;
12301 if (what)
12303 if (msg)
12304 w = warning (OPT_Wdeprecated_declarations,
12305 "%qE is deprecated: %s", what, (const char *) msg);
12306 else
12307 w = warning (OPT_Wdeprecated_declarations,
12308 "%qE is deprecated", what);
12310 else
12312 if (msg)
12313 w = warning (OPT_Wdeprecated_declarations,
12314 "type is deprecated: %s", (const char *) msg);
12315 else
12316 w = warning (OPT_Wdeprecated_declarations,
12317 "type is deprecated");
12320 if (w && decl)
12321 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12324 return w;
12327 /* Error out with an identifier which was marked 'unavailable'. */
12328 void
12329 error_unavailable_use (tree node, tree attr)
12331 escaped_string msg;
12333 if (node == 0)
12334 return;
12336 if (!attr)
12338 if (DECL_P (node))
12339 attr = DECL_ATTRIBUTES (node);
12340 else if (TYPE_P (node))
12342 tree decl = TYPE_STUB_DECL (node);
12343 if (decl)
12344 attr = lookup_attribute ("unavailable",
12345 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12349 if (attr)
12350 attr = lookup_attribute ("unavailable", attr);
12352 if (attr)
12353 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12355 if (DECL_P (node))
12357 auto_diagnostic_group d;
12358 if (msg)
12359 error ("%qD is unavailable: %s", node, (const char *) msg);
12360 else
12361 error ("%qD is unavailable", node);
12362 inform (DECL_SOURCE_LOCATION (node), "declared here");
12364 else if (TYPE_P (node))
12366 tree what = NULL_TREE;
12367 tree decl = TYPE_STUB_DECL (node);
12369 if (TYPE_NAME (node))
12371 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12372 what = TYPE_NAME (node);
12373 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12374 && DECL_NAME (TYPE_NAME (node)))
12375 what = DECL_NAME (TYPE_NAME (node));
12378 auto_diagnostic_group d;
12379 if (what)
12381 if (msg)
12382 error ("%qE is unavailable: %s", what, (const char *) msg);
12383 else
12384 error ("%qE is unavailable", what);
12386 else
12388 if (msg)
12389 error ("type is unavailable: %s", (const char *) msg);
12390 else
12391 error ("type is unavailable");
12394 if (decl)
12395 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12399 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12400 somewhere in it. */
12402 bool
12403 contains_bitfld_component_ref_p (const_tree ref)
12405 while (handled_component_p (ref))
12407 if (TREE_CODE (ref) == COMPONENT_REF
12408 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12409 return true;
12410 ref = TREE_OPERAND (ref, 0);
12413 return false;
12416 /* Try to determine whether a TRY_CATCH expression can fall through.
12417 This is a subroutine of block_may_fallthru. */
12419 static bool
12420 try_catch_may_fallthru (const_tree stmt)
12422 tree_stmt_iterator i;
12424 /* If the TRY block can fall through, the whole TRY_CATCH can
12425 fall through. */
12426 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12427 return true;
12429 i = tsi_start (TREE_OPERAND (stmt, 1));
12430 switch (TREE_CODE (tsi_stmt (i)))
12432 case CATCH_EXPR:
12433 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12434 catch expression and a body. The whole TRY_CATCH may fall
12435 through iff any of the catch bodies falls through. */
12436 for (; !tsi_end_p (i); tsi_next (&i))
12438 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12439 return true;
12441 return false;
12443 case EH_FILTER_EXPR:
12444 /* The exception filter expression only matters if there is an
12445 exception. If the exception does not match EH_FILTER_TYPES,
12446 we will execute EH_FILTER_FAILURE, and we will fall through
12447 if that falls through. If the exception does match
12448 EH_FILTER_TYPES, the stack unwinder will continue up the
12449 stack, so we will not fall through. We don't know whether we
12450 will throw an exception which matches EH_FILTER_TYPES or not,
12451 so we just ignore EH_FILTER_TYPES and assume that we might
12452 throw an exception which doesn't match. */
12453 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12455 default:
12456 /* This case represents statements to be executed when an
12457 exception occurs. Those statements are implicitly followed
12458 by a RESX statement to resume execution after the exception.
12459 So in this case the TRY_CATCH never falls through. */
12460 return false;
12464 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12465 need not be 100% accurate; simply be conservative and return true if we
12466 don't know. This is used only to avoid stupidly generating extra code.
12467 If we're wrong, we'll just delete the extra code later. */
12469 bool
12470 block_may_fallthru (const_tree block)
12472 /* This CONST_CAST is okay because expr_last returns its argument
12473 unmodified and we assign it to a const_tree. */
12474 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12476 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12478 case GOTO_EXPR:
12479 case RETURN_EXPR:
12480 /* Easy cases. If the last statement of the block implies
12481 control transfer, then we can't fall through. */
12482 return false;
12484 case SWITCH_EXPR:
12485 /* If there is a default: label or case labels cover all possible
12486 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12487 to some case label in all cases and all we care is whether the
12488 SWITCH_BODY falls through. */
12489 if (SWITCH_ALL_CASES_P (stmt))
12490 return block_may_fallthru (SWITCH_BODY (stmt));
12491 return true;
12493 case COND_EXPR:
12494 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12495 return true;
12496 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12498 case BIND_EXPR:
12499 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12501 case TRY_CATCH_EXPR:
12502 return try_catch_may_fallthru (stmt);
12504 case TRY_FINALLY_EXPR:
12505 /* The finally clause is always executed after the try clause,
12506 so if it does not fall through, then the try-finally will not
12507 fall through. Otherwise, if the try clause does not fall
12508 through, then when the finally clause falls through it will
12509 resume execution wherever the try clause was going. So the
12510 whole try-finally will only fall through if both the try
12511 clause and the finally clause fall through. */
12512 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12513 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12515 case EH_ELSE_EXPR:
12516 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12518 case MODIFY_EXPR:
12519 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12520 stmt = TREE_OPERAND (stmt, 1);
12521 else
12522 return true;
12523 /* FALLTHRU */
12525 case CALL_EXPR:
12526 /* Functions that do not return do not fall through. */
12527 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12529 case CLEANUP_POINT_EXPR:
12530 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12532 case TARGET_EXPR:
12533 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12535 case ERROR_MARK:
12536 return true;
12538 default:
12539 return lang_hooks.block_may_fallthru (stmt);
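/* For example, a block ending in "return x;" or in a call to a noreturn
   function such as abort () cannot fall through, while a block ending in
   an ordinary assignment is conservatively assumed to fall through.  */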
12543 /* True if we are using EH to handle cleanups. */
12544 static bool using_eh_for_cleanups_flag = false;
12546 /* This routine is called from front ends to indicate eh should be used for
12547 cleanups. */
12548 void
12549 using_eh_for_cleanups (void)
12551 using_eh_for_cleanups_flag = true;
12554 /* Query whether EH is used for cleanups. */
12555 bool
12556 using_eh_for_cleanups_p (void)
12558 return using_eh_for_cleanups_flag;
12561 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12562 const char *
12563 get_tree_code_name (enum tree_code code)
12565 const char *invalid = "<invalid tree code>";
12567 /* The tree_code enum promotes to signed, but we could be getting
12568 invalid values, so force an unsigned comparison. */
12569 if (unsigned (code) >= MAX_TREE_CODES)
12571 if ((unsigned)code == 0xa5a5)
12572 return "ggc_freed";
12573 return invalid;
12576 return tree_code_name[code];
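/* For example, get_tree_code_name (INTEGER_CST) yields "integer_cst", and
   a node whose code bytes were poisoned by the garbage collector reports
   "ggc_freed" instead of indexing past the table.  */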
12579 /* Drops the TREE_OVERFLOW flag from T. */
12581 tree
12582 drop_tree_overflow (tree t)
12584 gcc_checking_assert (TREE_OVERFLOW (t));
12586 /* For tree codes with a sharing machinery re-build the result. */
12587 if (poly_int_tree_p (t))
12588 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12590 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12591 and canonicalize the result. */
12592 if (TREE_CODE (t) == VECTOR_CST)
12594 tree_vector_builder builder;
12595 builder.new_unary_operation (TREE_TYPE (t), t, true);
12596 unsigned int count = builder.encoded_nelts ();
12597 for (unsigned int i = 0; i < count; ++i)
12599 tree elt = VECTOR_CST_ELT (t, i);
12600 if (TREE_OVERFLOW (elt))
12601 elt = drop_tree_overflow (elt);
12602 builder.quick_push (elt);
12604 return builder.build ();
12607 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12608 and drop the flag. */
12609 t = copy_node (t);
12610 TREE_OVERFLOW (t) = 0;
12612 /* For constants that contain nested constants, drop the flag
12613 from those as well. */
12614 if (TREE_CODE (t) == COMPLEX_CST)
12616 if (TREE_OVERFLOW (TREE_REALPART (t)))
12617 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12618 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12619 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12622 return t;
12625 /* Given a memory reference expression T, return its base address.
12626 The base address of a memory reference expression is the main
12627 object being referenced. For instance, the base address for
12628 'array[i].fld[j]' is 'array'. You can think of this as stripping
12629 away the offset part from a memory address.
12631 This function calls handled_component_p to strip away all the inner
12632 parts of the memory reference until it reaches the base object. */
12634 tree
12635 get_base_address (tree t)
12637 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12638 t = TREE_OPERAND (t, 0);
12639 while (handled_component_p (t))
12640 t = TREE_OPERAND (t, 0);
12642 if ((TREE_CODE (t) == MEM_REF
12643 || TREE_CODE (t) == TARGET_MEM_REF)
12644 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12645 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12647 return t;
12650 /* Return a tree of sizetype representing the size, in bytes, of the element
12651 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12653 tree
12654 array_ref_element_size (tree exp)
12656 tree aligned_size = TREE_OPERAND (exp, 3);
12657 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12658 location_t loc = EXPR_LOCATION (exp);
12660 /* If a size was specified in the ARRAY_REF, it's the size measured
12661 in alignment units of the element type. So multiply by that value. */
12662 if (aligned_size)
12664 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12665 sizetype from another type of the same width and signedness. */
12666 if (TREE_TYPE (aligned_size) != sizetype)
12667 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12668 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12669 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12672 /* Otherwise, take the size from that of the element type. Substitute
12673 any PLACEHOLDER_EXPR that we have. */
12674 else
12675 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12678 /* Return a tree representing the lower bound of the array mentioned in
12679 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12681 tree
12682 array_ref_low_bound (tree exp)
12684 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12686 /* If a lower bound is specified in EXP, use it. */
12687 if (TREE_OPERAND (exp, 2))
12688 return TREE_OPERAND (exp, 2);
12690 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12691 substituting for a PLACEHOLDER_EXPR as needed. */
12692 if (domain_type && TYPE_MIN_VALUE (domain_type))
12693 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12695 /* Otherwise, return a zero of the appropriate type. */
12696 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12697 return (idxtype == error_mark_node
12698 ? integer_zero_node : build_int_cst (idxtype, 0));
12701 /* Return a tree representing the upper bound of the array mentioned in
12702 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12704 tree
12705 array_ref_up_bound (tree exp)
12707 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12709 /* If there is a domain type and it has an upper bound, use it, substituting
12710 for a PLACEHOLDER_EXPR as needed. */
12711 if (domain_type && TYPE_MAX_VALUE (domain_type))
12712 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12714 /* Otherwise fail. */
12715 return NULL_TREE;
12718 /* Returns true if REF is an array reference, a component reference,
12719 or a memory reference to an array whose actual size might be larger
12720 than its upper bound implies; there are multiple cases:
12721 A. a ref to a flexible array member at the end of a structure;
12722 B. a ref to an array with a different type against the original decl;
12723 for example:
12725 short a[16] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 };
12726 (*((char(*)[16])&a[0]))[i+8]
12728 C. a ref to an array that was passed as a parameter;
12729 for example:
12731 int test (uint8_t *p, uint32_t t[1][1], int n) {
12732 for (int i = 0; i < 4; i++, p++)
12733 t[i][0] = ...;
12735 If non-null, set IS_TRAILING_ARRAY to true if the ref is the above case A.
12738 bool
12739 array_ref_flexible_size_p (tree ref, bool *is_trailing_array /* = NULL */)
12741 /* The TYPE for this array reference. */
12742 tree atype = NULL_TREE;
12743 /* The FIELD_DECL for the array field in the containing structure. */
12744 tree afield_decl = NULL_TREE;
12745 /* Whether this array is the trailing array of a structure. */
12746 bool is_trailing_array_tmp = false;
12747 if (!is_trailing_array)
12748 is_trailing_array = &is_trailing_array_tmp;
12750 if (TREE_CODE (ref) == ARRAY_REF
12751 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12753 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12754 ref = TREE_OPERAND (ref, 0);
12756 else if (TREE_CODE (ref) == COMPONENT_REF
12757 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12759 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12760 afield_decl = TREE_OPERAND (ref, 1);
12762 else if (TREE_CODE (ref) == MEM_REF)
12764 tree arg = TREE_OPERAND (ref, 0);
12765 if (TREE_CODE (arg) == ADDR_EXPR)
12766 arg = TREE_OPERAND (arg, 0);
12767 tree argtype = TREE_TYPE (arg);
12768 if (TREE_CODE (argtype) == RECORD_TYPE)
12770 if (tree fld = last_field (argtype))
12772 atype = TREE_TYPE (fld);
12773 afield_decl = fld;
12774 if (TREE_CODE (atype) != ARRAY_TYPE)
12775 return false;
12776 if (VAR_P (arg) && DECL_SIZE (fld))
12777 return false;
12779 else
12780 return false;
12782 else
12783 return false;
12785 else
12786 return false;
12788 if (TREE_CODE (ref) == STRING_CST)
12789 return false;
12791 tree ref_to_array = ref;
12792 while (handled_component_p (ref))
12794 /* If the reference chain contains a component reference to a
12795 non-union type and another field follows it, the reference
12796 is not at the end of a structure. */
12797 if (TREE_CODE (ref) == COMPONENT_REF)
12799 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12801 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12802 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12803 nextf = DECL_CHAIN (nextf);
12804 if (nextf)
12805 return false;
12808 /* If we have a multi-dimensional array we do not consider
12809 a non-innermost dimension as flex array if the whole
12810 multi-dimensional array is at struct end.
12811 Same for an array of aggregates with a trailing array
12812 member. */
12813 else if (TREE_CODE (ref) == ARRAY_REF)
12814 return false;
12815 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12817 /* If we view an underlying object as something else, then what we
12818 have gathered up to now is what we have to rely on. */
12819 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12820 break;
12821 else
12822 gcc_unreachable ();
12824 ref = TREE_OPERAND (ref, 0);
12827 gcc_assert (!afield_decl
12828 || (afield_decl && TREE_CODE (afield_decl) == FIELD_DECL));
12830 /* The array now is at struct end. Treat a flexible array member as
12831 always subject to extension, even into just the padding constrained by
12832 an underlying decl. */
12833 if (! TYPE_SIZE (atype)
12834 || ! TYPE_DOMAIN (atype)
12835 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12837 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12838 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12841 /* If the reference is based on a declared entity, the size of the array
12842 is constrained by its given domain. (Do not trust commons PR/69368). */
12843 ref = get_base_address (ref);
12844 if (ref
12845 && DECL_P (ref)
12846 && !(flag_unconstrained_commons
12847 && VAR_P (ref) && DECL_COMMON (ref))
12848 && DECL_SIZE_UNIT (ref)
12849 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12851 /* If the object itself is the array it is not at struct end. */
12852 if (DECL_P (ref_to_array))
12853 return false;
12855 /* Check whether the array domain covers all of the available
12856 padding. */
12857 poly_int64 offset;
12858 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12859 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12860 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12862 *is_trailing_array
12863 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12864 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12866 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12868 *is_trailing_array
12869 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12870 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12873 /* If at least one extra element fits it is a flexarray. */
12874 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12875 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
12876 + 2)
12877 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
12878 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
12880 *is_trailing_array
12881 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12882 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12885 return false;
12888 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12889 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
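/* Illustrative cases (hypothetical types): for
     struct S { int n; char data[]; };
   an access like p->data[i] has flexible size (case A above), while for
     struct T { char buf[4]; int n; };
   an access to buf[i] does not, because another field follows it.  */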
12893 /* Return a tree representing the offset, in bytes, of the field referenced
12894 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12896 tree
12897 component_ref_field_offset (tree exp)
12899 tree aligned_offset = TREE_OPERAND (exp, 2);
12900 tree field = TREE_OPERAND (exp, 1);
12901 location_t loc = EXPR_LOCATION (exp);
12903 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12904 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12905 value. */
12906 if (aligned_offset)
12908 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12909 sizetype from another type of the same width and signedness. */
12910 if (TREE_TYPE (aligned_offset) != sizetype)
12911 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12912 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12913 size_int (DECL_OFFSET_ALIGN (field)
12914 / BITS_PER_UNIT));
12917 /* Otherwise, take the offset from that of the field. Substitute
12918 any PLACEHOLDER_EXPR that we have. */
12919 else
12920 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12923 /* Given the initializer INIT, return the initializer for the field
12924 DECL if it exists, otherwise null. Used to obtain the initializer
12925 for a flexible array member and determine its size. */
12927 static tree
12928 get_initializer_for (tree init, tree decl)
12930 STRIP_NOPS (init);
12932 tree fld, fld_init;
12933 unsigned HOST_WIDE_INT i;
12934 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
12936 if (decl == fld)
12937 return fld_init;
12939 if (TREE_CODE (fld) == CONSTRUCTOR)
12941 fld_init = get_initializer_for (fld_init, decl);
12942 if (fld_init)
12943 return fld_init;
12947 return NULL_TREE;
12950 /* Determines the special array member type for the array reference REF. */
12951 special_array_member
12952 component_ref_sam_type (tree ref)
12954 special_array_member sam_type = special_array_member::none;
12956 tree member = TREE_OPERAND (ref, 1);
12957 tree memsize = DECL_SIZE_UNIT (member);
12958 if (memsize)
12960 tree memtype = TREE_TYPE (member);
12961 if (TREE_CODE (memtype) != ARRAY_TYPE)
12962 return sam_type;
12964 bool trailing = false;
12965 (void)array_ref_flexible_size_p (ref, &trailing);
12966 bool zero_length = integer_zerop (memsize);
12967 if (!trailing && !zero_length)
12968 /* MEMBER is an interior array with
12969 more than one element. */
12970 return special_array_member::int_n;
12972 if (zero_length)
12974 if (trailing)
12975 return special_array_member::trail_0;
12976 else
12977 return special_array_member::int_0;
12980 if (!zero_length)
12981 if (tree dom = TYPE_DOMAIN (memtype))
12982 if (tree min = TYPE_MIN_VALUE (dom))
12983 if (tree max = TYPE_MAX_VALUE (dom))
12984 if (TREE_CODE (min) == INTEGER_CST
12985 && TREE_CODE (max) == INTEGER_CST)
12987 offset_int minidx = wi::to_offset (min);
12988 offset_int maxidx = wi::to_offset (max);
12989 offset_int neltsm1 = maxidx - minidx;
12990 if (neltsm1 > 0)
12991 /* MEMBER is a trailing array with more than
12992 one element. */
12993 return special_array_member::trail_n;
12995 if (neltsm1 == 0)
12996 return special_array_member::trail_1;
13000 return sam_type;
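/* Illustrative classifications (hypothetical structs accessed through a
   pointer): a trailing "char a0[0];" member yields trail_0 and a trailing
   "char a1[1];" member yields trail_1; both idioms predate C99 flexible
   array members, which is why they are singled out here.  */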
13003 /* Determines the size of the member referenced by the COMPONENT_REF
13004 REF, using its initializer expression if necessary in order to
13005 determine the size of an initialized flexible array member.
13006 If non-null, set *SAM to the type of special array member.
13007 Returns the size as sizetype (which might be zero for an object
13008 with an uninitialized flexible array member) or null if the size
13009 cannot be determined. */
13011 tree
13012 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
13014 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13016 special_array_member sambuf;
13017 if (!sam)
13018 sam = &sambuf;
13019 *sam = component_ref_sam_type (ref);
13021 /* The object/argument referenced by the COMPONENT_REF and its type. */
13022 tree arg = TREE_OPERAND (ref, 0);
13023 tree argtype = TREE_TYPE (arg);
13024 /* The referenced member. */
13025 tree member = TREE_OPERAND (ref, 1);
13027 tree memsize = DECL_SIZE_UNIT (member);
13028 if (memsize)
13030 tree memtype = TREE_TYPE (member);
13031 if (TREE_CODE (memtype) != ARRAY_TYPE)
13032 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
13033 to the type of a class with a virtual base which doesn't
13034 reflect the size of the virtual's members (see pr97595).
13035 If that's the case fail for now and implement something
13036 more robust in the future. */
13037 return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
13038 ? memsize : NULL_TREE);
13040 /* Arrays of two or more elements are treated as normal arrays by default. */
13041 if (*sam == special_array_member::int_n
13042 || *sam == special_array_member::trail_n)
13043 return memsize;
13045 /* flag_strict_flex_arrays controls whether to treat
13046 trailing arrays as flexible array members. */
13048 tree afield_decl = TREE_OPERAND (ref, 1);
13049 unsigned int strict_flex_array_level
13050 = strict_flex_array_level_of (afield_decl);
13052 switch (strict_flex_array_level)
13054 case 3:
13055 /* Treating 0-length trailing arrays as normal arrays. */
13056 if (*sam == special_array_member::trail_0)
13057 return size_zero_node;
13058 /* FALLTHROUGH. */
13059 case 2:
13060 /* Treating 1-element trailing arrays as normal arrays. */
13061 if (*sam == special_array_member::trail_1)
13062 return memsize;
13063 /* FALLTHROUGH. */
13064 case 1:
13065 /* Treating trailing arrays of two or more elements as normal
13066 arrays. */
13067 if (*sam == special_array_member::trail_n)
13068 return memsize;
13069 /* FALLTHROUGH. */
13070 case 0:
13071 break;
13072 default:
13073 gcc_unreachable ();
13076 if (*sam == special_array_member::int_0)
13077 memsize = NULL_TREE;
13079 /* For a reference to a flexible array member of a union
13080 use the size of the union instead of the size of the member. */
13081 if (TREE_CODE (argtype) == UNION_TYPE)
13082 memsize = TYPE_SIZE_UNIT (argtype);
13085 /* MEMBER is either a bona fide flexible array member, or a zero-length
13086 array member, or an array of length one treated as such. */
13088 /* If the reference is to a declared object and the member a true
13089 flexible array, try to determine its size from its initializer. */
13090 poly_int64 baseoff = 0;
13091 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
13092 if (!base || !VAR_P (base))
13094 if (*sam != special_array_member::int_0)
13095 return NULL_TREE;
13097 if (TREE_CODE (arg) != COMPONENT_REF)
13098 return NULL_TREE;
13100 base = arg;
13101 while (TREE_CODE (base) == COMPONENT_REF)
13102 base = TREE_OPERAND (base, 0);
13103 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
13106 /* BASE is the declared object of which MEMBER is either a member
13107 or that is cast to ARGTYPE (e.g., a char buffer used to store
13108 an ARGTYPE object). */
13109 tree basetype = TREE_TYPE (base);
13111 /* Determine the base type of the referenced object. If it's
13112 the same as ARGTYPE and MEMBER has a known size, return it. */
13113 tree bt = basetype;
13114 if (*sam != special_array_member::int_0)
13115 while (TREE_CODE (bt) == ARRAY_TYPE)
13116 bt = TREE_TYPE (bt);
13117 bool typematch = useless_type_conversion_p (argtype, bt);
13118 if (memsize && typematch)
13119 return memsize;
13121 memsize = NULL_TREE;
13123 if (typematch)
13124 /* MEMBER is a true flexible array member. Compute its size from
13125 the initializer of the BASE object if it has one. */
13126 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
13127 if (init != error_mark_node)
13129 init = get_initializer_for (init, member);
13130 if (init)
13132 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13133 if (tree refsize = TYPE_SIZE_UNIT (argtype))
13135 /* Use the larger of the initializer size and the tail
13136 padding in the enclosing struct. */
13137 poly_int64 rsz = tree_to_poly_int64 (refsize);
13138 rsz -= baseoff;
13139 if (known_lt (tree_to_poly_int64 (memsize), rsz))
13140 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13143 baseoff = 0;
13147 if (!memsize)
13149 if (typematch)
13151 if (DECL_P (base)
13152 && DECL_EXTERNAL (base)
13153 && bt == basetype
13154 && *sam != special_array_member::int_0)
13155 /* The size of a flexible array member of an extern struct
13156 with no initializer cannot be determined (it's defined
13157 in another translation unit and can have an initializer
13158 with an arbitrary number of elements). */
13159 return NULL_TREE;
13161 /* Use the size of the base struct or, for interior zero-length
13162 arrays, the size of the enclosing type. */
13163 memsize = TYPE_SIZE_UNIT (bt);
13165 else if (DECL_P (base))
13166 /* Use the size of the BASE object (possibly an array of some
13167 other type such as char used to store the struct). */
13168 memsize = DECL_SIZE_UNIT (base);
13169 else
13170 return NULL_TREE;
13173 /* If the flexible array member has a known size use the greater
13174 of it and the tail padding in the enclosing struct.
13175 Otherwise, when the size of the flexible array member is unknown
13176 and the referenced object is not a struct, use the size of its
13177 type when known. This detects sizes of array buffers when cast
13178 to struct types with flexible array members. */
13179 if (memsize)
13181 if (!tree_fits_poly_int64_p (memsize))
13182 return NULL_TREE;
13183 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13184 if (known_lt (baseoff, memsz64))
13186 memsz64 -= baseoff;
13187 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13189 return size_zero_node;
13192 /* Return "don't know" for an external non-array object since its
13193 flexible array member can be initialized to have any number of
13194 elements. Otherwise, return zero because the flexible array
13195 member has no elements. */
13196 return (DECL_P (base)
13197 && DECL_EXTERNAL (base)
13198 && (!typematch
13199 || TREE_CODE (basetype) != ARRAY_TYPE)
13200 ? NULL_TREE : size_zero_node);
13203 /* Return the machine mode of T. For vectors, returns the mode of the
13204 inner type. The main use case is to feed the result to HONOR_NANS,
13205 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13207 machine_mode
13208 element_mode (const_tree t)
13210 if (!TYPE_P (t))
13211 t = TREE_TYPE (t);
13212 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13213 t = TREE_TYPE (t);
13214 return TYPE_MODE (t);
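/* For example, on usual targets element_mode of a vector of four floats
   is SFmode and element_mode of "complex double" is DFmode, whereas
   TYPE_MODE of the vector type itself could be BLKmode on targets that
   lack such vectors.  */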
13217 /* Vector types need to re-check the target flags each time we report
13218 the machine mode. We need to do this because attribute target can
13219 change the result of vector_mode_supported_p and have_regs_of_mode
13220 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13221 change on a per-function basis. */
13222 /* ??? Possibly a better solution is to run through all the types
13223 referenced by a function and re-compute the TYPE_MODE once, rather
13224 than make the TYPE_MODE macro call a function. */
13226 machine_mode
13227 vector_type_mode (const_tree t)
13229 machine_mode mode;
13231 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13233 mode = t->type_common.mode;
13234 if (VECTOR_MODE_P (mode)
13235 && (!targetm.vector_mode_supported_p (mode)
13236 || !have_regs_of_mode[mode]))
13238 scalar_int_mode innermode;
13240 /* For integers, try mapping it to a same-sized scalar mode. */
13241 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13243 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13244 * GET_MODE_BITSIZE (innermode));
13245 scalar_int_mode mode;
13246 if (int_mode_for_size (size, 0).exists (&mode)
13247 && have_regs_of_mode[mode])
13248 return mode;
13251 return BLKmode;
13254 return mode;
13257 /* Return the size in bits of each element of vector type TYPE. */
13259 unsigned int
13260 vector_element_bits (const_tree type)
13262 gcc_checking_assert (VECTOR_TYPE_P (type));
13263 if (VECTOR_BOOLEAN_TYPE_P (type))
13264 return TYPE_PRECISION (TREE_TYPE (type));
13265 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
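/* For example, for a vector of four 32-bit floats this returns 32, and
   for a vector boolean type it returns the element precision, which may
   be as small as a single bit.  */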
13268 /* Calculate the size in bits of each element of vector type TYPE
13269 and return the result as a tree of type bitsizetype. */
13271 tree
13272 vector_element_bits_tree (const_tree type)
13274 gcc_checking_assert (VECTOR_TYPE_P (type));
13275 if (VECTOR_BOOLEAN_TYPE_P (type))
13276 return bitsize_int (vector_element_bits (type));
13277 return TYPE_SIZE (TREE_TYPE (type));
13280 /* Verify that basic properties of T match TV and thus T can be a variant of
13281 TV. TV should be the more specified variant (i.e. the main variant). */
13283 static bool
13284 verify_type_variant (const_tree t, tree tv)
13286 /* Type variant can differ by:
13288 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13289 ENCODE_QUAL_ADDR_SPACE.
13290 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13291 in this case some values may not be set in the variant types
13292 (see TYPE_COMPLETE_P checks).
13293 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13294 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13295 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13296 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13297 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13298 this is necessary to make it possible to merge types from different TUs
13299 - arrays, pointers and references may have TREE_TYPE that is a variant
13300 of TREE_TYPE of their main variants.
13301 - aggregates may have new TYPE_FIELDS list that list variants of
13302 the main variant TYPE_FIELDS.
13303 - vector types may differ by TYPE_VECTOR_OPAQUE
13306 /* Convenience macro for matching individual fields. */
13307 #define verify_variant_match(flag) \
13308 do { \
13309 if (flag (tv) != flag (t)) \
13311 error ("type variant differs by %s", #flag); \
13312 debug_tree (tv); \
13313 return false; \
13315 } while (false)
13317 /* tree_base checks. */
13319 verify_variant_match (TREE_CODE);
13320 /* FIXME: Ada builds non-artificial variants of artificial types. */
13321 #if 0
13322 if (TYPE_ARTIFICIAL (tv))
13323 verify_variant_match (TYPE_ARTIFICIAL);
13324 #endif
13325 if (POINTER_TYPE_P (tv))
13326 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13327 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13328 verify_variant_match (TYPE_UNSIGNED);
13329 verify_variant_match (TYPE_PACKED);
13330 if (TREE_CODE (t) == REFERENCE_TYPE)
13331 verify_variant_match (TYPE_REF_IS_RVALUE);
13332 if (AGGREGATE_TYPE_P (t))
13333 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13334 else
13335 verify_variant_match (TYPE_SATURATING);
13336 /* FIXME: This check triggers during libstdc++ builds. */
13337 #if 0
13338 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
13339 verify_variant_match (TYPE_FINAL_P);
13340 #endif
13342 /* tree_type_common checks. */
13344 if (COMPLETE_TYPE_P (t))
13346 verify_variant_match (TYPE_MODE);
13347 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13348 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13349 verify_variant_match (TYPE_SIZE);
13350 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13351 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13352 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13354 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13355 TYPE_SIZE_UNIT (tv), 0));
13356 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13357 debug_tree (tv);
13358 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13359 debug_tree (TYPE_SIZE_UNIT (tv));
13360 error ("type%'s %<TYPE_SIZE_UNIT%>");
13361 debug_tree (TYPE_SIZE_UNIT (t));
13362 return false;
13364 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13366 verify_variant_match (TYPE_PRECISION);
13367 if (RECORD_OR_UNION_TYPE_P (t))
13368 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13369 else if (TREE_CODE (t) == ARRAY_TYPE)
13370 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13371 /* During LTO we merge variant lists from different translation units
13372 that may differ by TYPE_CONTEXT, which in turn may point
13373 to TRANSLATION_UNIT_DECL.
13374 Ada also builds variants of types with different TYPE_CONTEXT. */
13375 #if 0
13376 if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13377 verify_variant_match (TYPE_CONTEXT);
13378 #endif
13379 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13380 verify_variant_match (TYPE_STRING_FLAG);
13381 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13382 verify_variant_match (TYPE_CXX_ODR_P);
13383 if (TYPE_ALIAS_SET_KNOWN_P (t))
13385 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13386 debug_tree (tv);
13387 return false;
13390 /* tree_type_non_common checks. */
13392 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13393 and dangle the pointer from time to time. */
13394 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13395 && (in_lto_p || !TYPE_VFIELD (tv)
13396 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13398 error ("type variant has different %<TYPE_VFIELD%>");
13399 debug_tree (tv);
13400 return false;
13402 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13403 || TREE_CODE (t) == INTEGER_TYPE
13404 || TREE_CODE (t) == BOOLEAN_TYPE
13405 || TREE_CODE (t) == REAL_TYPE
13406 || TREE_CODE (t) == FIXED_POINT_TYPE)
13408 verify_variant_match (TYPE_MAX_VALUE);
13409 verify_variant_match (TYPE_MIN_VALUE);
13411 if (TREE_CODE (t) == METHOD_TYPE)
13412 verify_variant_match (TYPE_METHOD_BASETYPE);
13413 if (TREE_CODE (t) == OFFSET_TYPE)
13414 verify_variant_match (TYPE_OFFSET_BASETYPE);
13415 if (TREE_CODE (t) == ARRAY_TYPE)
13416 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13417 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13418 or even in the type's main variant. This is needed to make bootstrap pass
13419 and the bug seems new in GCC 5.
13420 The C++ FE should be updated to make this consistent and we should check
13421 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and that otherwise there
13422 is a match with the main variant.
13424 Also disable the check for Java for now because of a parser hack that builds
13425 a dummy BINFO first and then sometimes replaces it by the real BINFO in some
13426 of the copies. */
13427 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13428 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13429 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13430 Since there is no cheap way to tell a C++ type from a Java type w/o LTO, do the checking
13431 at LTO time only. */
13432 && (in_lto_p && odr_type_p (t)))
13434 error ("type variant has different %<TYPE_BINFO%>");
13435 debug_tree (tv);
13436 error ("type variant%'s %<TYPE_BINFO%>");
13437 debug_tree (TYPE_BINFO (tv));
13438 error ("type%'s %<TYPE_BINFO%>");
13439 debug_tree (TYPE_BINFO (t));
13440 return false;
13443 /* Check various uses of TYPE_VALUES_RAW. */
13444 if (TREE_CODE (t) == ENUMERAL_TYPE
13445 && TYPE_VALUES (t))
13446 verify_variant_match (TYPE_VALUES);
13447 else if (TREE_CODE (t) == ARRAY_TYPE)
13448 verify_variant_match (TYPE_DOMAIN);
13449 /* Permit incomplete variants of a complete type. While FEs may complete
13450 all variants, this does not happen for C++ templates in all cases. */
13451 else if (RECORD_OR_UNION_TYPE_P (t)
13452 && COMPLETE_TYPE_P (t)
13453 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13455 tree f1, f2;
13457 /* Fortran builds qualified variants as new records with items of
13458 qualified type. Verify that they look the same. */
13459 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13460 f1 && f2;
13461 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13462 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13463 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13464 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13465 /* FIXME: gfc_nonrestricted_type builds all types as variants
13466 with the exception of pointer types. It deeply copies the type,
13467 which means that we may end up with a variant type
13468 referring to a non-variant pointer. We may change it to
13469 produce types as variants, too, like
13470 objc_get_protocol_qualified_type does. */
13471 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13472 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13473 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13474 break;
13475 if (f1 || f2)
13477 error ("type variant has different %<TYPE_FIELDS%>");
13478 debug_tree (tv);
13479 error ("first mismatch is field");
13480 debug_tree (f1);
13481 error ("and field");
13482 debug_tree (f2);
13483 return false;
13486 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13487 verify_variant_match (TYPE_ARG_TYPES);
13488 /* For C++ the qualified variant of an array type is really an array type
13489 of the qualified TREE_TYPE.
13490 ObjC builds variants of pointer types where the pointed-to type is a variant,
13491 too, in objc_get_protocol_qualified_type. */
13492 if (TREE_TYPE (t) != TREE_TYPE (tv)
13493 && ((TREE_CODE (t) != ARRAY_TYPE
13494 && !POINTER_TYPE_P (t))
13495 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13496 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13498 error ("type variant has different %<TREE_TYPE%>");
13499 debug_tree (tv);
13500 error ("type variant%'s %<TREE_TYPE%>");
13501 debug_tree (TREE_TYPE (tv));
13502 error ("type%'s %<TREE_TYPE%>");
13503 debug_tree (TREE_TYPE (t));
13504 return false;
13506 if (type_with_alias_set_p (t)
13507 && !gimple_canonical_types_compatible_p (t, tv, false))
13509 error ("type is not compatible with its variant");
13510 debug_tree (tv);
13511 error ("type variant%'s %<TREE_TYPE%>");
13512 debug_tree (TREE_TYPE (tv));
13513 error ("type%'s %<TREE_TYPE%>");
13514 debug_tree (TREE_TYPE (t));
13515 return false;
13517 return true;
13518 #undef verify_variant_match
13522 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13523 the middle-end types_compatible_p function. It needs to avoid
13524 claiming types are different for types that should be treated
13525 the same with respect to TBAA. Canonical types are also used
13526 for IL consistency checks via the useless_type_conversion_p
13527 predicate which does not handle all type kinds itself but falls
13528 back to pointer-comparison of TYPE_CANONICAL for aggregates
13529 for example. */
13531 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13532 type calculation because we need to allow inter-operability between signed
13533 and unsigned variants. */
13535 bool
13536 type_with_interoperable_signedness (const_tree type)
13538 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13539 signed char and unsigned char. Similarly, the Fortran FE builds
13540 C_SIZE_T as a signed type, while C defines it as unsigned. */
13542 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13543 == INTEGER_TYPE
13544 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13545 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13548 /* Return true iff T1 and T2 are structurally identical as far as
13549 TBAA is concerned.
13550 This function is used both by lto.cc canonical type merging and by the
13551 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13552 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13553 only for LTO because only in these cases TYPE_CANONICAL equivalence
13554 corresponds to the one defined by gimple_canonical_types_compatible_p. */
13556 bool
13557 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13558 bool trust_type_canonical)
13560 /* Type variants should be the same as the main variant. When not doing sanity
13561 checking to verify this fact, go to main variants and save some work. */
13562 if (trust_type_canonical)
13564 t1 = TYPE_MAIN_VARIANT (t1);
13565 t2 = TYPE_MAIN_VARIANT (t2);
13568 /* Check first for the obvious case of pointer identity. */
13569 if (t1 == t2)
13570 return true;
13572 /* Check that we have two types to compare. */
13573 if (t1 == NULL_TREE || t2 == NULL_TREE)
13574 return false;
13576 /* We consider complete types always compatible with incomplete types.
13577 This does not make sense for canonical type calculation and thus we
13578 need to ensure that we are never called on it.
13580 FIXME: For more correctness the function probably should have three modes
13581 1) mode assuming that types are complete, matching their structure
13582 2) mode allowing incomplete types but producing equivalence classes
13583 and thus ignoring all info from complete types
13584 3) mode allowing incomplete types to match complete but checking
13585 compatibility between complete types.
13587 1 and 2 can be used for canonical type calculation. 3 is the real
13588 definition of type compatibility that can be used e.g. for warnings during
13589 declaration merging. */
13591 gcc_assert (!trust_type_canonical
13592 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13594 /* If the types have been previously registered and found equal
13595 they still are. */
13597 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13598 && trust_type_canonical)
13600 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13601 they are always NULL, but they are set to non-NULL for types
13602 constructed by build_pointer_type and variants. In this case the
13603 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
13604 all pointers are considered equal). Be sure not to return false
13605 negatives. */
13606 gcc_checking_assert (canonical_type_used_p (t1)
13607 && canonical_type_used_p (t2));
13608 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13611 /* For types where we do ODR based TBAA the canonical type is always
13612 set correctly, so we know that types are different if their
13613 canonical types do not match. */
13614 if (trust_type_canonical
13615 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13616 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13617 return false;
13619 /* Can't be the same type if the types don't have the same code. */
13620 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13621 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13622 return false;
13624 /* Qualifiers do not matter for canonical type comparison purposes. */
13626 /* Void types and nullptr types are always the same. */
13627 if (TREE_CODE (t1) == VOID_TYPE
13628 || TREE_CODE (t1) == NULLPTR_TYPE)
13629 return true;
13631 /* Can't be the same type if they have different mode. */
13632 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13633 return false;
13635 /* Non-aggregate types can be handled cheaply. */
13636 if (INTEGRAL_TYPE_P (t1)
13637 || SCALAR_FLOAT_TYPE_P (t1)
13638 || FIXED_POINT_TYPE_P (t1)
13639 || TREE_CODE (t1) == VECTOR_TYPE
13640 || TREE_CODE (t1) == COMPLEX_TYPE
13641 || TREE_CODE (t1) == OFFSET_TYPE
13642 || POINTER_TYPE_P (t1))
13644 /* Can't be the same type if they have different precision. */
13645 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13646 return false;
13648 /* In some cases the signed and unsigned types are required to be
13649 inter-operable. */
13650 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13651 && !type_with_interoperable_signedness (t1))
13652 return false;
13654 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13655 interoperable with "signed char". Unless all frontends are revisited
13656 to agree on these types, we must ignore the flag completely. */
13658 /* The Fortran standard defines the C_PTR type, which is compatible with every
13659 C pointer. For this reason we need to glob all pointers into one.
13660 Still, pointers in different address spaces are not compatible. */
13661 if (POINTER_TYPE_P (t1))
13663 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13664 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13665 return false;
13668 /* Tail-recurse to components. */
13669 if (TREE_CODE (t1) == VECTOR_TYPE
13670 || TREE_CODE (t1) == COMPLEX_TYPE)
13671 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13672 TREE_TYPE (t2),
13673 trust_type_canonical);
13675 return true;
13678 /* Do type-specific comparisons. */
13679 switch (TREE_CODE (t1))
13681 case ARRAY_TYPE:
13682 /* Array types are the same if the element types are the same and
13683 the number of elements is the same. */
13684 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13685 trust_type_canonical)
13686 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13687 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13688 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13689 return false;
13690 else
13692 tree i1 = TYPE_DOMAIN (t1);
13693 tree i2 = TYPE_DOMAIN (t2);
13695 /* For an incomplete external array, the type domain can be
13696 NULL_TREE. Check this condition also. */
13697 if (i1 == NULL_TREE && i2 == NULL_TREE)
13698 return true;
13699 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13700 return false;
13701 else
13703 tree min1 = TYPE_MIN_VALUE (i1);
13704 tree min2 = TYPE_MIN_VALUE (i2);
13705 tree max1 = TYPE_MAX_VALUE (i1);
13706 tree max2 = TYPE_MAX_VALUE (i2);
13708 /* The minimum/maximum values have to be the same. */
13709 if ((min1 == min2
13710 || (min1 && min2
13711 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13712 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13713 || operand_equal_p (min1, min2, 0))))
13714 && (max1 == max2
13715 || (max1 && max2
13716 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13717 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13718 || operand_equal_p (max1, max2, 0)))))
13719 return true;
13720 else
13721 return false;
13725 case METHOD_TYPE:
13726 case FUNCTION_TYPE:
13727 /* Function types are the same if the return type and argument types
13728 are the same. */
13729 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13730 trust_type_canonical))
13731 return false;
13733 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
13734 && (TYPE_NO_NAMED_ARGS_STDARG_P (t1)
13735 == TYPE_NO_NAMED_ARGS_STDARG_P (t2)))
13736 return true;
13737 else
13739 tree parms1, parms2;
13741 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13742 parms1 && parms2;
13743 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13745 if (!gimple_canonical_types_compatible_p
13746 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13747 trust_type_canonical))
13748 return false;
13751 if (parms1 || parms2)
13752 return false;
13754 return true;
13757 case RECORD_TYPE:
13758 case UNION_TYPE:
13759 case QUAL_UNION_TYPE:
13761 tree f1, f2;
13763 /* Don't try to compare variants of an incomplete type, before
13764 TYPE_FIELDS has been copied around. */
13765 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13766 return true;
13769 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13770 return false;
13772 /* For aggregate types, all the fields must be the same. */
13773 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13774 f1 || f2;
13775 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13777 /* Skip non-fields and zero-sized fields. */
13778 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13779 || (DECL_SIZE (f1)
13780 && integer_zerop (DECL_SIZE (f1)))))
13781 f1 = TREE_CHAIN (f1);
13782 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13783 || (DECL_SIZE (f2)
13784 && integer_zerop (DECL_SIZE (f2)))))
13785 f2 = TREE_CHAIN (f2);
13786 if (!f1 || !f2)
13787 break;
13788 /* The fields must have the same name, offset and type. */
13789 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13790 || !gimple_compare_field_offset (f1, f2)
13791 || !gimple_canonical_types_compatible_p
13792 (TREE_TYPE (f1), TREE_TYPE (f2),
13793 trust_type_canonical))
13794 return false;
13797 /* If one aggregate has more fields than the other, they
13798 are not the same. */
13799 if (f1 || f2)
13800 return false;
13802 return true;
13805 default:
13806 /* Consider all types with language specific trees in them mutually
13807 compatible. This is executed only from verify_type and false
13808 positives can be tolerated. */
13809 gcc_assert (!in_lto_p);
13810 return true;
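/* Illustrative note (not part of the original source): under the rules
   above, two INTEGER_TYPEs that share mode and precision but differ in
   signedness can still compare compatible thanks to
   type_with_interoperable_signedness, e.g.

     gimple_canonical_types_compatible_p (signed_char_type_node,
                                          unsigned_char_type_node,
                                          false)

   yields true, whereas types with different TYPE_MODE, such as "int" and
   "long" on an LP64 target, always compare incompatible.  */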
13814 /* For OPAQUE_TYPE T, it should have only size and alignment information
13815 and its mode should be of class MODE_OPAQUE. This function verifies
13816 these properties of T match TV which is the main variant of T and TC
13817 which is the canonical type of T. */
13819 static void
13820 verify_opaque_type (const_tree t, tree tv, tree tc)
13822 gcc_assert (OPAQUE_TYPE_P (t));
13823 gcc_assert (tv && tv == TYPE_MAIN_VARIANT (tv));
13824 gcc_assert (tc && tc == TYPE_CANONICAL (tc));
13826 /* For an opaque type T1, check whether some of its properties match
13827 the corresponding ones of the other opaque type T2, and emit
13828 error messages for the inconsistent ones. */
13829 auto check_properties_for_opaque_type = [](const_tree t1, tree t2,
13830 const char *kind_msg)
13832 if (!OPAQUE_TYPE_P (t2))
13834 error ("type %s is not an opaque type", kind_msg);
13835 debug_tree (t2);
13836 return;
13838 if (!OPAQUE_MODE_P (TYPE_MODE (t2)))
13840 error ("type %s is not with opaque mode", kind_msg);
13841 debug_tree (t2);
13842 return;
13844 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13846 error ("type %s differs by %<TYPE_MODE%>", kind_msg);
13847 debug_tree (t2);
13848 return;
13850 poly_uint64 t1_size = tree_to_poly_uint64 (TYPE_SIZE (t1));
13851 poly_uint64 t2_size = tree_to_poly_uint64 (TYPE_SIZE (t2));
13852 if (maybe_ne (t1_size, t2_size))
13854 error ("type %s differs by %<TYPE_SIZE%>", kind_msg);
13855 debug_tree (t2);
13856 return;
13858 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
13860 error ("type %s differs by %<TYPE_ALIGN%>", kind_msg);
13861 debug_tree (t2);
13862 return;
13864 if (TYPE_USER_ALIGN (t1) != TYPE_USER_ALIGN (t2))
13866 error ("type %s differs by %<TYPE_USER_ALIGN%>", kind_msg);
13867 debug_tree (t2);
13868 return;
13872 if (t != tv)
13873 check_properties_for_opaque_type (t, tv, "variant");
13875 if (t != tc)
13876 check_properties_for_opaque_type (t, tc, "canonical");
13879 /* Verify type T. */
13881 void
13882 verify_type (const_tree t)
13884 bool error_found = false;
13885 tree mv = TYPE_MAIN_VARIANT (t);
13886 tree ct = TYPE_CANONICAL (t);
13888 if (OPAQUE_TYPE_P (t))
13890 verify_opaque_type (t, mv, ct);
13891 return;
13894 if (!mv)
13896 error ("main variant is not defined");
13897 error_found = true;
13899 else if (mv != TYPE_MAIN_VARIANT (mv))
13901 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13902 debug_tree (mv);
13903 error_found = true;
13905 else if (t != mv && !verify_type_variant (t, mv))
13906 error_found = true;
13908 if (!ct)
13910 else if (TYPE_CANONICAL (ct) != ct)
13912 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13913 debug_tree (ct);
13914 error_found = true;
13916 /* Method and function types cannot be used to address memory and thus
13917 TYPE_CANONICAL really matters only for determining useless conversions.
13919 FIXME: The C++ FE produces declarations of builtin functions that are not
13920 compatible with main variants. */
13921 else if (TREE_CODE (t) == FUNCTION_TYPE)
13923 else if (t != ct
13924 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13925 with variably sized arrays because their sizes are possibly
13926 gimplified to different variables. */
13927 && !variably_modified_type_p (ct, NULL)
13928 && !gimple_canonical_types_compatible_p (t, ct, false)
13929 && COMPLETE_TYPE_P (t))
13931 error ("%<TYPE_CANONICAL%> is not compatible");
13932 debug_tree (ct);
13933 error_found = true;
13936 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13937 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13939 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
13940 debug_tree (ct);
13941 error_found = true;
13943 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13945 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
13946 debug_tree (ct);
13947 debug_tree (TYPE_MAIN_VARIANT (ct));
13948 error_found = true;
13952 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13953 if (RECORD_OR_UNION_TYPE_P (t))
13955 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13956 and dangles the pointer from time to time. */
13957 if (TYPE_VFIELD (t)
13958 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13959 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13961 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
13962 debug_tree (TYPE_VFIELD (t));
13963 error_found = true;
13966 else if (TREE_CODE (t) == POINTER_TYPE)
13968 if (TYPE_NEXT_PTR_TO (t)
13969 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13971 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
13972 debug_tree (TYPE_NEXT_PTR_TO (t));
13973 error_found = true;
13976 else if (TREE_CODE (t) == REFERENCE_TYPE)
13978 if (TYPE_NEXT_REF_TO (t)
13979 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13981 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
13982 debug_tree (TYPE_NEXT_REF_TO (t));
13983 error_found = true;
13986 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13987 || TREE_CODE (t) == FIXED_POINT_TYPE)
13989 /* FIXME: The following check should pass:
13990 useless_type_conversion_p (const_cast <tree> (t),
13991 TREE_TYPE (TYPE_MIN_VALUE (t))
13992 but does not for C sizetypes in LTO. */
13995 /* Check various uses of TYPE_MAX_VALUE_RAW. */
13996 if (RECORD_OR_UNION_TYPE_P (t))
13998 if (!TYPE_BINFO (t))
14000 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14002 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14003 debug_tree (TYPE_BINFO (t));
14004 error_found = true;
14006 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14008 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14009 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14010 error_found = true;
14013 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14015 if (TYPE_METHOD_BASETYPE (t)
14016 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14017 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14019 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14020 debug_tree (TYPE_METHOD_BASETYPE (t));
14021 error_found = true;
14024 else if (TREE_CODE (t) == OFFSET_TYPE)
14026 if (TYPE_OFFSET_BASETYPE (t)
14027 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14028 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14030 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14031 debug_tree (TYPE_OFFSET_BASETYPE (t));
14032 error_found = true;
14035 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14036 || TREE_CODE (t) == FIXED_POINT_TYPE)
14038 /* FIXME: The following check should pass:
14039 useless_type_conversion_p (const_cast <tree> (t),
14040 TREE_TYPE (TYPE_MAX_VALUE (t))
14041 but does not for C sizetypes in LTO. */
14043 else if (TREE_CODE (t) == ARRAY_TYPE)
14045 if (TYPE_ARRAY_MAX_SIZE (t)
14046 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14048 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14049 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14050 error_found = true;
14053 else if (TYPE_MAX_VALUE_RAW (t))
14055 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14056 debug_tree (TYPE_MAX_VALUE_RAW (t));
14057 error_found = true;
14060 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14062 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14063 debug_tree (TYPE_LANG_SLOT_1 (t));
14064 error_found = true;
14067 /* Check various uses of TYPE_VALUES_RAW. */
14068 if (TREE_CODE (t) == ENUMERAL_TYPE)
14069 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14071 tree value = TREE_VALUE (l);
14072 tree name = TREE_PURPOSE (l);
14074 /* The C FE produces an INTEGER_CST of INTEGER_TYPE, while the C++ FE uses
14075 a CONST_DECL of ENUMERAL_TYPE. */
14076 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14078 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14079 debug_tree (value);
14080 debug_tree (name);
14081 error_found = true;
14083 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14084 && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
14085 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14087 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14088 "to the enum");
14089 debug_tree (value);
14090 debug_tree (name);
14091 error_found = true;
14093 if (TREE_CODE (name) != IDENTIFIER_NODE)
14095 error ("enum value name is not %<IDENTIFIER_NODE%>");
14096 debug_tree (value);
14097 debug_tree (name);
14098 error_found = true;
14101 else if (TREE_CODE (t) == ARRAY_TYPE)
14103 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14105 error ("array %<TYPE_DOMAIN%> is not integer type");
14106 debug_tree (TYPE_DOMAIN (t));
14107 error_found = true;
14110 else if (RECORD_OR_UNION_TYPE_P (t))
14112 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14114 error ("%<TYPE_FIELDS%> defined in incomplete type");
14115 error_found = true;
14117 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14119 /* TODO: verify properties of decls. */
14120 if (TREE_CODE (fld) == FIELD_DECL)
14122 else if (TREE_CODE (fld) == TYPE_DECL)
14124 else if (TREE_CODE (fld) == CONST_DECL)
14126 else if (VAR_P (fld))
14128 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14130 else if (TREE_CODE (fld) == USING_DECL)
14132 else if (TREE_CODE (fld) == FUNCTION_DECL)
14134 else
14136 error ("wrong tree in %<TYPE_FIELDS%> list");
14137 debug_tree (fld);
14138 error_found = true;
14142 else if (TREE_CODE (t) == INTEGER_TYPE
14143 || TREE_CODE (t) == BOOLEAN_TYPE
14144 || TREE_CODE (t) == OFFSET_TYPE
14145 || TREE_CODE (t) == REFERENCE_TYPE
14146 || TREE_CODE (t) == NULLPTR_TYPE
14147 || TREE_CODE (t) == POINTER_TYPE)
14149 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14151 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14152 "is %p",
14153 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14154 error_found = true;
14156 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14158 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14159 debug_tree (TYPE_CACHED_VALUES (t));
14160 error_found = true;
14162 /* Verify just enough of the cache to ensure that no one copied it to a new type.
14163 All copying should go through copy_node, which should clear it. */
14164 else if (TYPE_CACHED_VALUES_P (t))
14166 int i;
14167 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14168 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14169 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14171 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14172 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14173 error_found = true;
14174 break;
14178 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14179 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14181 /* C++ FE uses TREE_PURPOSE to store initial values. */
14182 if (TREE_PURPOSE (l) && in_lto_p)
14184 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14185 debug_tree (l);
14186 error_found = true;
14188 if (!TYPE_P (TREE_VALUE (l)))
14190 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14191 debug_tree (l);
14192 error_found = true;
14195 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14197 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14198 debug_tree (TYPE_VALUES_RAW (t));
14199 error_found = true;
14201 if (TREE_CODE (t) != INTEGER_TYPE
14202 && TREE_CODE (t) != BOOLEAN_TYPE
14203 && TREE_CODE (t) != OFFSET_TYPE
14204 && TREE_CODE (t) != REFERENCE_TYPE
14205 && TREE_CODE (t) != NULLPTR_TYPE
14206 && TREE_CODE (t) != POINTER_TYPE
14207 && TYPE_CACHED_VALUES_P (t))
14209 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14210 error_found = true;
14213 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14214 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
14215 of a type. */
14216 if (TREE_CODE (t) == METHOD_TYPE
14217 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14219 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14220 error_found = true;
14223 if (error_found)
14225 debug_tree (const_cast <tree> (t));
14226 internal_error ("%qs failed", __func__);
14231 /* Return 1 if ARG, interpreted as signed in its precision, is known to be
14232 always non-negative, 2 if ARG is known to be always negative, or 3 if
14233 ARG may be positive or negative. */
14236 get_range_pos_neg (tree arg)
14238 if (arg == error_mark_node)
14239 return 3;
14241 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14242 int cnt = 0;
14243 if (TREE_CODE (arg) == INTEGER_CST)
14245 wide_int w = wi::sext (wi::to_wide (arg), prec);
14246 if (wi::neg_p (w))
14247 return 2;
14248 else
14249 return 1;
14251 while (CONVERT_EXPR_P (arg)
14252 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14253 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14255 arg = TREE_OPERAND (arg, 0);
14256 /* A narrower value zero-extended into a wider type
14257 will always result in a non-negative value. */
14258 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14259 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14260 return 1;
14261 prec = TYPE_PRECISION (TREE_TYPE (arg));
14262 if (++cnt > 30)
14263 return 3;
14266 if (TREE_CODE (arg) != SSA_NAME)
14267 return 3;
14268 value_range r;
14269 while (!get_global_range_query ()->range_of_expr (r, arg) || r.kind () != VR_RANGE)
14271 gimple *g = SSA_NAME_DEF_STMT (arg);
14272 if (is_gimple_assign (g)
14273 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14275 tree t = gimple_assign_rhs1 (g);
14276 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14277 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14279 if (TYPE_UNSIGNED (TREE_TYPE (t))
14280 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14281 return 1;
14282 prec = TYPE_PRECISION (TREE_TYPE (t));
14283 arg = t;
14284 if (++cnt > 30)
14285 return 3;
14286 continue;
14289 return 3;
14291 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14293 /* For unsigned values, the "positive" range comes
14294 below the "negative" range. */
14295 if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14296 return 1;
14297 if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14298 return 2;
14300 else
14302 if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14303 return 1;
14304 if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14305 return 2;
14307 return 3;
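/* Illustrative sketch (not part of the original source), assuming OP is an
   integral SSA name or constant seen by a caller such as an
   instrumentation pass:

     int sign = get_range_pos_neg (op);
     bool known_nonnegative = (sign == 1);   // non-negative in its precision
     bool known_negative = (sign == 2);      // negative in its precision
     // sign == 3 means the sign of OP is unknown.
*/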
14313 /* Return true if ARG is marked with the nonnull attribute in the
14314 current function signature. */
14316 bool
14317 nonnull_arg_p (const_tree arg)
14319 tree t, attrs, fntype;
14320 unsigned HOST_WIDE_INT arg_num;
14322 gcc_assert (TREE_CODE (arg) == PARM_DECL
14323 && (POINTER_TYPE_P (TREE_TYPE (arg))
14324 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14326 /* The static chain decl is always non null. */
14327 if (arg == cfun->static_chain_decl)
14328 return true;
14330 /* THIS argument of method is always non-NULL. */
14331 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14332 && arg == DECL_ARGUMENTS (cfun->decl)
14333 && flag_delete_null_pointer_checks)
14334 return true;
14336 /* Values passed by reference are always non-NULL. */
14337 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14338 && flag_delete_null_pointer_checks)
14339 return true;
14341 fntype = TREE_TYPE (cfun->decl);
14342 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14344 attrs = lookup_attribute ("nonnull", attrs);
14346 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14347 if (attrs == NULL_TREE)
14348 return false;
14350 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14351 if (TREE_VALUE (attrs) == NULL_TREE)
14352 return true;
14354 /* Get the position number for ARG in the function signature. */
14355 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14357 t = DECL_CHAIN (t), arg_num++)
14359 if (t == arg)
14360 break;
14363 gcc_assert (t == arg);
14365 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14366 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14368 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14369 return true;
14373 return false;
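/* Illustrative example (hypothetical user-level declaration, not from this
   file): when the current function was declared as

     void f (void *p, void *q) __attribute__ ((nonnull (1)));

   nonnull_arg_p returns true for the PARM_DECL of P inside F's body and
   false for Q, because only argument 1 is listed in the nonnull
   attribute.  */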
14376 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14377 information. */
14379 location_t
14380 set_block (location_t loc, tree block)
14382 location_t pure_loc = get_pure_location (loc);
14383 source_range src_range = get_range_from_loc (line_table, loc);
14384 unsigned discriminator = get_discriminator_from_loc (line_table, loc);
14385 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block, discriminator);
14388 location_t
14389 set_source_range (tree expr, location_t start, location_t finish)
14391 source_range src_range;
14392 src_range.m_start = start;
14393 src_range.m_finish = finish;
14394 return set_source_range (expr, src_range);
14397 location_t
14398 set_source_range (tree expr, source_range src_range)
14400 if (!EXPR_P (expr))
14401 return UNKNOWN_LOCATION;
14403 location_t expr_location = EXPR_LOCATION (expr);
14404 location_t pure_loc = get_pure_location (expr_location);
14405 unsigned discriminator = get_discriminator_from_loc (expr_location);
14406 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14407 pure_loc,
14408 src_range,
14409 NULL,
14410 discriminator);
14411 SET_EXPR_LOCATION (expr, adhoc);
14412 return adhoc;
14415 /* Return EXPR, potentially wrapped in an expression node with location LOC,
14416 if !CAN_HAVE_LOCATION_P (expr).
14418 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14419 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14421 Wrapper nodes can be identified using location_wrapper_p. */
14423 tree
14424 maybe_wrap_with_location (tree expr, location_t loc)
14426 if (expr == NULL)
14427 return NULL;
14428 if (loc == UNKNOWN_LOCATION)
14429 return expr;
14430 if (CAN_HAVE_LOCATION_P (expr))
14431 return expr;
14432 /* We should only be adding wrappers for constants and for decls,
14433 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14434 gcc_assert (CONSTANT_CLASS_P (expr)
14435 || DECL_P (expr)
14436 || EXCEPTIONAL_CLASS_P (expr));
14438 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14439 any impact of the wrapper nodes. */
14440 if (EXCEPTIONAL_CLASS_P (expr) || error_operand_p (expr))
14441 return expr;
14443 /* Compiler-generated temporary variables don't need a wrapper. */
14444 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14445 return expr;
14447 /* If any auto_suppress_location_wrappers are active, don't create
14448 wrappers. */
14449 if (suppress_location_wrappers > 0)
14450 return expr;
14452 tree_code code
14453 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14454 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14455 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14456 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14457 /* Mark this node as being a wrapper. */
14458 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14459 return wrapper;
14462 int suppress_location_wrappers;
14464 /* Return the name of combined function FN, for debugging purposes. */
14466 const char *
14467 combined_fn_name (combined_fn fn)
14469 if (builtin_fn_p (fn))
14471 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14472 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14474 else
14475 return internal_fn_name (as_internal_fn (fn));
14478 /* Return a bitmap with a bit set corresponding to each argument in
14479 a function call type FNTYPE declared with attribute nonnull,
14480 or null if none of the function's arguments are nonnull. The caller
14481 must free the bitmap. */
14483 bitmap
14484 get_nonnull_args (const_tree fntype)
14486 if (fntype == NULL_TREE)
14487 return NULL;
14489 bitmap argmap = NULL;
14490 if (TREE_CODE (fntype) == METHOD_TYPE)
14492 /* The this pointer in C++ non-static member functions is
14493 implicitly nonnull whether or not it's declared as such. */
14494 argmap = BITMAP_ALLOC (NULL);
14495 bitmap_set_bit (argmap, 0);
14498 tree attrs = TYPE_ATTRIBUTES (fntype);
14499 if (!attrs)
14500 return argmap;
14502 /* A function declaration can specify multiple attribute nonnull,
14503 each with zero or more arguments. The loop below creates a bitmap
14504 representing a union of all the arguments. An empty (but non-null)
14505 bitmap means that all arguments have been declared nonnull. */
14506 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14508 attrs = lookup_attribute ("nonnull", attrs);
14509 if (!attrs)
14510 break;
14512 if (!argmap)
14513 argmap = BITMAP_ALLOC (NULL);
14515 if (!TREE_VALUE (attrs))
14517 /* Clear the bitmap in case a previous attribute nonnull
14518 set it and this one overrides it for all arguments. */
14519 bitmap_clear (argmap);
14520 return argmap;
14523 /* Iterate over the indices of the arguments declared nonnull
14524 and set a bit for each. */
14525 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14527 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14528 bitmap_set_bit (argmap, val);
14532 return argmap;
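/* Illustrative sketch (not part of the original source): a diagnostic pass
   might use the returned bitmap like this, assuming FNTYPE and ARGNO are
   already at hand and warn_about_null_argument is a hypothetical helper:

     if (bitmap nonnullargs = get_nonnull_args (fntype))
       {
         if (bitmap_empty_p (nonnullargs)
             || bitmap_bit_p (nonnullargs, argno))
           warn_about_null_argument (argno);
         BITMAP_FREE (nonnullargs);
       }
*/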
14535 /* Returns true if TYPE is a type where it and all of its subobjects
14536 (recursively) are of structure, union, or array type. */
14538 bool
14539 is_empty_type (const_tree type)
14541 if (RECORD_OR_UNION_TYPE_P (type))
14543 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14544 if (TREE_CODE (field) == FIELD_DECL
14545 && !DECL_PADDING_P (field)
14546 && !is_empty_type (TREE_TYPE (field)))
14547 return false;
14548 return true;
14550 else if (TREE_CODE (type) == ARRAY_TYPE)
14551 return (integer_minus_onep (array_type_nelts (type))
14552 || TYPE_DOMAIN (type) == NULL_TREE
14553 || is_empty_type (TREE_TYPE (type)));
14554 return false;
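/* Illustrative example (hypothetical source-level types, not from this
   file): for C++ types such as

     struct empty { };
     struct still_empty { empty e; empty a[3]; };

   is_empty_type returns true, while any non-padding FIELD_DECL whose type
   is not itself empty makes it return false.  */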
14557 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14558 that shouldn't be passed via stack. */
14560 bool
14561 default_is_empty_record (const_tree type)
14563 if (!abi_version_at_least (12))
14564 return false;
14566 if (type == error_mark_node)
14567 return false;
14569 if (TREE_ADDRESSABLE (type))
14570 return false;
14572 return is_empty_type (TYPE_MAIN_VARIANT (type));
14575 /* Determine whether TYPE is a structure with a flexible array member,
14576 or a union containing such a structure (possibly recursively). */
14578 bool
14579 flexible_array_type_p (const_tree type)
14581 tree x, last;
14582 switch (TREE_CODE (type))
14584 case RECORD_TYPE:
14585 last = NULL_TREE;
14586 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14587 if (TREE_CODE (x) == FIELD_DECL)
14588 last = x;
14589 if (last == NULL_TREE)
14590 return false;
14591 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14592 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14593 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14594 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14595 return true;
14596 return false;
14597 case UNION_TYPE:
14598 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14600 if (TREE_CODE (x) == FIELD_DECL
14601 && flexible_array_type_p (TREE_TYPE (x)))
14602 return true;
14604 return false;
14605 default:
14606 return false;
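/* Illustrative example (hypothetical source-level type, not from this
   file): the function returns true for

     struct msg { int len; char data[]; };

   and for a union containing such a struct as a member, but false when
   the trailing array has a known bound (e.g. char data[1]).  */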
14610 /* Like int_size_in_bytes, but handle empty records specially. */
14612 HOST_WIDE_INT
14613 arg_int_size_in_bytes (const_tree type)
14615 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14618 /* Like size_in_bytes, but handle empty records specially. */
14620 tree
14621 arg_size_in_bytes (const_tree type)
14623 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14626 /* Return true if an expression with CODE has to have the same result type as
14627 its first operand. */
14629 bool
14630 expr_type_first_operand_type_p (tree_code code)
14632 switch (code)
14634 case NEGATE_EXPR:
14635 case ABS_EXPR:
14636 case BIT_NOT_EXPR:
14637 case PAREN_EXPR:
14638 case CONJ_EXPR:
14640 case PLUS_EXPR:
14641 case MINUS_EXPR:
14642 case MULT_EXPR:
14643 case TRUNC_DIV_EXPR:
14644 case CEIL_DIV_EXPR:
14645 case FLOOR_DIV_EXPR:
14646 case ROUND_DIV_EXPR:
14647 case TRUNC_MOD_EXPR:
14648 case CEIL_MOD_EXPR:
14649 case FLOOR_MOD_EXPR:
14650 case ROUND_MOD_EXPR:
14651 case RDIV_EXPR:
14652 case EXACT_DIV_EXPR:
14653 case MIN_EXPR:
14654 case MAX_EXPR:
14655 case BIT_IOR_EXPR:
14656 case BIT_XOR_EXPR:
14657 case BIT_AND_EXPR:
14659 case LSHIFT_EXPR:
14660 case RSHIFT_EXPR:
14661 case LROTATE_EXPR:
14662 case RROTATE_EXPR:
14663 return true;
14665 default:
14666 return false;
14670 /* Return a typenode for the "standard" C type with a given name. */
14671 tree
14672 get_typenode_from_name (const char *name)
14674 if (name == NULL || *name == '\0')
14675 return NULL_TREE;
14677 if (strcmp (name, "char") == 0)
14678 return char_type_node;
14679 if (strcmp (name, "unsigned char") == 0)
14680 return unsigned_char_type_node;
14681 if (strcmp (name, "signed char") == 0)
14682 return signed_char_type_node;
14684 if (strcmp (name, "short int") == 0)
14685 return short_integer_type_node;
14686 if (strcmp (name, "short unsigned int") == 0)
14687 return short_unsigned_type_node;
14689 if (strcmp (name, "int") == 0)
14690 return integer_type_node;
14691 if (strcmp (name, "unsigned int") == 0)
14692 return unsigned_type_node;
14694 if (strcmp (name, "long int") == 0)
14695 return long_integer_type_node;
14696 if (strcmp (name, "long unsigned int") == 0)
14697 return long_unsigned_type_node;
14699 if (strcmp (name, "long long int") == 0)
14700 return long_long_integer_type_node;
14701 if (strcmp (name, "long long unsigned int") == 0)
14702 return long_long_unsigned_type_node;
14704 gcc_unreachable ();
14707 /* List of pointer types used to declare builtins before we have seen their
14708 real declaration.
14710 Keep the size up to date in tree.h ! */
14711 const builtin_structptr_type builtin_structptr_types[6] =
14713 { fileptr_type_node, ptr_type_node, "FILE" },
14714 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14715 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14716 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14717 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14718 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14721 /* Return the maximum object size. */
14723 tree
14724 max_object_size (void)
14726 /* To do: Make this a configurable parameter. */
14727 return TYPE_MAX_VALUE (ptrdiff_type_node);
14730 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14731 parameter default to false and that weeds out error_mark_node. */
14733 bool
14734 verify_type_context (location_t loc, type_context_kind context,
14735 const_tree type, bool silent_p)
14737 if (type == error_mark_node)
14738 return true;
14740 gcc_assert (TYPE_P (type));
14741 return (!targetm.verify_type_context
14742 || targetm.verify_type_context (loc, context, type, silent_p));
14745 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14746 delete operators. Return false if they may or may not name such
14747 a pair and, when nonnull, set *PCERTAIN to true if they certainly
14748 do not. */
14750 bool
14751 valid_new_delete_pair_p (tree new_asm, tree delete_asm,
14752 bool *pcertain /* = NULL */)
14754 bool certain;
14755 if (!pcertain)
14756 pcertain = &certain;
14758 const char *new_name = IDENTIFIER_POINTER (new_asm);
14759 const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14760 unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14761 unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14763 /* The following failures are due to invalid names so they're not
14764 considered certain mismatches. */
14765 *pcertain = false;
14767 if (new_len < 5 || delete_len < 6)
14768 return false;
14769 if (new_name[0] == '_')
14770 ++new_name, --new_len;
14771 if (new_name[0] == '_')
14772 ++new_name, --new_len;
14773 if (delete_name[0] == '_')
14774 ++delete_name, --delete_len;
14775 if (delete_name[0] == '_')
14776 ++delete_name, --delete_len;
14777 if (new_len < 4 || delete_len < 5)
14778 return false;
14780 /* The following failures are due to names of user-defined operators
14781 so they're also not considered certain mismatches. */
14783 /* *_len is now just the length after initial underscores. */
14784 if (new_name[0] != 'Z' || new_name[1] != 'n')
14785 return false;
14786 if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14787 return false;
14789 /* The following failures are certain mismatches. */
14790 *pcertain = true;
14792 /* _Znw must match _Zdl, _Zna must match _Zda. */
14793 if ((new_name[2] != 'w' || delete_name[2] != 'l')
14794 && (new_name[2] != 'a' || delete_name[2] != 'a'))
14795 return false;
14796 /* 'j', 'm' and 'y' correspond to size_t. */
14797 if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14798 return false;
14799 if (delete_name[3] != 'P' || delete_name[4] != 'v')
14800 return false;
14801 if (new_len == 4
14802 || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14804 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14805 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14806 if (delete_len == 5)
14807 return true;
14808 if (delete_len == 6 && delete_name[5] == new_name[3])
14809 return true;
14810 if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14811 return true;
14813 else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14814 || (new_len == 33
14815 && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14817 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14818 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
14819 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14820 if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
14821 return true;
14822 if (delete_len == 21
14823 && delete_name[5] == new_name[3]
14824 && !memcmp (delete_name + 6, "St11align_val_t", 15))
14825 return true;
14826 if (delete_len == 34
14827 && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
14828 return true;
14831 /* The negative result is conservative. */
14832 *pcertain = false;
14833 return false;
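/* Illustrative example (the assembler names below are ordinary Itanium
   ABI manglings on an LP64 target, shown only for exposition): the pair

     _Znwm   operator new (unsigned long)
     _ZdlPv  operator delete (void *)

   is accepted, while pairing the array form _Znam with the scalar
   _ZdlPv fails and sets *PCERTAIN, since the 'a'/'l' kinds mismatch.  */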
14836 /* Return the zero-based number corresponding to the argument being
14837 deallocated if FNDECL is a deallocation function or an out-of-bounds
14838 value if it isn't. */
14840 unsigned
14841 fndecl_dealloc_argno (tree fndecl)
14843 /* A call to operator delete isn't recognized as one to a built-in. */
14844 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
14846 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
14847 return 0;
14849 /* Avoid placement delete that's not been inlined. */
14850 tree fname = DECL_ASSEMBLER_NAME (fndecl);
14851 if (id_equal (fname, "_ZdlPvS_") // ordinary form
14852 || id_equal (fname, "_ZdaPvS_")) // array form
14853 return UINT_MAX;
14854 return 0;
14857 /* TODO: Handle user-defined functions with attribute malloc? Handle
14858 known non-built-ins like fopen? */
14859 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14861 switch (DECL_FUNCTION_CODE (fndecl))
14863 case BUILT_IN_FREE:
14864 case BUILT_IN_REALLOC:
14865 return 0;
14866 default:
14867 break;
14869 return UINT_MAX;
14872 tree attrs = DECL_ATTRIBUTES (fndecl);
14873 if (!attrs)
14874 return UINT_MAX;
14876 for (tree atfree = attrs;
14877 (atfree = lookup_attribute ("*dealloc", atfree));
14878 atfree = TREE_CHAIN (atfree))
14880 tree alloc = TREE_VALUE (atfree);
14881 if (!alloc)
14882 continue;
14884 tree pos = TREE_CHAIN (alloc);
14885 if (!pos)
14886 return 0;
14888 pos = TREE_VALUE (pos);
14889 return TREE_INT_CST_LOW (pos) - 1;
14892 return UINT_MAX;
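/* Illustrative sketch (not part of the original source): for the built-in
   declarations of "free" and "realloc", and for the replaceable forms of
   "operator delete", the result is 0 (the released pointer is the first
   argument).  A caller can therefore test, with CALL a gcall and FNDECL
   its callee (both assumed to be at hand):

     unsigned argno = fndecl_dealloc_argno (fndecl);
     if (argno < gimple_call_num_args (call))
       // gimple_call_arg (call, argno) is the object being deallocated.
*/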
14895 /* If EXPR refers to a character array or pointer declared attribute
14896 nonstring, return a decl for that array or pointer and set *REF
14897 to the referenced enclosing object or pointer. Otherwise return
14898 null. */
14900 tree
14901 get_attr_nonstring_decl (tree expr, tree *ref)
14903 tree decl = expr;
14904 tree var = NULL_TREE;
14905 if (TREE_CODE (decl) == SSA_NAME)
14907 gimple *def = SSA_NAME_DEF_STMT (decl);
14909 if (is_gimple_assign (def))
14911 tree_code code = gimple_assign_rhs_code (def);
14912 if (code == ADDR_EXPR
14913 || code == COMPONENT_REF
14914 || code == VAR_DECL)
14915 decl = gimple_assign_rhs1 (def);
14917 else
14918 var = SSA_NAME_VAR (decl);
14921 if (TREE_CODE (decl) == ADDR_EXPR)
14922 decl = TREE_OPERAND (decl, 0);
14924 /* To simplify calling code, store the referenced DECL regardless of
14925 the attribute determined below, but avoid storing the SSA_NAME_VAR
14926 obtained above (it's not useful for dataflow purposes). */
14927 if (ref)
14928 *ref = decl;
14930 /* Use the SSA_NAME_VAR that was determined above to see if it's
14931 declared nonstring. Otherwise drill down into the referenced
14932 DECL. */
14933 if (var)
14934 decl = var;
14935 else if (TREE_CODE (decl) == ARRAY_REF)
14936 decl = TREE_OPERAND (decl, 0);
14937 else if (TREE_CODE (decl) == COMPONENT_REF)
14938 decl = TREE_OPERAND (decl, 1);
14939 else if (TREE_CODE (decl) == MEM_REF)
14940 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
14942 if (DECL_P (decl)
14943 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
14944 return decl;
14946 return NULL_TREE;
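/* Illustrative example (hypothetical user-level declaration, not from this
   file): given

     char name[8] __attribute__ ((nonstring));

   get_attr_nonstring_decl returns the VAR_DECL for NAME when EXPR refers
   to it directly, through an ADDR_EXPR, COMPONENT_REF, or an SSA name,
   and returns null for declarations without the attribute.  */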
14949 /* Return the length of the attribute names string
14950 if the arglist chain has more than one entry, -1 otherwise. */
14953 get_target_clone_attr_len (tree arglist)
14955 tree arg;
14956 int str_len_sum = 0;
14957 int argnum = 0;
14959 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
14961 const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
14962 size_t len = strlen (str);
14963 str_len_sum += len + 1;
14964 for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
14965 argnum++;
14966 argnum++;
14968 if (argnum <= 1)
14969 return -1;
14970 return str_len_sum;
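/* Illustrative example (hypothetical declaration, not from this file): for

     __attribute__ ((target_clones ("avx2", "arch=atom", "default")))
     int foo (void);

   the arglist holds three strings, so the result is the summed lengths
   plus one separator each: (4 + 1) + (9 + 1) + (7 + 1) = 23.  With a
   single version, e.g. target_clones ("default"), the result is -1.  */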
14973 void
14974 tree_cc_finalize (void)
14976 clear_nonstandard_integer_type_cache ();
14979 #if CHECKING_P
14981 namespace selftest {
14983 /* Selftests for tree. */
14985 /* Verify that integer constants are sane. */
14987 static void
14988 test_integer_constants ()
14990 ASSERT_TRUE (integer_type_node != NULL);
14991 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14993 tree type = integer_type_node;
14995 tree zero = build_zero_cst (type);
14996 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14997 ASSERT_EQ (type, TREE_TYPE (zero));
14999 tree one = build_int_cst (type, 1);
15000 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15001 ASSERT_EQ (type, TREE_TYPE (one));
15004 /* Verify identifiers. */
15006 static void
15007 test_identifiers ()
15009 tree identifier = get_identifier ("foo");
15010 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15011 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15014 /* Verify LABEL_DECL. */
15016 static void
15017 test_labels ()
15019 tree identifier = get_identifier ("err");
15020 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15021 identifier, void_type_node);
15022 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15023 ASSERT_FALSE (FORCED_LABEL (label_decl));
15026 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15027 are given by VALS. */
15029 static tree
15030 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
15032 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15033 tree_vector_builder builder (type, vals.length (), 1);
15034 builder.splice (vals);
15035 return builder.build ();
15038 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15040 static void
15041 check_vector_cst (const vec<tree> &expected, tree actual)
15043 ASSERT_KNOWN_EQ (expected.length (),
15044 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15045 for (unsigned int i = 0; i < expected.length (); ++i)
15046 ASSERT_EQ (wi::to_wide (expected[i]),
15047 wi::to_wide (vector_cst_elt (actual, i)));
15050 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15051 and that its elements match EXPECTED. */
15053 static void
15054 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
15055 unsigned int npatterns)
15057 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15058 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15059 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15060 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15061 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15062 check_vector_cst (expected, actual);
15065 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15066 and NPATTERNS background elements, and that its elements match
15067 EXPECTED. */
15069 static void
15070 check_vector_cst_fill (const vec<tree> &expected, tree actual,
15071 unsigned int npatterns)
15073 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15074 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15075 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15076 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15077 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15078 check_vector_cst (expected, actual);
15081 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15082 and that its elements match EXPECTED. */
15084 static void
15085 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
15086 unsigned int npatterns)
15088 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15089 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15090 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15091 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15092 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15093 check_vector_cst (expected, actual);
15096 /* Test the creation of VECTOR_CSTs. */
15098 static void
15099 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15101 auto_vec<tree, 8> elements (8);
15102 elements.quick_grow (8);
15103 tree element_type = build_nonstandard_integer_type (16, true);
15104 tree vector_type = build_vector_type (element_type, 8);
15106 /* Test a simple linear series with a base of 0 and a step of 1:
15107 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
15108 for (unsigned int i = 0; i < 8; ++i)
15109 elements[i] = build_int_cst (element_type, i);
15110 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15111 check_vector_cst_stepped (elements, vector, 1);
15113 /* Try the same with the first element replaced by 100:
15114 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
15115 elements[0] = build_int_cst (element_type, 100);
15116 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15117 check_vector_cst_stepped (elements, vector, 1);
15119 /* Try a series that wraps around.
15120 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
15121 for (unsigned int i = 1; i < 8; ++i)
15122 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15123 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15124 check_vector_cst_stepped (elements, vector, 1);
15126 /* Try a downward series:
15127 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
15128 for (unsigned int i = 1; i < 8; ++i)
15129 elements[i] = build_int_cst (element_type, 80 - i);
15130 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15131 check_vector_cst_stepped (elements, vector, 1);
15133 /* Try two interleaved series with different bases and steps:
15134 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
15135 elements[1] = build_int_cst (element_type, 53);
15136 for (unsigned int i = 2; i < 8; i += 2)
15138 elements[i] = build_int_cst (element_type, 70 - i * 2);
15139 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15141 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15142 check_vector_cst_stepped (elements, vector, 2);
15144 /* Try a duplicated value:
15145 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
15146 for (unsigned int i = 1; i < 8; ++i)
15147 elements[i] = elements[0];
15148 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15149 check_vector_cst_duplicate (elements, vector, 1);
15151 /* Try an interleaved duplicated value:
15152 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
15153 elements[1] = build_int_cst (element_type, 55);
15154 for (unsigned int i = 2; i < 8; ++i)
15155 elements[i] = elements[i - 2];
15156 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15157 check_vector_cst_duplicate (elements, vector, 2);
15159 /* Try a duplicated value with 2 exceptions
15160 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
15161 elements[0] = build_int_cst (element_type, 41);
15162 elements[1] = build_int_cst (element_type, 97);
15163 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15164 check_vector_cst_fill (elements, vector, 2);
15166 /* Try with and without a step
15167 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
15168 for (unsigned int i = 3; i < 8; i += 2)
15169 elements[i] = build_int_cst (element_type, i * 7);
15170 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15171 check_vector_cst_stepped (elements, vector, 2);
15173 /* Try a fully-general constant:
15174 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
15175 elements[5] = build_int_cst (element_type, 9990);
15176 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15177 check_vector_cst_fill (elements, vector, 4);
15180 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15181 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15182 modifying its argument in-place. */
15184 static void
15185 check_strip_nops (tree node, tree expected)
15187 STRIP_NOPS (node);
15188 ASSERT_EQ (expected, node);
15191 /* Verify location wrappers. */
15193 static void
15194 test_location_wrappers ()
15196 location_t loc = BUILTINS_LOCATION;
15198 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15200 /* Wrapping a constant. */
15201 tree int_cst = build_int_cst (integer_type_node, 42);
15202 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15203 ASSERT_FALSE (location_wrapper_p (int_cst));
15205 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15206 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15207 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15208 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15210 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
15211 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15213 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
15214 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15215 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15216 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15218 /* Wrapping a STRING_CST. */
15219 tree string_cst = build_string (4, "foo");
15220 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15221 ASSERT_FALSE (location_wrapper_p (string_cst));
15223 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15224 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15225 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15226 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15227 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
15230 /* Wrapping a variable. */
15231 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15232 get_identifier ("some_int_var"),
15233 integer_type_node);
15234 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15235 ASSERT_FALSE (location_wrapper_p (int_var));
15237 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15238 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15239 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15240 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15242 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15243 wrapper. */
15244 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15245 ASSERT_FALSE (location_wrapper_p (r_cast));
15246 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15248 /* Verify that STRIP_NOPS removes wrappers. */
15249 check_strip_nops (wrapped_int_cst, int_cst);
15250 check_strip_nops (wrapped_string_cst, string_cst);
15251 check_strip_nops (wrapped_int_var, int_var);
15254 /* Test various tree predicates. Verify that location wrappers don't
15255 affect the results. */
15257 static void
15258 test_predicates ()
15260 /* Build various constants and wrappers around them. */
15262 location_t loc = BUILTINS_LOCATION;
15264 tree i_0 = build_int_cst (integer_type_node, 0);
15265 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15267 tree i_1 = build_int_cst (integer_type_node, 1);
15268 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15270 tree i_m1 = build_int_cst (integer_type_node, -1);
15271 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15273 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15274 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15275 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15276 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15277 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15278 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15280 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15281 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15282 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15284 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15285 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15286 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
15288 /* TODO: vector constants. */
15290 /* Test integer_onep. */
15291 ASSERT_FALSE (integer_onep (i_0));
15292 ASSERT_FALSE (integer_onep (wr_i_0));
15293 ASSERT_TRUE (integer_onep (i_1));
15294 ASSERT_TRUE (integer_onep (wr_i_1));
15295 ASSERT_FALSE (integer_onep (i_m1));
15296 ASSERT_FALSE (integer_onep (wr_i_m1));
15297 ASSERT_FALSE (integer_onep (f_0));
15298 ASSERT_FALSE (integer_onep (wr_f_0));
15299 ASSERT_FALSE (integer_onep (f_1));
15300 ASSERT_FALSE (integer_onep (wr_f_1));
15301 ASSERT_FALSE (integer_onep (f_m1));
15302 ASSERT_FALSE (integer_onep (wr_f_m1));
15303 ASSERT_FALSE (integer_onep (c_i_0));
15304 ASSERT_TRUE (integer_onep (c_i_1));
15305 ASSERT_FALSE (integer_onep (c_i_m1));
15306 ASSERT_FALSE (integer_onep (c_f_0));
15307 ASSERT_FALSE (integer_onep (c_f_1));
15308 ASSERT_FALSE (integer_onep (c_f_m1));
15310 /* Test integer_zerop. */
15311 ASSERT_TRUE (integer_zerop (i_0));
15312 ASSERT_TRUE (integer_zerop (wr_i_0));
15313 ASSERT_FALSE (integer_zerop (i_1));
15314 ASSERT_FALSE (integer_zerop (wr_i_1));
15315 ASSERT_FALSE (integer_zerop (i_m1));
15316 ASSERT_FALSE (integer_zerop (wr_i_m1));
15317 ASSERT_FALSE (integer_zerop (f_0));
15318 ASSERT_FALSE (integer_zerop (wr_f_0));
15319 ASSERT_FALSE (integer_zerop (f_1));
15320 ASSERT_FALSE (integer_zerop (wr_f_1));
15321 ASSERT_FALSE (integer_zerop (f_m1));
15322 ASSERT_FALSE (integer_zerop (wr_f_m1));
15323 ASSERT_TRUE (integer_zerop (c_i_0));
15324 ASSERT_FALSE (integer_zerop (c_i_1));
15325 ASSERT_FALSE (integer_zerop (c_i_m1));
15326 ASSERT_FALSE (integer_zerop (c_f_0));
15327 ASSERT_FALSE (integer_zerop (c_f_1));
15328 ASSERT_FALSE (integer_zerop (c_f_m1));
15330 /* Test integer_all_onesp. */
15331 ASSERT_FALSE (integer_all_onesp (i_0));
15332 ASSERT_FALSE (integer_all_onesp (wr_i_0));
15333 ASSERT_FALSE (integer_all_onesp (i_1));
15334 ASSERT_FALSE (integer_all_onesp (wr_i_1));
15335 ASSERT_TRUE (integer_all_onesp (i_m1));
15336 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15337 ASSERT_FALSE (integer_all_onesp (f_0));
15338 ASSERT_FALSE (integer_all_onesp (wr_f_0));
15339 ASSERT_FALSE (integer_all_onesp (f_1));
15340 ASSERT_FALSE (integer_all_onesp (wr_f_1));
15341 ASSERT_FALSE (integer_all_onesp (f_m1));
15342 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15343 ASSERT_FALSE (integer_all_onesp (c_i_0));
15344 ASSERT_FALSE (integer_all_onesp (c_i_1));
15345 ASSERT_FALSE (integer_all_onesp (c_i_m1));
15346 ASSERT_FALSE (integer_all_onesp (c_f_0));
15347 ASSERT_FALSE (integer_all_onesp (c_f_1));
15348 ASSERT_FALSE (integer_all_onesp (c_f_m1));
15350 /* Test integer_minus_onep. */
15351 ASSERT_FALSE (integer_minus_onep (i_0));
15352 ASSERT_FALSE (integer_minus_onep (wr_i_0));
15353 ASSERT_FALSE (integer_minus_onep (i_1));
15354 ASSERT_FALSE (integer_minus_onep (wr_i_1));
15355 ASSERT_TRUE (integer_minus_onep (i_m1));
15356 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15357 ASSERT_FALSE (integer_minus_onep (f_0));
15358 ASSERT_FALSE (integer_minus_onep (wr_f_0));
15359 ASSERT_FALSE (integer_minus_onep (f_1));
15360 ASSERT_FALSE (integer_minus_onep (wr_f_1));
15361 ASSERT_FALSE (integer_minus_onep (f_m1));
15362 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15363 ASSERT_FALSE (integer_minus_onep (c_i_0));
15364 ASSERT_FALSE (integer_minus_onep (c_i_1));
15365 ASSERT_TRUE (integer_minus_onep (c_i_m1));
15366 ASSERT_FALSE (integer_minus_onep (c_f_0));
15367 ASSERT_FALSE (integer_minus_onep (c_f_1));
15368 ASSERT_FALSE (integer_minus_onep (c_f_m1));
15370 /* Test integer_each_onep. */
15371 ASSERT_FALSE (integer_each_onep (i_0));
15372 ASSERT_FALSE (integer_each_onep (wr_i_0));
15373 ASSERT_TRUE (integer_each_onep (i_1));
15374 ASSERT_TRUE (integer_each_onep (wr_i_1));
15375 ASSERT_FALSE (integer_each_onep (i_m1));
15376 ASSERT_FALSE (integer_each_onep (wr_i_m1));
15377 ASSERT_FALSE (integer_each_onep (f_0));
15378 ASSERT_FALSE (integer_each_onep (wr_f_0));
15379 ASSERT_FALSE (integer_each_onep (f_1));
15380 ASSERT_FALSE (integer_each_onep (wr_f_1));
15381 ASSERT_FALSE (integer_each_onep (f_m1));
15382 ASSERT_FALSE (integer_each_onep (wr_f_m1));
15383 ASSERT_FALSE (integer_each_onep (c_i_0));
15384 ASSERT_FALSE (integer_each_onep (c_i_1));
15385 ASSERT_FALSE (integer_each_onep (c_i_m1));
15386 ASSERT_FALSE (integer_each_onep (c_f_0));
15387 ASSERT_FALSE (integer_each_onep (c_f_1));
15388 ASSERT_FALSE (integer_each_onep (c_f_m1));
15390 /* Test integer_truep. */
15391 ASSERT_FALSE (integer_truep (i_0));
15392 ASSERT_FALSE (integer_truep (wr_i_0));
15393 ASSERT_TRUE (integer_truep (i_1));
15394 ASSERT_TRUE (integer_truep (wr_i_1));
15395 ASSERT_FALSE (integer_truep (i_m1));
15396 ASSERT_FALSE (integer_truep (wr_i_m1));
15397 ASSERT_FALSE (integer_truep (f_0));
15398 ASSERT_FALSE (integer_truep (wr_f_0));
15399 ASSERT_FALSE (integer_truep (f_1));
15400 ASSERT_FALSE (integer_truep (wr_f_1));
15401 ASSERT_FALSE (integer_truep (f_m1));
15402 ASSERT_FALSE (integer_truep (wr_f_m1));
15403 ASSERT_FALSE (integer_truep (c_i_0));
15404 ASSERT_TRUE (integer_truep (c_i_1));
15405 ASSERT_FALSE (integer_truep (c_i_m1));
15406 ASSERT_FALSE (integer_truep (c_f_0));
15407 ASSERT_FALSE (integer_truep (c_f_1));
15408 ASSERT_FALSE (integer_truep (c_f_m1));
15410 /* Test integer_nonzerop. */
15411 ASSERT_FALSE (integer_nonzerop (i_0));
15412 ASSERT_FALSE (integer_nonzerop (wr_i_0));
15413 ASSERT_TRUE (integer_nonzerop (i_1));
15414 ASSERT_TRUE (integer_nonzerop (wr_i_1));
15415 ASSERT_TRUE (integer_nonzerop (i_m1));
15416 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15417 ASSERT_FALSE (integer_nonzerop (f_0));
15418 ASSERT_FALSE (integer_nonzerop (wr_f_0));
15419 ASSERT_FALSE (integer_nonzerop (f_1));
15420 ASSERT_FALSE (integer_nonzerop (wr_f_1));
15421 ASSERT_FALSE (integer_nonzerop (f_m1));
15422 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15423 ASSERT_FALSE (integer_nonzerop (c_i_0));
15424 ASSERT_TRUE (integer_nonzerop (c_i_1));
15425 ASSERT_TRUE (integer_nonzerop (c_i_m1));
15426 ASSERT_FALSE (integer_nonzerop (c_f_0));
15427 ASSERT_FALSE (integer_nonzerop (c_f_1));
15428 ASSERT_FALSE (integer_nonzerop (c_f_m1));
15430 /* Test real_zerop. */
15431 ASSERT_FALSE (real_zerop (i_0));
15432 ASSERT_FALSE (real_zerop (wr_i_0));
15433 ASSERT_FALSE (real_zerop (i_1));
15434 ASSERT_FALSE (real_zerop (wr_i_1));
15435 ASSERT_FALSE (real_zerop (i_m1));
15436 ASSERT_FALSE (real_zerop (wr_i_m1));
15437 ASSERT_TRUE (real_zerop (f_0));
15438 ASSERT_TRUE (real_zerop (wr_f_0));
15439 ASSERT_FALSE (real_zerop (f_1));
15440 ASSERT_FALSE (real_zerop (wr_f_1));
15441 ASSERT_FALSE (real_zerop (f_m1));
15442 ASSERT_FALSE (real_zerop (wr_f_m1));
15443 ASSERT_FALSE (real_zerop (c_i_0));
15444 ASSERT_FALSE (real_zerop (c_i_1));
15445 ASSERT_FALSE (real_zerop (c_i_m1));
15446 ASSERT_TRUE (real_zerop (c_f_0));
15447 ASSERT_FALSE (real_zerop (c_f_1));
15448 ASSERT_FALSE (real_zerop (c_f_m1));
15450 /* Test real_onep. */
15451 ASSERT_FALSE (real_onep (i_0));
15452 ASSERT_FALSE (real_onep (wr_i_0));
15453 ASSERT_FALSE (real_onep (i_1));
15454 ASSERT_FALSE (real_onep (wr_i_1));
15455 ASSERT_FALSE (real_onep (i_m1));
15456 ASSERT_FALSE (real_onep (wr_i_m1));
15457 ASSERT_FALSE (real_onep (f_0));
15458 ASSERT_FALSE (real_onep (wr_f_0));
15459 ASSERT_TRUE (real_onep (f_1));
15460 ASSERT_TRUE (real_onep (wr_f_1));
15461 ASSERT_FALSE (real_onep (f_m1));
15462 ASSERT_FALSE (real_onep (wr_f_m1));
15463 ASSERT_FALSE (real_onep (c_i_0));
15464 ASSERT_FALSE (real_onep (c_i_1));
15465 ASSERT_FALSE (real_onep (c_i_m1));
15466 ASSERT_FALSE (real_onep (c_f_0));
15467 ASSERT_TRUE (real_onep (c_f_1));
15468 ASSERT_FALSE (real_onep (c_f_m1));
15470 /* Test real_minus_onep. */
15471 ASSERT_FALSE (real_minus_onep (i_0));
15472 ASSERT_FALSE (real_minus_onep (wr_i_0));
15473 ASSERT_FALSE (real_minus_onep (i_1));
15474 ASSERT_FALSE (real_minus_onep (wr_i_1));
15475 ASSERT_FALSE (real_minus_onep (i_m1));
15476 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15477 ASSERT_FALSE (real_minus_onep (f_0));
15478 ASSERT_FALSE (real_minus_onep (wr_f_0));
15479 ASSERT_FALSE (real_minus_onep (f_1));
15480 ASSERT_FALSE (real_minus_onep (wr_f_1));
15481 ASSERT_TRUE (real_minus_onep (f_m1));
15482 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15483 ASSERT_FALSE (real_minus_onep (c_i_0));
15484 ASSERT_FALSE (real_minus_onep (c_i_1));
15485 ASSERT_FALSE (real_minus_onep (c_i_m1));
15486 ASSERT_FALSE (real_minus_onep (c_f_0));
15487 ASSERT_FALSE (real_minus_onep (c_f_1));
15488 ASSERT_TRUE (real_minus_onep (c_f_m1));
15490 /* Test zerop. */
15491 ASSERT_TRUE (zerop (i_0));
15492 ASSERT_TRUE (zerop (wr_i_0));
15493 ASSERT_FALSE (zerop (i_1));
15494 ASSERT_FALSE (zerop (wr_i_1));
15495 ASSERT_FALSE (zerop (i_m1));
15496 ASSERT_FALSE (zerop (wr_i_m1));
15497 ASSERT_TRUE (zerop (f_0));
15498 ASSERT_TRUE (zerop (wr_f_0));
15499 ASSERT_FALSE (zerop (f_1));
15500 ASSERT_FALSE (zerop (wr_f_1));
15501 ASSERT_FALSE (zerop (f_m1));
15502 ASSERT_FALSE (zerop (wr_f_m1));
15503 ASSERT_TRUE (zerop (c_i_0));
15504 ASSERT_FALSE (zerop (c_i_1));
15505 ASSERT_FALSE (zerop (c_i_m1));
15506 ASSERT_TRUE (zerop (c_f_0));
15507 ASSERT_FALSE (zerop (c_f_1));
15508 ASSERT_FALSE (zerop (c_f_m1));
15510 /* Test tree_expr_nonnegative_p. */
15511 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15512 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15513 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15514 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15515 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15516 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15517 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15518 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15519 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15520 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15521 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15522 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15523 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15524 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15525 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15526 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15527 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15528 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15530 /* Test tree_expr_nonzero_p. */
15531 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15532 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15533 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15534 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15535 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15536 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15538 /* Test integer_valued_real_p. */
15539 ASSERT_FALSE (integer_valued_real_p (i_0));
15540 ASSERT_TRUE (integer_valued_real_p (f_0));
15541 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15542 ASSERT_TRUE (integer_valued_real_p (f_1));
15543 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15545 /* Test integer_pow2p. */
15546 ASSERT_FALSE (integer_pow2p (i_0));
15547 ASSERT_TRUE (integer_pow2p (i_1));
15548 ASSERT_TRUE (integer_pow2p (wr_i_1));
15550 /* Test uniform_integer_cst_p. */
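/* (uniform_integer_cst_p returns the uniform element as a tree rather than
   a bool, so these asserts effectively check for a non-NULL result.)  */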
15551 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15552 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15553 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15554 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15555 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15556 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15557 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15558 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15559 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15560 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15561 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15562 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15563 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15564 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15565 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15566 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15567 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15568 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15569 }
15571 /* Check that string escaping works correctly. */
15573 static void
15574 test_escaped_strings (void)
15575 {
15576 int saved_cutoff;
15577 escaped_string msg;
15579 msg.escape (NULL);
15580 /* ASSERT_STREQ does not accept NULL as a valid test
15581 result, so we have to use ASSERT_EQ instead. */
15582 ASSERT_EQ (NULL, (const char *) msg);
15584 msg.escape ("");
15585 ASSERT_STREQ ("", (const char *) msg);
15587 msg.escape ("foobar");
15588 ASSERT_STREQ ("foobar", (const char *) msg);
15590 /* Ensure that we have -fmessage-length set to 0. */
15591 saved_cutoff = pp_line_cutoff (global_dc->printer);
15592 pp_line_cutoff (global_dc->printer) = 0;
15594 msg.escape ("foo\nbar");
15595 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15597 msg.escape ("\a\b\f\n\r\t\v");
15598 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15600 /* Now repeat the tests with -fmessage-length set to 5. */
15601 pp_line_cutoff (global_dc->printer) = 5;
15603 /* Note that the newline is not translated into an escape. */
15604 msg.escape ("foo\nbar");
15605 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15607 msg.escape ("\a\b\f\n\r\t\v");
15608 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
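/* As the two checks above show, escaped_string::escape leaves '\n' alone
   once the pretty-printer's line cutoff (-fmessage-length) is non-zero,
   while the other control characters are still escaped.  */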
15610 /* Restore the original message length setting. */
15611 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15612 }
15614 /* Run all of the selftests within this file. */
15616 void
15617 tree_cc_tests ()
15618 {
15619 test_integer_constants ();
15620 test_identifiers ();
15621 test_labels ();
15622 test_vector_cst_patterns ();
15623 test_location_wrappers ();
15624 test_predicates ();
15625 test_escaped_strings ();
15626 }
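/* tree_cc_tests is expected to be invoked from GCC's selftest harness
   (selftest-run-tests.cc), which is why this whole block is guarded by
   CHECKING_P.  */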
15628 } // namespace selftest
15630 #endif /* CHECKING_P */
15632 #include "gt-tree.h"