Fix issue for pointers to anonymous types with -fdump-ada-spec
gcc/tree.cc
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2022 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72 #include "gomp-constants.h"
74 /* Tree code classes. */
76 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
77 #define END_OF_BASE_TREE_CODES tcc_exceptional,
79 const enum tree_code_class tree_code_type[] = {
80 #include "all-tree.def"
83 #undef DEFTREECODE
84 #undef END_OF_BASE_TREE_CODES
86 /* Table indexed by tree code giving number of expression
87 operands beyond the fixed part of the node structure.
88 Not used for types or decls. */
90 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
91 #define END_OF_BASE_TREE_CODES 0,
93 const unsigned char tree_code_length[] = {
94 #include "all-tree.def"
97 #undef DEFTREECODE
98 #undef END_OF_BASE_TREE_CODES
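/* Illustrative sketch (not part of the original source): the tables above
   are built with the "X macro" pattern.  all-tree.def runs every tree code
   through DEFTREECODE, so with entries such as

     DEFTREECODE (ERROR_MARK, "error_mark", tcc_exceptional, 0)
     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   the first inclusion above expands roughly to

     const enum tree_code_class tree_code_type[] = { tcc_exceptional, tcc_binary, ... };

   and the second to

     const unsigned char tree_code_length[] = { 0, 2, ... };

   so both tables can be indexed directly by enum tree_code.  */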
100 /* Names of tree components.
101 Used for printing out the tree and error messages. */
102 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
103 #define END_OF_BASE_TREE_CODES "@dummy",
105 static const char *const tree_code_name[] = {
106 #include "all-tree.def"
109 #undef DEFTREECODE
110 #undef END_OF_BASE_TREE_CODES
112 /* Each tree code class has an associated string representation.
113 These must correspond to the tree_code_class entries. */
115 const char *const tree_code_class_strings[] =
117 "exceptional",
118 "constant",
119 "type",
120 "declaration",
121 "reference",
122 "comparison",
123 "unary",
124 "binary",
125 "statement",
126 "vl_exp",
127 "expression"
130 /* obstack.[ch] explicitly declined to prototype this. */
131 extern int _obstack_allocated_p (struct obstack *h, void *obj);
133 /* Statistics-gathering stuff. */
135 static uint64_t tree_code_counts[MAX_TREE_CODES];
136 uint64_t tree_node_counts[(int) all_kinds];
137 uint64_t tree_node_sizes[(int) all_kinds];
139 /* Keep in sync with tree.h:enum tree_node_kind. */
140 static const char * const tree_node_kind_names[] = {
141 "decls",
142 "types",
143 "blocks",
144 "stmts",
145 "refs",
146 "exprs",
147 "constants",
148 "identifiers",
149 "vecs",
150 "binfos",
151 "ssa names",
152 "constructors",
153 "random kinds",
154 "lang_decl kinds",
155 "lang_type kinds",
156 "omp clauses",
159 /* Unique id for next decl created. */
160 static GTY(()) int next_decl_uid;
161 /* Unique id for next type created. */
162 static GTY(()) unsigned next_type_uid = 1;
163 /* Unique id for next debug decl created. Use negative numbers,
164 to catch erroneous uses. */
165 static GTY(()) int next_debug_decl_uid;
167 /* Since we cannot rehash a type after it is in the table, we have to
168 keep the hash code. */
170 struct GTY((for_user)) type_hash {
171 unsigned long hash;
172 tree type;
175 /* Initial size of the hash table (rounded to next prime). */
176 #define TYPE_HASH_INITIAL_SIZE 1000
178 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
180 static hashval_t hash (type_hash *t) { return t->hash; }
181 static bool equal (type_hash *a, type_hash *b);
183 static int
184 keep_cache_entry (type_hash *&t)
186 return ggc_marked_p (t->type);
190 /* Now here is the hash table. When recording a type, it is added to
191 the slot whose index is the hash code. Note that the hash table is
192 used for several kinds of types (function types, array types and
193 array index range types, for now). While all these live in the
194 same table, they are completely independent, and the hash code is
195 computed differently for each of these. */
197 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
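/* A minimal usage sketch (assumed, not taken from this file; CANDIDATE_TYPE
   and HASHCODE are placeholder names): callers consult this table with a
   precomputed hash, e.g.

     type_hash in;
     in.hash = hashcode;
     in.type = candidate_type;
     type_hash **slot
       = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);

   Storing the hash in each type_hash entry means the table never has to
   recompute it when it is resized.  */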
199 /* Hash table and temporary node for larger integer const values. */
200 static GTY (()) tree int_cst_node;
202 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
204 static hashval_t hash (tree t);
205 static bool equal (tree x, tree y);
208 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
210 /* Class and variable for making sure that there is a single POLY_INT_CST
211 for a given value. */
212 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
214 typedef std::pair<tree, const poly_wide_int *> compare_type;
215 static hashval_t hash (tree t);
216 static bool equal (tree x, const compare_type &y);
219 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
221 /* Hash table for optimization flags and target option flags. Use the same
222 hash table for both sets of options. Nodes for building the current
223 optimization and target option nodes. The assumption is most of the time
224 the options created will already be in the hash table, so we avoid
225 allocating and freeing up a node repeatedly. */
226 static GTY (()) tree cl_optimization_node;
227 static GTY (()) tree cl_target_option_node;
229 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
231 static hashval_t hash (tree t);
232 static bool equal (tree x, tree y);
235 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
237 /* General tree->tree mapping structure for use in hash tables. */
240 static GTY ((cache))
241 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
243 static GTY ((cache))
244 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
246 static GTY ((cache))
247 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
249 static void set_type_quals (tree, int);
250 static void print_type_hash_statistics (void);
251 static void print_debug_expr_statistics (void);
252 static void print_value_expr_statistics (void);
254 tree global_trees[TI_MAX];
255 tree integer_types[itk_none];
257 bool int_n_enabled_p[NUM_INT_N_ENTS];
258 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
260 bool tree_contains_struct[MAX_TREE_CODES][64];
262 /* Number of operands for each OMP clause. */
263 unsigned const char omp_clause_num_ops[] =
265 0, /* OMP_CLAUSE_ERROR */
266 1, /* OMP_CLAUSE_PRIVATE */
267 1, /* OMP_CLAUSE_SHARED */
268 1, /* OMP_CLAUSE_FIRSTPRIVATE */
269 2, /* OMP_CLAUSE_LASTPRIVATE */
270 5, /* OMP_CLAUSE_REDUCTION */
271 5, /* OMP_CLAUSE_TASK_REDUCTION */
272 5, /* OMP_CLAUSE_IN_REDUCTION */
273 1, /* OMP_CLAUSE_COPYIN */
274 1, /* OMP_CLAUSE_COPYPRIVATE */
275 3, /* OMP_CLAUSE_LINEAR */
276 1, /* OMP_CLAUSE_AFFINITY */
277 2, /* OMP_CLAUSE_ALIGNED */
278 3, /* OMP_CLAUSE_ALLOCATE */
279 1, /* OMP_CLAUSE_DEPEND */
280 1, /* OMP_CLAUSE_NONTEMPORAL */
281 1, /* OMP_CLAUSE_UNIFORM */
282 1, /* OMP_CLAUSE_TO_DECLARE */
283 1, /* OMP_CLAUSE_LINK */
284 1, /* OMP_CLAUSE_DETACH */
285 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
286 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
287 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
288 1, /* OMP_CLAUSE_INCLUSIVE */
289 1, /* OMP_CLAUSE_EXCLUSIVE */
290 2, /* OMP_CLAUSE_FROM */
291 2, /* OMP_CLAUSE_TO */
292 2, /* OMP_CLAUSE_MAP */
293 1, /* OMP_CLAUSE_HAS_DEVICE_ADDR */
294 2, /* OMP_CLAUSE__CACHE_ */
295 2, /* OMP_CLAUSE_GANG */
296 1, /* OMP_CLAUSE_ASYNC */
297 1, /* OMP_CLAUSE_WAIT */
298 0, /* OMP_CLAUSE_AUTO */
299 0, /* OMP_CLAUSE_SEQ */
300 1, /* OMP_CLAUSE__LOOPTEMP_ */
301 1, /* OMP_CLAUSE__REDUCTEMP_ */
302 1, /* OMP_CLAUSE__CONDTEMP_ */
303 1, /* OMP_CLAUSE__SCANTEMP_ */
304 1, /* OMP_CLAUSE_IF */
305 1, /* OMP_CLAUSE_NUM_THREADS */
306 1, /* OMP_CLAUSE_SCHEDULE */
307 0, /* OMP_CLAUSE_NOWAIT */
308 1, /* OMP_CLAUSE_ORDERED */
309 0, /* OMP_CLAUSE_DEFAULT */
310 3, /* OMP_CLAUSE_COLLAPSE */
311 0, /* OMP_CLAUSE_UNTIED */
312 1, /* OMP_CLAUSE_FINAL */
313 0, /* OMP_CLAUSE_MERGEABLE */
314 1, /* OMP_CLAUSE_DEVICE */
315 1, /* OMP_CLAUSE_DIST_SCHEDULE */
316 0, /* OMP_CLAUSE_INBRANCH */
317 0, /* OMP_CLAUSE_NOTINBRANCH */
318 2, /* OMP_CLAUSE_NUM_TEAMS */
319 1, /* OMP_CLAUSE_THREAD_LIMIT */
320 0, /* OMP_CLAUSE_PROC_BIND */
321 1, /* OMP_CLAUSE_SAFELEN */
322 1, /* OMP_CLAUSE_SIMDLEN */
323 0, /* OMP_CLAUSE_DEVICE_TYPE */
324 0, /* OMP_CLAUSE_FOR */
325 0, /* OMP_CLAUSE_PARALLEL */
326 0, /* OMP_CLAUSE_SECTIONS */
327 0, /* OMP_CLAUSE_TASKGROUP */
328 1, /* OMP_CLAUSE_PRIORITY */
329 1, /* OMP_CLAUSE_GRAINSIZE */
330 1, /* OMP_CLAUSE_NUM_TASKS */
331 0, /* OMP_CLAUSE_NOGROUP */
332 0, /* OMP_CLAUSE_THREADS */
333 0, /* OMP_CLAUSE_SIMD */
334 1, /* OMP_CLAUSE_HINT */
335 0, /* OMP_CLAUSE_DEFAULTMAP */
336 0, /* OMP_CLAUSE_ORDER */
337 0, /* OMP_CLAUSE_BIND */
338 1, /* OMP_CLAUSE_FILTER */
339 1, /* OMP_CLAUSE__SIMDUID_ */
340 0, /* OMP_CLAUSE__SIMT_ */
341 0, /* OMP_CLAUSE_INDEPENDENT */
342 1, /* OMP_CLAUSE_WORKER */
343 1, /* OMP_CLAUSE_VECTOR */
344 1, /* OMP_CLAUSE_NUM_GANGS */
345 1, /* OMP_CLAUSE_NUM_WORKERS */
346 1, /* OMP_CLAUSE_VECTOR_LENGTH */
347 3, /* OMP_CLAUSE_TILE */
348 0, /* OMP_CLAUSE_IF_PRESENT */
349 0, /* OMP_CLAUSE_FINALIZE */
350 0, /* OMP_CLAUSE_NOHOST */
353 const char * const omp_clause_code_name[] =
355 "error_clause",
356 "private",
357 "shared",
358 "firstprivate",
359 "lastprivate",
360 "reduction",
361 "task_reduction",
362 "in_reduction",
363 "copyin",
364 "copyprivate",
365 "linear",
366 "affinity",
367 "aligned",
368 "allocate",
369 "depend",
370 "nontemporal",
371 "uniform",
372 "to",
373 "link",
374 "detach",
375 "use_device_ptr",
376 "use_device_addr",
377 "is_device_ptr",
378 "inclusive",
379 "exclusive",
380 "from",
381 "to",
382 "map",
383 "has_device_addr",
384 "_cache_",
385 "gang",
386 "async",
387 "wait",
388 "auto",
389 "seq",
390 "_looptemp_",
391 "_reductemp_",
392 "_condtemp_",
393 "_scantemp_",
394 "if",
395 "num_threads",
396 "schedule",
397 "nowait",
398 "ordered",
399 "default",
400 "collapse",
401 "untied",
402 "final",
403 "mergeable",
404 "device",
405 "dist_schedule",
406 "inbranch",
407 "notinbranch",
408 "num_teams",
409 "thread_limit",
410 "proc_bind",
411 "safelen",
412 "simdlen",
413 "device_type",
414 "for",
415 "parallel",
416 "sections",
417 "taskgroup",
418 "priority",
419 "grainsize",
420 "num_tasks",
421 "nogroup",
422 "threads",
423 "simd",
424 "hint",
425 "defaultmap",
426 "order",
427 "bind",
428 "filter",
429 "_simduid_",
430 "_simt_",
431 "independent",
432 "worker",
433 "vector",
434 "num_gangs",
435 "num_workers",
436 "vector_length",
437 "tile",
438 "if_present",
439 "finalize",
440 "nohost",
443 /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
444 clause names, but for use in diagnostics etc. we would like to use the "user"
445 clause names. */
447 const char *
448 user_omp_clause_code_name (tree clause, bool oacc)
450 /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
451 distinguish clauses as seen by the user. See also where front ends do
452 'build_omp_clause' with 'OMP_CLAUSE_MAP'. */
453 if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
454 switch (OMP_CLAUSE_MAP_KIND (clause))
456 case GOMP_MAP_FORCE_ALLOC:
457 case GOMP_MAP_ALLOC: return "create";
458 case GOMP_MAP_FORCE_TO:
459 case GOMP_MAP_TO: return "copyin";
460 case GOMP_MAP_FORCE_FROM:
461 case GOMP_MAP_FROM: return "copyout";
462 case GOMP_MAP_FORCE_TOFROM:
463 case GOMP_MAP_TOFROM: return "copy";
464 case GOMP_MAP_RELEASE: return "delete";
465 case GOMP_MAP_FORCE_PRESENT: return "present";
466 case GOMP_MAP_ATTACH: return "attach";
467 case GOMP_MAP_FORCE_DETACH:
468 case GOMP_MAP_DETACH: return "detach";
469 case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
470 case GOMP_MAP_LINK: return "link";
471 case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
472 default: break;
475 return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
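/* Purely illustrative example (LOC and X_DECL are placeholders): an OpenACC
   'copyin (x)' clause is represented internally as a map clause, built by a
   front end roughly as

     tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
     OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
     OMP_CLAUSE_DECL (c) = x_decl;

   and user_omp_clause_code_name (c, true) then returns "copyin" rather than
   the internal name "map", which is what diagnostics should print.  */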
479 /* Return the tree node structure used by tree code CODE. */
481 static inline enum tree_node_structure_enum
482 tree_node_structure_for_code (enum tree_code code)
484 switch (TREE_CODE_CLASS (code))
486 case tcc_declaration:
487 switch (code)
489 case CONST_DECL: return TS_CONST_DECL;
490 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
491 case FIELD_DECL: return TS_FIELD_DECL;
492 case FUNCTION_DECL: return TS_FUNCTION_DECL;
493 case LABEL_DECL: return TS_LABEL_DECL;
494 case PARM_DECL: return TS_PARM_DECL;
495 case RESULT_DECL: return TS_RESULT_DECL;
496 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
497 case TYPE_DECL: return TS_TYPE_DECL;
498 case VAR_DECL: return TS_VAR_DECL;
499 default: return TS_DECL_NON_COMMON;
502 case tcc_type: return TS_TYPE_NON_COMMON;
504 case tcc_binary:
505 case tcc_comparison:
506 case tcc_expression:
507 case tcc_reference:
508 case tcc_statement:
509 case tcc_unary:
510 case tcc_vl_exp: return TS_EXP;
512 default: /* tcc_constant and tcc_exceptional */
513 break;
516 switch (code)
518 /* tcc_constant cases. */
519 case COMPLEX_CST: return TS_COMPLEX;
520 case FIXED_CST: return TS_FIXED_CST;
521 case INTEGER_CST: return TS_INT_CST;
522 case POLY_INT_CST: return TS_POLY_INT_CST;
523 case REAL_CST: return TS_REAL_CST;
524 case STRING_CST: return TS_STRING;
525 case VECTOR_CST: return TS_VECTOR;
526 case VOID_CST: return TS_TYPED;
528 /* tcc_exceptional cases. */
529 case BLOCK: return TS_BLOCK;
530 case CONSTRUCTOR: return TS_CONSTRUCTOR;
531 case ERROR_MARK: return TS_COMMON;
532 case IDENTIFIER_NODE: return TS_IDENTIFIER;
533 case OMP_CLAUSE: return TS_OMP_CLAUSE;
534 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
535 case PLACEHOLDER_EXPR: return TS_COMMON;
536 case SSA_NAME: return TS_SSA_NAME;
537 case STATEMENT_LIST: return TS_STATEMENT_LIST;
538 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
539 case TREE_BINFO: return TS_BINFO;
540 case TREE_LIST: return TS_LIST;
541 case TREE_VEC: return TS_VEC;
543 default:
544 gcc_unreachable ();
549 /* Initialize tree_contains_struct to describe the hierarchy of tree
550 nodes. */
552 static void
553 initialize_tree_contains_struct (void)
555 unsigned i;
557 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
559 enum tree_code code;
560 enum tree_node_structure_enum ts_code;
562 code = (enum tree_code) i;
563 ts_code = tree_node_structure_for_code (code);
565 /* Mark the TS structure itself. */
566 tree_contains_struct[code][ts_code] = 1;
568 /* Mark all the structures that TS is derived from. */
569 switch (ts_code)
571 case TS_TYPED:
572 case TS_BLOCK:
573 case TS_OPTIMIZATION:
574 case TS_TARGET_OPTION:
575 MARK_TS_BASE (code);
576 break;
578 case TS_COMMON:
579 case TS_INT_CST:
580 case TS_POLY_INT_CST:
581 case TS_REAL_CST:
582 case TS_FIXED_CST:
583 case TS_VECTOR:
584 case TS_STRING:
585 case TS_COMPLEX:
586 case TS_SSA_NAME:
587 case TS_CONSTRUCTOR:
588 case TS_EXP:
589 case TS_STATEMENT_LIST:
590 MARK_TS_TYPED (code);
591 break;
593 case TS_IDENTIFIER:
594 case TS_DECL_MINIMAL:
595 case TS_TYPE_COMMON:
596 case TS_LIST:
597 case TS_VEC:
598 case TS_BINFO:
599 case TS_OMP_CLAUSE:
600 MARK_TS_COMMON (code);
601 break;
603 case TS_TYPE_WITH_LANG_SPECIFIC:
604 MARK_TS_TYPE_COMMON (code);
605 break;
607 case TS_TYPE_NON_COMMON:
608 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
609 break;
611 case TS_DECL_COMMON:
612 MARK_TS_DECL_MINIMAL (code);
613 break;
615 case TS_DECL_WRTL:
616 case TS_CONST_DECL:
617 MARK_TS_DECL_COMMON (code);
618 break;
620 case TS_DECL_NON_COMMON:
621 MARK_TS_DECL_WITH_VIS (code);
622 break;
624 case TS_DECL_WITH_VIS:
625 case TS_PARM_DECL:
626 case TS_LABEL_DECL:
627 case TS_RESULT_DECL:
628 MARK_TS_DECL_WRTL (code);
629 break;
631 case TS_FIELD_DECL:
632 MARK_TS_DECL_COMMON (code);
633 break;
635 case TS_VAR_DECL:
636 MARK_TS_DECL_WITH_VIS (code);
637 break;
639 case TS_TYPE_DECL:
640 case TS_FUNCTION_DECL:
641 MARK_TS_DECL_NON_COMMON (code);
642 break;
644 case TS_TRANSLATION_UNIT_DECL:
645 MARK_TS_DECL_COMMON (code);
646 break;
648 default:
649 gcc_unreachable ();
653 /* Basic consistency checks for attributes used in fold. */
654 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
655 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
656 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
657 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
658 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
659 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
660 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
661 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
662 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
663 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
664 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
665 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
666 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
667 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
668 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
669 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
670 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
671 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
672 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
673 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
674 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
675 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
676 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
677 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
678 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
679 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
680 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
681 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
682 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
683 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
684 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
685 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
686 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
687 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
688 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
689 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
690 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
691 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
692 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
693 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
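/* As an illustration (assumed usage, not from this function): once the
   table is filled in, code elsewhere can cheaply ask whether a tree code
   embeds a particular substructure, e.g.

     if (CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_DECL_WITH_VIS))
       ...   (T has visibility-related fields such as DECL_VISIBILITY)

   which is how accessors avoid touching fields a node does not have.  */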
697 /* Init tree.cc. */
699 void
700 init_ttree (void)
702 /* Initialize the hash table of types. */
703 type_hash_table
704 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
706 debug_expr_for_decl
707 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
709 value_expr_for_decl
710 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
712 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
714 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
716 int_cst_node = make_int_cst (1, 1);
718 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
720 cl_optimization_node = make_node (OPTIMIZATION_NODE);
721 cl_target_option_node = make_node (TARGET_OPTION_NODE);
723 /* Initialize the tree_contains_struct array. */
724 initialize_tree_contains_struct ();
725 lang_hooks.init_ts ();
729 /* The name of the object as the assembler will see it (but before any
730 translations made by ASM_OUTPUT_LABELREF). Often this is the same
731 as DECL_NAME. It is an IDENTIFIER_NODE. */
732 tree
733 decl_assembler_name (tree decl)
735 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
736 lang_hooks.set_decl_assembler_name (decl);
737 return DECL_ASSEMBLER_NAME_RAW (decl);
740 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
741 (either of which may be NULL). Inform the FE, if this changes the
742 name. */
744 void
745 overwrite_decl_assembler_name (tree decl, tree name)
747 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
748 lang_hooks.overwrite_decl_assembler_name (decl, name);
751 /* Return true if DECL may need an assembler name to be set. */
753 static inline bool
754 need_assembler_name_p (tree decl)
756 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
757 Rule merging. This makes type_odr_p return true on those types during
758 LTO and, by comparing the mangled names, we can say which types are intended
759 to be equivalent across compilation units.
761 We do not store names of type_in_anonymous_namespace_p.
763 Record, union and enumeration types have linkage that allows us
764 to check type_in_anonymous_namespace_p. We do not mangle compound types
765 that can always be compared structurally.
767 Similarly for builtin types, we compare properties of their main variant.
768 A special case is integer types, where mangling does make a difference
769 between char/signed char/unsigned char etc. Storing names for these allows
770 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
771 See cp/mangle.cc:write_builtin_type for details. */
773 if (TREE_CODE (decl) == TYPE_DECL)
775 if (DECL_NAME (decl)
776 && decl == TYPE_NAME (TREE_TYPE (decl))
777 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
778 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
779 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
780 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
781 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
782 && (type_with_linkage_p (TREE_TYPE (decl))
783 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
784 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
785 return !DECL_ASSEMBLER_NAME_SET_P (decl);
786 return false;
788 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
789 if (!VAR_OR_FUNCTION_DECL_P (decl))
790 return false;
792 /* If DECL already has its assembler name set, it does not need a
793 new one. */
794 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
795 || DECL_ASSEMBLER_NAME_SET_P (decl))
796 return false;
798 /* Abstract decls do not need an assembler name. */
799 if (DECL_ABSTRACT_P (decl))
800 return false;
802 /* For VAR_DECLs, only static, public and external symbols need an
803 assembler name. */
804 if (VAR_P (decl)
805 && !TREE_STATIC (decl)
806 && !TREE_PUBLIC (decl)
807 && !DECL_EXTERNAL (decl))
808 return false;
810 if (TREE_CODE (decl) == FUNCTION_DECL)
812 /* Do not set assembler name on builtins. Allow RTL expansion to
813 decide whether to expand inline or via a regular call. */
814 if (fndecl_built_in_p (decl)
815 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
816 return false;
818 /* Functions represented in the callgraph need an assembler name. */
819 if (cgraph_node::get (decl) != NULL)
820 return true;
822 /* Unused and not public functions don't need an assembler name. */
823 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
824 return false;
827 return true;
830 /* If T needs an assembler name, have one created for it. */
832 void
833 assign_assembler_name_if_needed (tree t)
835 if (need_assembler_name_p (t))
837 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
838 diagnostics that use input_location to show locus
839 information. The problem here is that, at this point,
840 input_location is generally anchored to the end of the file
841 (since the parser is long gone), so we don't have a good
842 position to pin it to.
844 To alleviate this problem, this uses the location of T's
845 declaration. Examples of this are
846 testsuite/g++.dg/template/cond2.C and
847 testsuite/g++.dg/template/pr35240.C. */
848 location_t saved_location = input_location;
849 input_location = DECL_SOURCE_LOCATION (t);
851 decl_assembler_name (t);
853 input_location = saved_location;
857 /* When the target supports COMDAT groups, this indicates which group the
858 DECL is associated with. This can be either an IDENTIFIER_NODE or a
859 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
860 tree
861 decl_comdat_group (const_tree node)
863 struct symtab_node *snode = symtab_node::get (node);
864 if (!snode)
865 return NULL;
866 return snode->get_comdat_group ();
869 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
870 tree
871 decl_comdat_group_id (const_tree node)
873 struct symtab_node *snode = symtab_node::get (node);
874 if (!snode)
875 return NULL;
876 return snode->get_comdat_group_id ();
879 /* When the target supports named sections, return the section name of
880 NODE as a string, or NULL if it is in no section. */
881 const char *
882 decl_section_name (const_tree node)
884 struct symtab_node *snode = symtab_node::get (node);
885 if (!snode)
886 return NULL;
887 return snode->get_section ();
890 /* Set section name of NODE to VALUE (a string), or clear it when VALUE
891 is NULL. */
892 void
893 set_decl_section_name (tree node, const char *value)
895 struct symtab_node *snode;
897 if (value == NULL)
899 snode = symtab_node::get (node);
900 if (!snode)
901 return;
903 else if (VAR_P (node))
904 snode = varpool_node::get_create (node);
905 else
906 snode = cgraph_node::get_create (node);
907 snode->set_section (value);
910 /* Set section name of NODE to match the section name of OTHER.
912 set_decl_section_name (decl, other) is equivalent to
913 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
914 efficient. */
915 void
916 set_decl_section_name (tree decl, const_tree other)
918 struct symtab_node *other_node = symtab_node::get (other);
919 if (other_node)
921 struct symtab_node *decl_node;
922 if (VAR_P (decl))
923 decl_node = varpool_node::get_create (decl);
924 else
925 decl_node = cgraph_node::get_create (decl);
926 decl_node->set_section (*other_node);
928 else
930 struct symtab_node *decl_node = symtab_node::get (decl);
931 if (!decl_node)
932 return;
933 decl_node->set_section (NULL);
937 /* Return TLS model of a variable NODE. */
938 enum tls_model
939 decl_tls_model (const_tree node)
941 struct varpool_node *snode = varpool_node::get (node);
942 if (!snode)
943 return TLS_MODEL_NONE;
944 return snode->tls_model;
947 /* Set TLS model of variable NODE to MODEL. */
948 void
949 set_decl_tls_model (tree node, enum tls_model model)
951 struct varpool_node *vnode;
953 if (model == TLS_MODEL_NONE)
955 vnode = varpool_node::get (node);
956 if (!vnode)
957 return;
959 else
960 vnode = varpool_node::get_create (node);
961 vnode->tls_model = model;
964 /* Compute the number of bytes occupied by a tree with code CODE.
965 This function cannot be used for nodes that have variable sizes,
966 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
967 size_t
968 tree_code_size (enum tree_code code)
970 switch (TREE_CODE_CLASS (code))
972 case tcc_declaration: /* A decl node */
973 switch (code)
975 case FIELD_DECL: return sizeof (tree_field_decl);
976 case PARM_DECL: return sizeof (tree_parm_decl);
977 case VAR_DECL: return sizeof (tree_var_decl);
978 case LABEL_DECL: return sizeof (tree_label_decl);
979 case RESULT_DECL: return sizeof (tree_result_decl);
980 case CONST_DECL: return sizeof (tree_const_decl);
981 case TYPE_DECL: return sizeof (tree_type_decl);
982 case FUNCTION_DECL: return sizeof (tree_function_decl);
983 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
984 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
985 case NAMESPACE_DECL:
986 case IMPORTED_DECL:
987 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
988 default:
989 gcc_checking_assert (code >= NUM_TREE_CODES);
990 return lang_hooks.tree_size (code);
993 case tcc_type: /* a type node */
994 switch (code)
996 case OFFSET_TYPE:
997 case ENUMERAL_TYPE:
998 case BOOLEAN_TYPE:
999 case INTEGER_TYPE:
1000 case REAL_TYPE:
1001 case OPAQUE_TYPE:
1002 case POINTER_TYPE:
1003 case REFERENCE_TYPE:
1004 case NULLPTR_TYPE:
1005 case FIXED_POINT_TYPE:
1006 case COMPLEX_TYPE:
1007 case VECTOR_TYPE:
1008 case ARRAY_TYPE:
1009 case RECORD_TYPE:
1010 case UNION_TYPE:
1011 case QUAL_UNION_TYPE:
1012 case VOID_TYPE:
1013 case FUNCTION_TYPE:
1014 case METHOD_TYPE:
1015 case LANG_TYPE: return sizeof (tree_type_non_common);
1016 default:
1017 gcc_checking_assert (code >= NUM_TREE_CODES);
1018 return lang_hooks.tree_size (code);
1021 case tcc_reference: /* a reference */
1022 case tcc_expression: /* an expression */
1023 case tcc_statement: /* an expression with side effects */
1024 case tcc_comparison: /* a comparison expression */
1025 case tcc_unary: /* a unary arithmetic expression */
1026 case tcc_binary: /* a binary arithmetic expression */
1027 return (sizeof (struct tree_exp)
1028 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
1030 case tcc_constant: /* a constant */
1031 switch (code)
1033 case VOID_CST: return sizeof (tree_typed);
1034 case INTEGER_CST: gcc_unreachable ();
1035 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
1036 case REAL_CST: return sizeof (tree_real_cst);
1037 case FIXED_CST: return sizeof (tree_fixed_cst);
1038 case COMPLEX_CST: return sizeof (tree_complex);
1039 case VECTOR_CST: gcc_unreachable ();
1040 case STRING_CST: gcc_unreachable ();
1041 default:
1042 gcc_checking_assert (code >= NUM_TREE_CODES);
1043 return lang_hooks.tree_size (code);
1046 case tcc_exceptional: /* something random, like an identifier. */
1047 switch (code)
1049 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
1050 case TREE_LIST: return sizeof (tree_list);
1052 case ERROR_MARK:
1053 case PLACEHOLDER_EXPR: return sizeof (tree_common);
1055 case TREE_VEC: gcc_unreachable ();
1056 case OMP_CLAUSE: gcc_unreachable ();
1058 case SSA_NAME: return sizeof (tree_ssa_name);
1060 case STATEMENT_LIST: return sizeof (tree_statement_list);
1061 case BLOCK: return sizeof (struct tree_block);
1062 case CONSTRUCTOR: return sizeof (tree_constructor);
1063 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
1064 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
1066 default:
1067 gcc_checking_assert (code >= NUM_TREE_CODES);
1068 return lang_hooks.tree_size (code);
1071 default:
1072 gcc_unreachable ();
1076 /* Compute the number of bytes occupied by NODE. This routine only
1077 looks at TREE_CODE, except for those nodes that have variable sizes. */
1078 size_t
1079 tree_size (const_tree node)
1081 const enum tree_code code = TREE_CODE (node);
1082 switch (code)
1084 case INTEGER_CST:
1085 return (sizeof (struct tree_int_cst)
1086 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1088 case TREE_BINFO:
1089 return (offsetof (struct tree_binfo, base_binfos)
1090 + vec<tree, va_gc>
1091 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1093 case TREE_VEC:
1094 return (sizeof (struct tree_vec)
1095 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1097 case VECTOR_CST:
1098 return (sizeof (struct tree_vector)
1099 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1101 case STRING_CST:
1102 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1104 case OMP_CLAUSE:
1105 return (sizeof (struct tree_omp_clause)
1106 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1107 * sizeof (tree));
1109 default:
1110 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1111 return (sizeof (struct tree_exp)
1112 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1113 else
1114 return tree_code_size (code);
1118 /* Return tree node kind based on tree CODE. */
1120 static tree_node_kind
1121 get_stats_node_kind (enum tree_code code)
1123 enum tree_code_class type = TREE_CODE_CLASS (code);
1125 switch (type)
1127 case tcc_declaration: /* A decl node */
1128 return d_kind;
1129 case tcc_type: /* a type node */
1130 return t_kind;
1131 case tcc_statement: /* an expression with side effects */
1132 return s_kind;
1133 case tcc_reference: /* a reference */
1134 return r_kind;
1135 case tcc_expression: /* an expression */
1136 case tcc_comparison: /* a comparison expression */
1137 case tcc_unary: /* a unary arithmetic expression */
1138 case tcc_binary: /* a binary arithmetic expression */
1139 return e_kind;
1140 case tcc_constant: /* a constant */
1141 return c_kind;
1142 case tcc_exceptional: /* something random, like an identifier. */
1143 switch (code)
1145 case IDENTIFIER_NODE:
1146 return id_kind;
1147 case TREE_VEC:
1148 return vec_kind;
1149 case TREE_BINFO:
1150 return binfo_kind;
1151 case SSA_NAME:
1152 return ssa_name_kind;
1153 case BLOCK:
1154 return b_kind;
1155 case CONSTRUCTOR:
1156 return constr_kind;
1157 case OMP_CLAUSE:
1158 return omp_clause_kind;
1159 default:
1160 return x_kind;
1162 break;
1163 case tcc_vl_exp:
1164 return e_kind;
1165 default:
1166 gcc_unreachable ();
1170 /* Record interesting allocation statistics for a tree node with CODE
1171 and LENGTH. */
1173 static void
1174 record_node_allocation_statistics (enum tree_code code, size_t length)
1176 if (!GATHER_STATISTICS)
1177 return;
1179 tree_node_kind kind = get_stats_node_kind (code);
1181 tree_code_counts[(int) code]++;
1182 tree_node_counts[(int) kind]++;
1183 tree_node_sizes[(int) kind] += length;
1186 /* Allocate and return a new UID from the DECL_UID namespace. */
1189 allocate_decl_uid (void)
1191 return next_decl_uid++;
1194 /* Return a newly allocated node of code CODE. For decl and type
1195 nodes, some other fields are initialized. The rest of the node is
1196 initialized to zero. This function cannot be used for TREE_VEC,
1197 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1198 tree_code_size.
1200 Achoo! I got a code in the node. */
1202 tree
1203 make_node (enum tree_code code MEM_STAT_DECL)
1205 tree t;
1206 enum tree_code_class type = TREE_CODE_CLASS (code);
1207 size_t length = tree_code_size (code);
1209 record_node_allocation_statistics (code, length);
1211 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1212 TREE_SET_CODE (t, code);
1214 switch (type)
1216 case tcc_statement:
1217 if (code != DEBUG_BEGIN_STMT)
1218 TREE_SIDE_EFFECTS (t) = 1;
1219 break;
1221 case tcc_declaration:
1222 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1224 if (code == FUNCTION_DECL)
1226 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1227 SET_DECL_MODE (t, FUNCTION_MODE);
1229 else
1230 SET_DECL_ALIGN (t, 1);
1232 DECL_SOURCE_LOCATION (t) = input_location;
1233 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1234 DECL_UID (t) = --next_debug_decl_uid;
1235 else
1237 DECL_UID (t) = allocate_decl_uid ();
1238 SET_DECL_PT_UID (t, -1);
1240 if (TREE_CODE (t) == LABEL_DECL)
1241 LABEL_DECL_UID (t) = -1;
1243 break;
1245 case tcc_type:
1246 TYPE_UID (t) = next_type_uid++;
1247 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1248 TYPE_USER_ALIGN (t) = 0;
1249 TYPE_MAIN_VARIANT (t) = t;
1250 TYPE_CANONICAL (t) = t;
1252 /* Default to no attributes for type, but let target change that. */
1253 TYPE_ATTRIBUTES (t) = NULL_TREE;
1254 targetm.set_default_type_attributes (t);
1256 /* We have not yet computed the alias set for this type. */
1257 TYPE_ALIAS_SET (t) = -1;
1258 break;
1260 case tcc_constant:
1261 TREE_CONSTANT (t) = 1;
1262 break;
1264 case tcc_expression:
1265 switch (code)
1267 case INIT_EXPR:
1268 case MODIFY_EXPR:
1269 case VA_ARG_EXPR:
1270 case PREDECREMENT_EXPR:
1271 case PREINCREMENT_EXPR:
1272 case POSTDECREMENT_EXPR:
1273 case POSTINCREMENT_EXPR:
1274 /* All of these have side-effects, no matter what their
1275 operands are. */
1276 TREE_SIDE_EFFECTS (t) = 1;
1277 break;
1279 default:
1280 break;
1282 break;
1284 case tcc_exceptional:
1285 switch (code)
1287 case TARGET_OPTION_NODE:
1288 TREE_TARGET_OPTION(t)
1289 = ggc_cleared_alloc<struct cl_target_option> ();
1290 break;
1292 case OPTIMIZATION_NODE:
1293 TREE_OPTIMIZATION (t)
1294 = ggc_cleared_alloc<struct cl_optimization> ();
1295 break;
1297 default:
1298 break;
1300 break;
1302 default:
1303 /* Other classes need no special treatment. */
1304 break;
1307 return t;
1310 /* Free tree node. */
1312 void
1313 free_node (tree node)
1315 enum tree_code code = TREE_CODE (node);
1316 if (GATHER_STATISTICS)
1318 enum tree_node_kind kind = get_stats_node_kind (code);
1320 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1321 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1322 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1324 tree_code_counts[(int) TREE_CODE (node)]--;
1325 tree_node_counts[(int) kind]--;
1326 tree_node_sizes[(int) kind] -= tree_size (node);
1328 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1329 vec_free (CONSTRUCTOR_ELTS (node));
1330 else if (code == BLOCK)
1331 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1332 else if (code == TREE_BINFO)
1333 vec_free (BINFO_BASE_ACCESSES (node));
1334 else if (code == OPTIMIZATION_NODE)
1335 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1336 else if (code == TARGET_OPTION_NODE)
1337 cl_target_option_free (TREE_TARGET_OPTION (node));
1338 ggc_free (node);
1341 /* Return a new node with the same contents as NODE except that its
1342 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1344 tree
1345 copy_node (tree node MEM_STAT_DECL)
1347 tree t;
1348 enum tree_code code = TREE_CODE (node);
1349 size_t length;
1351 gcc_assert (code != STATEMENT_LIST);
1353 length = tree_size (node);
1354 record_node_allocation_statistics (code, length);
1355 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1356 memcpy (t, node, length);
1358 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1359 TREE_CHAIN (t) = 0;
1360 TREE_ASM_WRITTEN (t) = 0;
1361 TREE_VISITED (t) = 0;
1363 if (TREE_CODE_CLASS (code) == tcc_declaration)
1365 if (code == DEBUG_EXPR_DECL)
1366 DECL_UID (t) = --next_debug_decl_uid;
1367 else
1369 DECL_UID (t) = allocate_decl_uid ();
1370 if (DECL_PT_UID_SET_P (node))
1371 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1373 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1374 && DECL_HAS_VALUE_EXPR_P (node))
1376 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1377 DECL_HAS_VALUE_EXPR_P (t) = 1;
1379 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1380 if (VAR_P (node))
1382 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1383 t->decl_with_vis.symtab_node = NULL;
1385 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1387 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1388 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1390 if (TREE_CODE (node) == FUNCTION_DECL)
1392 DECL_STRUCT_FUNCTION (t) = NULL;
1393 t->decl_with_vis.symtab_node = NULL;
1396 else if (TREE_CODE_CLASS (code) == tcc_type)
1398 TYPE_UID (t) = next_type_uid++;
1399 /* The following is so that the debug code for
1400 the copy is different from the original type.
1401 The two statements usually duplicate each other
1402 (because they clear fields of the same union),
1403 but the optimizer should catch that. */
1404 TYPE_SYMTAB_ADDRESS (t) = 0;
1405 TYPE_SYMTAB_DIE (t) = 0;
1407 /* Do not copy the values cache. */
1408 if (TYPE_CACHED_VALUES_P (t))
1410 TYPE_CACHED_VALUES_P (t) = 0;
1411 TYPE_CACHED_VALUES (t) = NULL_TREE;
1414 else if (code == TARGET_OPTION_NODE)
1416 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1417 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1418 sizeof (struct cl_target_option));
1420 else if (code == OPTIMIZATION_NODE)
1422 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1423 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1424 sizeof (struct cl_optimization));
1427 return t;
1430 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1431 For example, this can copy a list made of TREE_LIST nodes. */
1433 tree
1434 copy_list (tree list)
1436 tree head;
1437 tree prev, next;
1439 if (list == 0)
1440 return 0;
1442 head = prev = copy_node (list);
1443 next = TREE_CHAIN (list);
1444 while (next)
1446 TREE_CHAIN (prev) = copy_node (next);
1447 prev = TREE_CHAIN (prev);
1448 next = TREE_CHAIN (next);
1450 return head;
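/* A small usage sketch (hypothetical values): given a TREE_LIST chain

     tree attrs = tree_cons (get_identifier ("used"), NULL_TREE,
                             tree_cons (get_identifier ("cold"), NULL_TREE,
                                        NULL_TREE));
     tree copy = copy_list (attrs);

   COPY is a fresh chain of TREE_LIST nodes, so rewriting its TREE_CHAIN
   links does not disturb ATTRS; the TREE_PURPOSE/TREE_VALUE operands
   themselves are shared rather than duplicated.  */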
1454 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1455 INTEGER_CST with value CST and type TYPE. */
1457 static unsigned int
1458 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1460 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1461 /* We need extra HWIs if CST is an unsigned integer with its
1462 upper bit set. */
1463 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1464 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1465 return cst.get_len ();
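/* Worked example (illustrative, assuming HOST_BITS_PER_WIDE_INT == 64):
   for a 64-bit unsigned TYPE and CST == 0xffffffffffffffff, the wide_int
   is a single HOST_WIDE_INT element with value -1, so get_len () == 1;
   but because the type is unsigned and the top bit is set we return
   64 / 64 + 1 == 2, reserving an extra zero element so the extended
   value reads as non-negative.  */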
1468 /* Return a new INTEGER_CST with value CST and type TYPE. */
1470 static tree
1471 build_new_int_cst (tree type, const wide_int &cst)
1473 unsigned int len = cst.get_len ();
1474 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1475 tree nt = make_int_cst (len, ext_len);
1477 if (len < ext_len)
1479 --ext_len;
1480 TREE_INT_CST_ELT (nt, ext_len)
1481 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1482 for (unsigned int i = len; i < ext_len; ++i)
1483 TREE_INT_CST_ELT (nt, i) = -1;
1485 else if (TYPE_UNSIGNED (type)
1486 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1488 len--;
1489 TREE_INT_CST_ELT (nt, len)
1490 = zext_hwi (cst.elt (len),
1491 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1494 for (unsigned int i = 0; i < len; i++)
1495 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1496 TREE_TYPE (nt) = type;
1497 return nt;
1500 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1502 static tree
1503 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1504 CXX_MEM_STAT_INFO)
1506 size_t length = sizeof (struct tree_poly_int_cst);
1507 record_node_allocation_statistics (POLY_INT_CST, length);
1509 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1511 TREE_SET_CODE (t, POLY_INT_CST);
1512 TREE_CONSTANT (t) = 1;
1513 TREE_TYPE (t) = type;
1514 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1515 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1516 return t;
1519 /* Create a constant tree that contains CST sign-extended to TYPE. */
1521 tree
1522 build_int_cst (tree type, poly_int64 cst)
1524 /* Support legacy code. */
1525 if (!type)
1526 type = integer_type_node;
1528 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1531 /* Create a constant tree that contains CST zero-extended to TYPE. */
1533 tree
1534 build_int_cstu (tree type, poly_uint64 cst)
1536 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1539 /* Create a constant tree that contains CST sign-extended to TYPE. */
1541 tree
1542 build_int_cst_type (tree type, poly_int64 cst)
1544 gcc_assert (type);
1545 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
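/* Usage sketch (values chosen only for illustration):

     tree ten      = build_int_cst (integer_type_node, 10);
     tree all_ones = build_int_cst (unsigned_char_type_node, -1);
     tree big      = build_int_cstu (size_type_node, HOST_WIDE_INT_1U << 40);

   The first two sign-extend their argument and then reduce it to the
   type's precision, so ALL_ONES ends up as 255 in unsigned char; the
   last zero-extends, as documented above.  */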
1548 /* Constructs a tree in type TYPE with the value given by CST. Signedness
1549 of CST is assumed to be the same as the signedness of TYPE. */
1551 tree
1552 double_int_to_tree (tree type, double_int cst)
1554 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1557 /* We force the wide_int CST to the range of the type TYPE by sign or
1558 zero extending it. OVERFLOWABLE indicates if we are interested in
1559 overflow of the value: when >0 we are only interested in signed
1560 overflow, for <0 we are interested in any overflow. OVERFLOWED
1561 indicates whether overflow has already occurred. We force the
1562 result's value to be within range of TYPE (by setting to 0 or 1 all
1563 the bits outside the type's range). We set TREE_OVERFLOW if
1564 OVERFLOWED is nonzero,
1565 or OVERFLOWABLE is >0 and signed overflow occurs,
1566 or OVERFLOWABLE is <0 and any overflow occurs.
1568 We return a new tree node for the extended wide_int. The node
1569 is shared if no overflow flags are set. */
1572 tree
1573 force_fit_type (tree type, const poly_wide_int_ref &cst,
1574 int overflowable, bool overflowed)
1576 signop sign = TYPE_SIGN (type);
1578 /* If we need to set overflow flags, return a new unshared node. */
1579 if (overflowed || !wi::fits_to_tree_p (cst, type))
1581 if (overflowed
1582 || overflowable < 0
1583 || (overflowable > 0 && sign == SIGNED))
1585 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1586 sign);
1587 tree t;
1588 if (tmp.is_constant ())
1589 t = build_new_int_cst (type, tmp.coeffs[0]);
1590 else
1592 tree coeffs[NUM_POLY_INT_COEFFS];
1593 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1595 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1596 TREE_OVERFLOW (coeffs[i]) = 1;
1598 t = build_new_poly_int_cst (type, coeffs);
1600 TREE_OVERFLOW (t) = 1;
1601 return t;
1605 /* Else build a shared node. */
1606 return wide_int_to_tree (type, cst);
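/* A hedged usage sketch (not from this file; A, B and TYPE are
   placeholders): a caller folding A + B might do something like

     wi::overflow_type ovf;
     wide_int sum = wi::add (wi::to_wide (a), wi::to_wide (b),
                             TYPE_SIGN (type), &ovf);
     tree res = force_fit_type (type, sum, 0, ovf != wi::OVF_NONE);

   so that RES carries TREE_OVERFLOW when the addition wrapped and
   otherwise comes back as a shared INTEGER_CST.  */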
1609 /* These are the hash table functions for the hash table of INTEGER_CST
1610 nodes of a sizetype. */
1612 /* Return the hash code X, an INTEGER_CST. */
1614 hashval_t
1615 int_cst_hasher::hash (tree x)
1617 const_tree const t = x;
1618 hashval_t code = TYPE_UID (TREE_TYPE (t));
1619 int i;
1621 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1622 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1624 return code;
1627 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1628 is the same as that given by *Y, which is also an INTEGER_CST. */
1630 bool
1631 int_cst_hasher::equal (tree x, tree y)
1633 const_tree const xt = x;
1634 const_tree const yt = y;
1636 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1637 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1638 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1639 return false;
1641 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1642 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1643 return false;
1645 return true;
1648 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1649 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1650 number of slots that can be cached for the type. */
1652 static inline tree
1653 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1654 int slot, int max_slots)
1656 gcc_checking_assert (slot >= 0);
1657 /* Initialize cache. */
1658 if (!TYPE_CACHED_VALUES_P (type))
1660 TYPE_CACHED_VALUES_P (type) = 1;
1661 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1663 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1664 if (!t)
1666 /* Create a new shared int. */
1667 t = build_new_int_cst (type, cst);
1668 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1670 return t;
1673 /* Create an INTEGER_CST node of TYPE and value CST.
1674 The returned node is always shared. For small integers we use a
1675 per-type vector cache, for larger ones we use a single hash table.
1676 The value is extended from its precision according to the sign of
1677 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1678 the upper bits and ensures that hashing and value equality based
1679 upon the underlying HOST_WIDE_INTs works without masking. */
1681 static tree
1682 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1684 tree t;
1685 int ix = -1;
1686 int limit = 0;
1688 gcc_assert (type);
1689 unsigned int prec = TYPE_PRECISION (type);
1690 signop sgn = TYPE_SIGN (type);
1692 /* Verify that everything is canonical. */
1693 int l = pcst.get_len ();
1694 if (l > 1)
1696 if (pcst.elt (l - 1) == 0)
1697 gcc_checking_assert (pcst.elt (l - 2) < 0);
1698 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1699 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1702 wide_int cst = wide_int::from (pcst, prec, sgn);
1703 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1705 enum tree_code code = TREE_CODE (type);
1706 if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1708 /* Cache NULL pointer and zero bounds. */
1709 if (cst == 0)
1710 ix = 0;
1711 /* Cache upper bounds of pointers. */
1712 else if (cst == wi::max_value (prec, sgn))
1713 ix = 1;
1714 /* Cache 1 which is used for a non-zero range. */
1715 else if (cst == 1)
1716 ix = 2;
1718 if (ix >= 0)
1720 t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1721 /* Make sure no one is clobbering the shared constant. */
1722 gcc_checking_assert (TREE_TYPE (t) == type
1723 && cst == wi::to_wide (t));
1724 return t;
1727 if (ext_len == 1)
1729 /* We just need to store a single HOST_WIDE_INT. */
1730 HOST_WIDE_INT hwi;
1731 if (TYPE_UNSIGNED (type))
1732 hwi = cst.to_uhwi ();
1733 else
1734 hwi = cst.to_shwi ();
1736 switch (code)
1738 case NULLPTR_TYPE:
1739 gcc_assert (hwi == 0);
1740 /* Fallthru. */
1742 case POINTER_TYPE:
1743 case REFERENCE_TYPE:
1744 /* Ignore pointers, as they were already handled above. */
1745 break;
1747 case BOOLEAN_TYPE:
1748 /* Cache false or true. */
1749 limit = 2;
1750 if (IN_RANGE (hwi, 0, 1))
1751 ix = hwi;
1752 break;
1754 case INTEGER_TYPE:
1755 case OFFSET_TYPE:
1756 if (TYPE_SIGN (type) == UNSIGNED)
1758 /* Cache [0, N). */
1759 limit = param_integer_share_limit;
1760 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1761 ix = hwi;
1763 else
1765 /* Cache [-1, N). */
1766 limit = param_integer_share_limit + 1;
1767 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1768 ix = hwi + 1;
1770 break;
1772 case ENUMERAL_TYPE:
1773 break;
1775 default:
1776 gcc_unreachable ();
1779 if (ix >= 0)
1781 t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1782 /* Make sure no one is clobbering the shared constant. */
1783 gcc_checking_assert (TREE_TYPE (t) == type
1784 && TREE_INT_CST_NUNITS (t) == 1
1785 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1786 && TREE_INT_CST_EXT_NUNITS (t) == 1
1787 && TREE_INT_CST_ELT (t, 0) == hwi);
1788 return t;
1790 else
1792 /* Use the cache of larger shared ints, using int_cst_node as
1793 a temporary. */
1795 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1796 TREE_TYPE (int_cst_node) = type;
1798 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1799 t = *slot;
1800 if (!t)
1802 /* Insert this one into the hash table. */
1803 t = int_cst_node;
1804 *slot = t;
1805 /* Make a new node for next time round. */
1806 int_cst_node = make_int_cst (1, 1);
1810 else
1812 /* The value either hashes properly or we drop it on the floor
1813 for the gc to take care of. There will not be enough of them
1814 to worry about. */
1816 tree nt = build_new_int_cst (type, cst);
1817 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1818 t = *slot;
1819 if (!t)
1821 /* Insert this one into the hash table. */
1822 t = nt;
1823 *slot = t;
1825 else
1826 ggc_free (nt);
1829 return t;
1832 hashval_t
1833 poly_int_cst_hasher::hash (tree t)
1835 inchash::hash hstate;
1837 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1838 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1839 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1841 return hstate.end ();
1844 bool
1845 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1847 if (TREE_TYPE (x) != y.first)
1848 return false;
1849 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1850 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1851 return false;
1852 return true;
1855 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1856 The elements must also have type TYPE. */
1858 tree
1859 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1861 unsigned int prec = TYPE_PRECISION (type);
1862 gcc_assert (prec <= values.coeffs[0].get_precision ());
1863 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1865 inchash::hash h;
1866 h.add_int (TYPE_UID (type));
1867 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1868 h.add_wide_int (c.coeffs[i]);
1869 poly_int_cst_hasher::compare_type comp (type, &c);
1870 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1871 INSERT);
1872 if (*slot == NULL_TREE)
1874 tree coeffs[NUM_POLY_INT_COEFFS];
1875 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1876 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1877 *slot = build_new_poly_int_cst (type, coeffs);
1879 return *slot;
1882 /* Create a constant tree with value VALUE in type TYPE. */
1884 tree
1885 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1887 if (value.is_constant ())
1888 return wide_int_to_tree_1 (type, value.coeffs[0]);
1889 return build_poly_int_cst (type, value);
1892 /* Insert INTEGER_CST T into a cache of integer constants, and return
1893 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1894 is false, and T falls into the type's 'smaller values' range, there
1895 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1896 or the value is large, should an existing entry exist, it is
1897 returned (rather than inserting T). */
1899 tree
1900 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1902 tree type = TREE_TYPE (t);
1903 int ix = -1;
1904 int limit = 0;
1905 int prec = TYPE_PRECISION (type);
1907 gcc_assert (!TREE_OVERFLOW (t));
1909 /* The caching indices here must match those in
1910 wide_int_to_tree_1. */
1911 switch (TREE_CODE (type))
1913 case NULLPTR_TYPE:
1914 gcc_checking_assert (integer_zerop (t));
1915 /* Fallthru. */
1917 case POINTER_TYPE:
1918 case REFERENCE_TYPE:
1920 if (integer_zerop (t))
1921 ix = 0;
1922 else if (integer_onep (t))
1923 ix = 2;
1925 if (ix >= 0)
1926 limit = 3;
1928 break;
1930 case BOOLEAN_TYPE:
1931 /* Cache false or true. */
1932 limit = 2;
1933 if (wi::ltu_p (wi::to_wide (t), 2))
1934 ix = TREE_INT_CST_ELT (t, 0);
1935 break;
1937 case INTEGER_TYPE:
1938 case OFFSET_TYPE:
1939 if (TYPE_UNSIGNED (type))
1941 /* Cache 0..N */
1942 limit = param_integer_share_limit;
1944 /* This is a little hokey, but if the prec is smaller than
1945 what is necessary to hold param_integer_share_limit, then the
1946 obvious test will not get the correct answer. */
1947 if (prec < HOST_BITS_PER_WIDE_INT)
1949 if (tree_to_uhwi (t)
1950 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1951 ix = tree_to_uhwi (t);
1953 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1954 ix = tree_to_uhwi (t);
1956 else
1958 /* Cache -1..N */
1959 limit = param_integer_share_limit + 1;
1961 if (integer_minus_onep (t))
1962 ix = 0;
1963 else if (!wi::neg_p (wi::to_wide (t)))
1965 if (prec < HOST_BITS_PER_WIDE_INT)
1967 if (tree_to_shwi (t) < param_integer_share_limit)
1968 ix = tree_to_shwi (t) + 1;
1970 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1971 ix = tree_to_shwi (t) + 1;
1974 break;
1976 case ENUMERAL_TYPE:
1977 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1978 members. */
1979 break;
1981 default:
1982 gcc_unreachable ();
1985 if (ix >= 0)
1987 /* Look for it in the type's vector of small shared ints. */
1988 if (!TYPE_CACHED_VALUES_P (type))
1990 TYPE_CACHED_VALUES_P (type) = 1;
1991 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1994 if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
1996 gcc_checking_assert (might_duplicate);
1997 t = r;
1999 else
2000 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
2002 else
2004 /* Use the cache of larger shared ints. */
2005 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
2006 if (tree r = *slot)
2008 /* If there is already an entry for the number, verify it's the
2009 same value. */
2010 gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
2011 /* And return the cached value. */
2012 t = r;
2014 else
2015 /* Otherwise insert this one into the hash table. */
2016 *slot = t;
2019 return t;
2023 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
2024 and the rest are zeros. */
2026 tree
2027 build_low_bits_mask (tree type, unsigned bits)
2029 gcc_assert (bits <= TYPE_PRECISION (type));
2031 return wide_int_to_tree (type, wi::mask (bits, false,
2032 TYPE_PRECISION (type)));
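/* As an illustrative sketch (assuming unsigned int has 32-bit
   precision), the hypothetical helper below uses build_low_bits_mask
   to obtain the constant 0xff of type unsigned int; wi::mask (8,
   false, 32) is 0xff, so the result is that INTEGER_CST.  */

static inline tree
example_low_byte_mask_sketch (void)
{
  return build_low_bits_mask (unsigned_type_node, 8);
}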
2035 /* Checks that X is an integer constant that can be expressed in (unsigned)
2036 HOST_WIDE_INT without loss of precision. */
2038 bool
2039 cst_and_fits_in_hwi (const_tree x)
2041 return (TREE_CODE (x) == INTEGER_CST
2042 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2045 /* Build a newly constructed VECTOR_CST with the given values of
2046 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2048 tree
2049 make_vector (unsigned log2_npatterns,
2050 unsigned int nelts_per_pattern MEM_STAT_DECL)
2052 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2053 tree t;
2054 unsigned npatterns = 1 << log2_npatterns;
2055 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2056 unsigned length = (sizeof (struct tree_vector)
2057 + (encoded_nelts - 1) * sizeof (tree));
2059 record_node_allocation_statistics (VECTOR_CST, length);
2061 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2063 TREE_SET_CODE (t, VECTOR_CST);
2064 TREE_CONSTANT (t) = 1;
2065 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2066 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2068 return t;
2071 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2072 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2074 tree
2075 build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
2077 if (vec_safe_length (v) == 0)
2078 return build_zero_cst (type);
2080 unsigned HOST_WIDE_INT idx, nelts;
2081 tree value;
2083 /* We can't construct a VECTOR_CST for a variable number of elements. */
2084 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2085 tree_vector_builder vec (type, nelts, 1);
2086 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2088 if (TREE_CODE (value) == VECTOR_CST)
2090 /* If NELTS is constant then this must be too. */
2091 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2092 for (unsigned i = 0; i < sub_nelts; ++i)
2093 vec.quick_push (VECTOR_CST_ELT (value, i));
2095 else
2096 vec.quick_push (value);
2098 while (vec.length () < nelts)
2099 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2101 return vec.build ();
2104 /* Build a vector of type VECTYPE where all the elements are SCs. */
2105 tree
2106 build_vector_from_val (tree vectype, tree sc)
2108 unsigned HOST_WIDE_INT i, nunits;
2110 if (sc == error_mark_node)
2111 return sc;
2113 /* Verify that the vector type is suitable for SC. Note that there
2114 is some inconsistency in the type-system with respect to restrict
2115 qualifications of pointers. Vector types always have a main-variant
2116 element type and the qualification is applied to the vector-type.
2117 So TREE_TYPE (vector-type) does not return a properly qualified
2118 vector element-type. */
2119 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
2120 TREE_TYPE (vectype)));
2122 if (CONSTANT_CLASS_P (sc))
2124 tree_vector_builder v (vectype, 1, 1);
2125 v.quick_push (sc);
2126 return v.build ();
2128 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
2129 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
2130 else
2132 vec<constructor_elt, va_gc> *v;
2133 vec_alloc (v, nunits);
2134 for (i = 0; i < nunits; ++i)
2135 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2136 return build_constructor (vectype, v);
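/* As an illustrative sketch, the hypothetical helper below splats the
   multiplicative identity across VECTYPE.  Because build_one_cst
   returns a CONSTANT_CLASS_P node, build_vector_from_val encodes the
   result as a single-pattern, duplicated VECTOR_CST.  */

static inline tree
example_splat_one_sketch (tree vectype)
{
  tree one = build_one_cst (TREE_TYPE (vectype));
  return build_vector_from_val (vectype, one);
}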
2140 /* If TYPE is not a vector type, just return SC, otherwise return
2141 build_vector_from_val (TYPE, SC). */
2143 tree
2144 build_uniform_cst (tree type, tree sc)
2146 if (!VECTOR_TYPE_P (type))
2147 return sc;
2149 return build_vector_from_val (type, sc);
2152 /* Build a vector series of type TYPE in which element I has the value
2153 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2154 and a VEC_SERIES_EXPR otherwise. */
2156 tree
2157 build_vec_series (tree type, tree base, tree step)
2159 if (integer_zerop (step))
2160 return build_vector_from_val (type, base);
2161 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2163 tree_vector_builder builder (type, 1, 3);
2164 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2165 wi::to_wide (base) + wi::to_wide (step));
2166 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2167 wi::to_wide (elt1) + wi::to_wide (step));
2168 builder.quick_push (base);
2169 builder.quick_push (elt1);
2170 builder.quick_push (elt2);
2171 return builder.build ();
2173 return build2 (VEC_SERIES_EXPR, type, base, step);
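/* As a worked illustration: for a four-element integer vector type
   with BASE 3 and STEP 2 (both INTEGER_CSTs), the elements are
   { 3, 5, 7, 9 }; the VECTOR_CST is encoded from the three leading
   elements { 3, 5, 7 } and the rest are extrapolated from the step.  */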
2176 /* Return a vector with the same number of units and number of bits
2177 as VEC_TYPE, but in which the elements are a linear series of unsigned
2178 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2180 tree
2181 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2183 tree index_vec_type = vec_type;
2184 tree index_elt_type = TREE_TYPE (vec_type);
2185 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2186 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2188 index_elt_type = build_nonstandard_integer_type
2189 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2190 index_vec_type = build_vector_type (index_elt_type, nunits);
2193 tree_vector_builder v (index_vec_type, 1, 3);
2194 for (unsigned int i = 0; i < 3; ++i)
2195 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2196 return v.build ();
2199 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2200 elements are A and the rest are B. */
2202 tree
2203 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2205 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2206 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2207 /* Optimize the constant case. */
2208 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2209 count /= 2;
2210 tree_vector_builder builder (vec_type, count, 2);
2211 for (unsigned int i = 0; i < count * 2; ++i)
2212 builder.quick_push (i < num_a ? a : b);
2213 return builder.build ();
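/* As a worked illustration: for an eight-element vector type with
   NUM_A == 3, A == -1 and B == 0, the result is the VECTOR_CST
   { -1, -1, -1, 0, 0, 0, 0, 0 }.  */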
2216 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2217 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2219 void
2220 recompute_constructor_flags (tree c)
2222 unsigned int i;
2223 tree val;
2224 bool constant_p = true;
2225 bool side_effects_p = false;
2226 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2228 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2230 /* Mostly ctors will have elts that don't have side-effects, so
2231 the usual case is to scan all the elements. Hence a single
2232 loop for both const and side effects, rather than one loop
2233 each (with early outs). */
2234 if (!TREE_CONSTANT (val))
2235 constant_p = false;
2236 if (TREE_SIDE_EFFECTS (val))
2237 side_effects_p = true;
2240 TREE_SIDE_EFFECTS (c) = side_effects_p;
2241 TREE_CONSTANT (c) = constant_p;
2244 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2245 CONSTRUCTOR C. */
2247 void
2248 verify_constructor_flags (tree c)
2250 unsigned int i;
2251 tree val;
2252 bool constant_p = TREE_CONSTANT (c);
2253 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2254 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2256 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2258 if (constant_p && !TREE_CONSTANT (val))
2259 internal_error ("non-constant element in constant CONSTRUCTOR");
2260 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2261 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2265 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2266 are in the vec pointed to by VALS. */
2267 tree
2268 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2270 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2272 TREE_TYPE (c) = type;
2273 CONSTRUCTOR_ELTS (c) = vals;
2275 recompute_constructor_flags (c);
2277 return c;
2280 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2281 INDEX and VALUE. */
2282 tree
2283 build_constructor_single (tree type, tree index, tree value)
2285 vec<constructor_elt, va_gc> *v;
2286 constructor_elt elt = {index, value};
2288 vec_alloc (v, 1);
2289 v->quick_push (elt);
2291 return build_constructor (type, v);
2295 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2296 are in a list pointed to by VALS. */
2297 tree
2298 build_constructor_from_list (tree type, tree vals)
2300 tree t;
2301 vec<constructor_elt, va_gc> *v = NULL;
2303 if (vals)
2305 vec_alloc (v, list_length (vals));
2306 for (t = vals; t; t = TREE_CHAIN (t))
2307 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2310 return build_constructor (type, v);
2313 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2314 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2315 fields in the constructor remain null. */
2317 tree
2318 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2320 vec<constructor_elt, va_gc> *v = NULL;
2322 for (tree t : vals)
2323 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2325 return build_constructor (type, v);
2328 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2329 of elements, provided as index/value pairs. */
2331 tree
2332 build_constructor_va (tree type, int nelts, ...)
2334 vec<constructor_elt, va_gc> *v = NULL;
2335 va_list p;
2337 va_start (p, nelts);
2338 vec_alloc (v, nelts);
2339 while (nelts--)
2341 tree index = va_arg (p, tree);
2342 tree value = va_arg (p, tree);
2343 CONSTRUCTOR_APPEND_ELT (v, index, value);
2345 va_end (p);
2346 return build_constructor (type, v);
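/* As an illustrative sketch, a two-element array constructor could be
   built as
     build_constructor_va (array_type, 2,
                           size_int (0), elt0,
                           size_int (1), elt1);
   where ARRAY_TYPE, ELT0 and ELT1 are hypothetical trees supplied by
   the caller and size_int provides the element indices.  */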
2349 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2351 tree
2352 build_clobber (tree type, enum clobber_kind kind)
2354 tree clobber = build_constructor (type, NULL);
2355 TREE_THIS_VOLATILE (clobber) = true;
2356 CLOBBER_KIND (clobber) = kind;
2357 return clobber;
2360 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2362 tree
2363 build_fixed (tree type, FIXED_VALUE_TYPE f)
2365 tree v;
2366 FIXED_VALUE_TYPE *fp;
2368 v = make_node (FIXED_CST);
2369 fp = ggc_alloc<fixed_value> ();
2370 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2372 TREE_TYPE (v) = type;
2373 TREE_FIXED_CST_PTR (v) = fp;
2374 return v;
2377 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2379 tree
2380 build_real (tree type, REAL_VALUE_TYPE d)
2382 tree v;
2383 REAL_VALUE_TYPE *dp;
2384 int overflow = 0;
2386 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2387 Consider doing it via real_convert now. */
2389 v = make_node (REAL_CST);
2390 dp = ggc_alloc<real_value> ();
2391 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
2393 TREE_TYPE (v) = type;
2394 TREE_REAL_CST_PTR (v) = dp;
2395 TREE_OVERFLOW (v) = overflow;
2396 return v;
2399 /* Like build_real, but first truncate D to the type. */
2401 tree
2402 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2404 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2407 /* Return a new REAL_CST node whose type is TYPE
2408 and whose value is the integer value of the INTEGER_CST node I. */
2410 REAL_VALUE_TYPE
2411 real_value_from_int_cst (const_tree type, const_tree i)
2413 REAL_VALUE_TYPE d;
2415 /* Clear all bits of the real value type so that we can later do
2416 bitwise comparisons to see if two values are the same. */
2417 memset (&d, 0, sizeof d);
2419 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2420 TYPE_SIGN (TREE_TYPE (i)));
2421 return d;
2424 /* Given a tree representing an integer constant I, return a tree
2425 representing the same value as a floating-point constant of type TYPE. */
2427 tree
2428 build_real_from_int_cst (tree type, const_tree i)
2430 tree v;
2431 int overflow = TREE_OVERFLOW (i);
2433 v = build_real (type, real_value_from_int_cst (type, i));
2435 TREE_OVERFLOW (v) |= overflow;
2436 return v;
2439 /* Return a new REAL_CST node whose type is TYPE
2440 and whose value is the integer value I which has sign SGN. */
2442 tree
2443 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2445 REAL_VALUE_TYPE d;
2447 /* Clear all bits of the real value type so that we can later do
2448 bitwise comparisons to see if two values are the same. */
2449 memset (&d, 0, sizeof d);
2451 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2452 return build_real (type, d);
2455 /* Return a newly constructed STRING_CST node whose value is the LEN
2456 characters at STR when STR is nonnull, or all zeros otherwise.
2457 Note that for a C string literal, LEN should include the trailing NUL.
2458 The TREE_TYPE is not initialized. */
2460 tree
2461 build_string (unsigned len, const char *str /*= NULL */)
2463 /* Do not waste bytes provided by padding of struct tree_string. */
2464 unsigned size = len + offsetof (struct tree_string, str) + 1;
2466 record_node_allocation_statistics (STRING_CST, size);
2468 tree s = (tree) ggc_internal_alloc (size);
2470 memset (s, 0, sizeof (struct tree_typed));
2471 TREE_SET_CODE (s, STRING_CST);
2472 TREE_CONSTANT (s) = 1;
2473 TREE_STRING_LENGTH (s) = len;
2474 if (str)
2475 memcpy (s->string.str, str, len);
2476 else
2477 memset (s->string.str, 0, len);
2478 s->string.str[len] = '\0';
2480 return s;
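/* For example, build_string (6, "hello") creates a STRING_CST whose
   TREE_STRING_LENGTH is 6 (the five characters plus the trailing NUL
   of the C literal); the stored bytes are additionally followed by a
   zero byte, as guaranteed above.  */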
2483 /* Return a newly constructed COMPLEX_CST node whose value is
2484 specified by the real and imaginary parts REAL and IMAG.
2485 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2486 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2488 tree
2489 build_complex (tree type, tree real, tree imag)
2491 gcc_assert (CONSTANT_CLASS_P (real));
2492 gcc_assert (CONSTANT_CLASS_P (imag));
2494 tree t = make_node (COMPLEX_CST);
2496 TREE_REALPART (t) = real;
2497 TREE_IMAGPART (t) = imag;
2498 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2499 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2500 return t;
2503 /* Build a complex (inf +- 0i), such as for the result of cproj.
2504 TYPE is the complex tree type of the result. If NEG is true, the
2505 imaginary zero is negative. */
2507 tree
2508 build_complex_inf (tree type, bool neg)
2510 REAL_VALUE_TYPE rinf, rzero = dconst0;
2512 real_inf (&rinf);
2513 rzero.sign = neg;
2514 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2515 build_real (TREE_TYPE (type), rzero));
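/* For instance, with NEG false the result is the COMPLEX_CST
   (+Inf + 0.0i); with NEG true it is (+Inf - 0.0i), matching the
   value cproj returns for an infinite argument.  */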
2518 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2519 element is set to 1. In particular, this is 1 + i for complex types. */
2521 tree
2522 build_each_one_cst (tree type)
2524 if (TREE_CODE (type) == COMPLEX_TYPE)
2526 tree scalar = build_one_cst (TREE_TYPE (type));
2527 return build_complex (type, scalar, scalar);
2529 else
2530 return build_one_cst (type);
2533 /* Return a constant of arithmetic type TYPE which is the
2534 multiplicative identity of the set TYPE. */
2536 tree
2537 build_one_cst (tree type)
2539 switch (TREE_CODE (type))
2541 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2542 case POINTER_TYPE: case REFERENCE_TYPE:
2543 case OFFSET_TYPE:
2544 return build_int_cst (type, 1);
2546 case REAL_TYPE:
2547 return build_real (type, dconst1);
2549 case FIXED_POINT_TYPE:
2550 /* We can only generate 1 for accum types. */
2551 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2552 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2554 case VECTOR_TYPE:
2556 tree scalar = build_one_cst (TREE_TYPE (type));
2558 return build_vector_from_val (type, scalar);
2561 case COMPLEX_TYPE:
2562 return build_complex (type,
2563 build_one_cst (TREE_TYPE (type)),
2564 build_zero_cst (TREE_TYPE (type)));
2566 default:
2567 gcc_unreachable ();
2571 /* Return an integer of type TYPE containing all 1's in as much precision as
2572 it contains, or a complex or vector whose subparts are such integers. */
2574 tree
2575 build_all_ones_cst (tree type)
2577 if (TREE_CODE (type) == COMPLEX_TYPE)
2579 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2580 return build_complex (type, scalar, scalar);
2582 else
2583 return build_minus_one_cst (type);
2586 /* Return a constant of arithmetic type TYPE which is the
2587 opposite of the multiplicative identity of the set TYPE. */
2589 tree
2590 build_minus_one_cst (tree type)
2592 switch (TREE_CODE (type))
2594 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2595 case POINTER_TYPE: case REFERENCE_TYPE:
2596 case OFFSET_TYPE:
2597 return build_int_cst (type, -1);
2599 case REAL_TYPE:
2600 return build_real (type, dconstm1);
2602 case FIXED_POINT_TYPE:
2603 /* We can only generate -1 for accum types. */
2604 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2605 return build_fixed (type,
2606 fixed_from_double_int (double_int_minus_one,
2607 SCALAR_TYPE_MODE (type)));
2609 case VECTOR_TYPE:
2611 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2613 return build_vector_from_val (type, scalar);
2616 case COMPLEX_TYPE:
2617 return build_complex (type,
2618 build_minus_one_cst (TREE_TYPE (type)),
2619 build_zero_cst (TREE_TYPE (type)));
2621 default:
2622 gcc_unreachable ();
2626 /* Build 0 constant of type TYPE. This is used by constructor folding
2627 and thus the constant should be represented in memory by
2628 zero(es). */
2630 tree
2631 build_zero_cst (tree type)
2633 switch (TREE_CODE (type))
2635 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2636 case POINTER_TYPE: case REFERENCE_TYPE:
2637 case OFFSET_TYPE: case NULLPTR_TYPE:
2638 return build_int_cst (type, 0);
2640 case REAL_TYPE:
2641 return build_real (type, dconst0);
2643 case FIXED_POINT_TYPE:
2644 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2646 case VECTOR_TYPE:
2648 tree scalar = build_zero_cst (TREE_TYPE (type));
2650 return build_vector_from_val (type, scalar);
2653 case COMPLEX_TYPE:
2655 tree zero = build_zero_cst (TREE_TYPE (type));
2657 return build_complex (type, zero, zero);
2660 default:
2661 if (!AGGREGATE_TYPE_P (type))
2662 return fold_convert (type, integer_zero_node);
2663 return build_constructor (type, NULL);
2668 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2670 tree
2671 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2673 tree t;
2674 size_t length = (offsetof (struct tree_binfo, base_binfos)
2675 + vec<tree, va_gc>::embedded_size (base_binfos));
2677 record_node_allocation_statistics (TREE_BINFO, length);
2679 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2681 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2683 TREE_SET_CODE (t, TREE_BINFO);
2685 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2687 return t;
2690 /* Create a CASE_LABEL_EXPR tree node and return it. */
2692 tree
2693 build_case_label (tree low_value, tree high_value, tree label_decl)
2695 tree t = make_node (CASE_LABEL_EXPR);
2697 TREE_TYPE (t) = void_type_node;
2698 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2700 CASE_LOW (t) = low_value;
2701 CASE_HIGH (t) = high_value;
2702 CASE_LABEL (t) = label_decl;
2703 CASE_CHAIN (t) = NULL_TREE;
2705 return t;
2708 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2709 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2710 The latter determines the length of the HOST_WIDE_INT vector. */
2712 tree
2713 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2715 tree t;
2716 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2717 + sizeof (struct tree_int_cst));
2719 gcc_assert (len);
2720 record_node_allocation_statistics (INTEGER_CST, length);
2722 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2724 TREE_SET_CODE (t, INTEGER_CST);
2725 TREE_INT_CST_NUNITS (t) = len;
2726 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2727 /* to_offset can only be applied to trees that are offset_int-sized
2728 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2729 must be exactly the precision of offset_int and so LEN is correct. */
2730 if (ext_len <= OFFSET_INT_ELTS)
2731 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2732 else
2733 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2735 TREE_CONSTANT (t) = 1;
2737 return t;
2740 /* Build a newly constructed TREE_VEC node of length LEN. */
2742 tree
2743 make_tree_vec (int len MEM_STAT_DECL)
2745 tree t;
2746 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2748 record_node_allocation_statistics (TREE_VEC, length);
2750 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2752 TREE_SET_CODE (t, TREE_VEC);
2753 TREE_VEC_LENGTH (t) = len;
2755 return t;
2758 /* Grow a TREE_VEC node to new length LEN. */
2760 tree
2761 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2763 gcc_assert (TREE_CODE (v) == TREE_VEC);
2765 int oldlen = TREE_VEC_LENGTH (v);
2766 gcc_assert (len > oldlen);
2768 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2769 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2771 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2773 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2775 TREE_VEC_LENGTH (v) = len;
2777 return v;
2780 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2781 fixed, and scalar, complex or vector. */
2783 bool
2784 zerop (const_tree expr)
2786 return (integer_zerop (expr)
2787 || real_zerop (expr)
2788 || fixed_zerop (expr));
2791 /* Return 1 if EXPR is the integer constant zero or a complex constant
2792 of zero, or a location wrapper for such a constant. */
2794 bool
2795 integer_zerop (const_tree expr)
2797 STRIP_ANY_LOCATION_WRAPPER (expr);
2799 switch (TREE_CODE (expr))
2801 case INTEGER_CST:
2802 return wi::to_wide (expr) == 0;
2803 case COMPLEX_CST:
2804 return (integer_zerop (TREE_REALPART (expr))
2805 && integer_zerop (TREE_IMAGPART (expr)));
2806 case VECTOR_CST:
2807 return (VECTOR_CST_NPATTERNS (expr) == 1
2808 && VECTOR_CST_DUPLICATE_P (expr)
2809 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2810 default:
2811 return false;
2815 /* Return 1 if EXPR is the integer constant one or the corresponding
2816 complex constant, or a location wrapper for such a constant. */
2818 bool
2819 integer_onep (const_tree expr)
2821 STRIP_ANY_LOCATION_WRAPPER (expr);
2823 switch (TREE_CODE (expr))
2825 case INTEGER_CST:
2826 return wi::eq_p (wi::to_widest (expr), 1);
2827 case COMPLEX_CST:
2828 return (integer_onep (TREE_REALPART (expr))
2829 && integer_zerop (TREE_IMAGPART (expr)));
2830 case VECTOR_CST:
2831 return (VECTOR_CST_NPATTERNS (expr) == 1
2832 && VECTOR_CST_DUPLICATE_P (expr)
2833 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2834 default:
2835 return false;
2839 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2840 return 1 if every piece is the integer constant one.
2841 Also return 1 for location wrappers for such a constant. */
2843 bool
2844 integer_each_onep (const_tree expr)
2846 STRIP_ANY_LOCATION_WRAPPER (expr);
2848 if (TREE_CODE (expr) == COMPLEX_CST)
2849 return (integer_onep (TREE_REALPART (expr))
2850 && integer_onep (TREE_IMAGPART (expr)));
2851 else
2852 return integer_onep (expr);
2855 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2856 it contains, or a complex or vector whose subparts are such integers,
2857 or a location wrapper for such a constant. */
2859 bool
2860 integer_all_onesp (const_tree expr)
2862 STRIP_ANY_LOCATION_WRAPPER (expr);
2864 if (TREE_CODE (expr) == COMPLEX_CST
2865 && integer_all_onesp (TREE_REALPART (expr))
2866 && integer_all_onesp (TREE_IMAGPART (expr)))
2867 return true;
2869 else if (TREE_CODE (expr) == VECTOR_CST)
2870 return (VECTOR_CST_NPATTERNS (expr) == 1
2871 && VECTOR_CST_DUPLICATE_P (expr)
2872 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2874 else if (TREE_CODE (expr) != INTEGER_CST)
2875 return false;
2877 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2878 == wi::to_wide (expr));
2881 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2882 for such a constant. */
2884 bool
2885 integer_minus_onep (const_tree expr)
2887 STRIP_ANY_LOCATION_WRAPPER (expr);
2889 if (TREE_CODE (expr) == COMPLEX_CST)
2890 return (integer_all_onesp (TREE_REALPART (expr))
2891 && integer_zerop (TREE_IMAGPART (expr)));
2892 else
2893 return integer_all_onesp (expr);
2896 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2897 one bit on), or a location wrapper for such a constant. */
2899 bool
2900 integer_pow2p (const_tree expr)
2902 STRIP_ANY_LOCATION_WRAPPER (expr);
2904 if (TREE_CODE (expr) == COMPLEX_CST
2905 && integer_pow2p (TREE_REALPART (expr))
2906 && integer_zerop (TREE_IMAGPART (expr)))
2907 return true;
2909 if (TREE_CODE (expr) != INTEGER_CST)
2910 return false;
2912 return wi::popcount (wi::to_wide (expr)) == 1;
2915 /* Return 1 if EXPR is an integer constant other than zero or a
2916 complex constant other than zero, or a location wrapper for such a
2917 constant. */
2919 bool
2920 integer_nonzerop (const_tree expr)
2922 STRIP_ANY_LOCATION_WRAPPER (expr);
2924 return ((TREE_CODE (expr) == INTEGER_CST
2925 && wi::to_wide (expr) != 0)
2926 || (TREE_CODE (expr) == COMPLEX_CST
2927 && (integer_nonzerop (TREE_REALPART (expr))
2928 || integer_nonzerop (TREE_IMAGPART (expr)))));
2931 /* Return 1 if EXPR is the integer constant one. For vector,
2932 return 1 if every piece is the integer constant minus one
2933 (representing the value TRUE).
2934 Also return 1 for location wrappers for such a constant. */
2936 bool
2937 integer_truep (const_tree expr)
2939 STRIP_ANY_LOCATION_WRAPPER (expr);
2941 if (TREE_CODE (expr) == VECTOR_CST)
2942 return integer_all_onesp (expr);
2943 return integer_onep (expr);
2946 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
2947 for such a constant. */
2949 bool
2950 fixed_zerop (const_tree expr)
2952 STRIP_ANY_LOCATION_WRAPPER (expr);
2954 return (TREE_CODE (expr) == FIXED_CST
2955 && TREE_FIXED_CST (expr).data.is_zero ());
2958 /* Return the power of two represented by a tree node known to be a
2959 power of two. */
2961 int
2962 tree_log2 (const_tree expr)
2964 if (TREE_CODE (expr) == COMPLEX_CST)
2965 return tree_log2 (TREE_REALPART (expr));
2967 return wi::exact_log2 (wi::to_wide (expr));
2970 /* Similar, but return the largest integer Y such that 2 ** Y is less
2971 than or equal to EXPR. */
2973 int
2974 tree_floor_log2 (const_tree expr)
2976 if (TREE_CODE (expr) == COMPLEX_CST)
2977 return tree_log2 (TREE_REALPART (expr));
2979 return wi::floor_log2 (wi::to_wide (expr));
2982 /* Return number of known trailing zero bits in EXPR, or, if the value of
2983 EXPR is known to be zero, the precision of its type. */
2985 unsigned int
2986 tree_ctz (const_tree expr)
2988 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2989 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2990 return 0;
2992 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2993 switch (TREE_CODE (expr))
2995 case INTEGER_CST:
2996 ret1 = wi::ctz (wi::to_wide (expr));
2997 return MIN (ret1, prec);
2998 case SSA_NAME:
2999 ret1 = wi::ctz (get_nonzero_bits (expr));
3000 return MIN (ret1, prec);
3001 case PLUS_EXPR:
3002 case MINUS_EXPR:
3003 case BIT_IOR_EXPR:
3004 case BIT_XOR_EXPR:
3005 case MIN_EXPR:
3006 case MAX_EXPR:
3007 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3008 if (ret1 == 0)
3009 return ret1;
3010 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3011 return MIN (ret1, ret2);
3012 case POINTER_PLUS_EXPR:
3013 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3014 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3015 /* Second operand is sizetype, which could be in theory
3016 wider than pointer's precision. Make sure we never
3017 return more than prec. */
3018 ret2 = MIN (ret2, prec);
3019 return MIN (ret1, ret2);
3020 case BIT_AND_EXPR:
3021 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3022 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3023 return MAX (ret1, ret2);
3024 case MULT_EXPR:
3025 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3026 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3027 return MIN (ret1 + ret2, prec);
3028 case LSHIFT_EXPR:
3029 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3030 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3031 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3033 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3034 return MIN (ret1 + ret2, prec);
3036 return ret1;
3037 case RSHIFT_EXPR:
3038 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3039 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3041 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3042 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3043 if (ret1 > ret2)
3044 return ret1 - ret2;
3046 return 0;
3047 case TRUNC_DIV_EXPR:
3048 case CEIL_DIV_EXPR:
3049 case FLOOR_DIV_EXPR:
3050 case ROUND_DIV_EXPR:
3051 case EXACT_DIV_EXPR:
3052 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3053 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
3055 int l = tree_log2 (TREE_OPERAND (expr, 1));
3056 if (l >= 0)
3058 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3059 ret2 = l;
3060 if (ret1 > ret2)
3061 return ret1 - ret2;
3064 return 0;
3065 CASE_CONVERT:
3066 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3067 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
3068 ret1 = prec;
3069 return MIN (ret1, prec);
3070 case SAVE_EXPR:
3071 return tree_ctz (TREE_OPERAND (expr, 0));
3072 case COND_EXPR:
3073 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
3074 if (ret1 == 0)
3075 return 0;
3076 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
3077 return MIN (ret1, ret2);
3078 case COMPOUND_EXPR:
3079 return tree_ctz (TREE_OPERAND (expr, 1));
3080 case ADDR_EXPR:
3081 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
3082 if (ret1 > BITS_PER_UNIT)
3084 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
3085 return MIN (ret1, prec);
3087 return 0;
3088 default:
3089 return 0;
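/* As a worked illustration: for (x * 8) + 4 computed in a 32-bit
   integer type, where nothing is known about X, the MULT_EXPR
   contributes 0 + ctz (8) = 3 known trailing zeros, the constant 4
   contributes 2, and the PLUS_EXPR takes the minimum, so tree_ctz
   reports 2.  */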
3093 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3094 decimal float constants, so don't return 1 for them.
3095 Also return 1 for location wrappers around such a constant. */
3097 bool
3098 real_zerop (const_tree expr)
3100 STRIP_ANY_LOCATION_WRAPPER (expr);
3102 switch (TREE_CODE (expr))
3104 case REAL_CST:
3105 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3106 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3107 case COMPLEX_CST:
3108 return real_zerop (TREE_REALPART (expr))
3109 && real_zerop (TREE_IMAGPART (expr));
3110 case VECTOR_CST:
3112 /* Don't simply check for a duplicate because the predicate
3113 accepts both +0.0 and -0.0. */
3114 unsigned count = vector_cst_encoded_nelts (expr);
3115 for (unsigned int i = 0; i < count; ++i)
3116 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3117 return false;
3118 return true;
3120 default:
3121 return false;
3125 /* Return 1 if EXPR is the real constant one in real or complex form.
3126 Trailing zeroes matter for decimal float constants, so don't return
3127 1 for them.
3128 Also return 1 for location wrappers around such a constant. */
3130 bool
3131 real_onep (const_tree expr)
3133 STRIP_ANY_LOCATION_WRAPPER (expr);
3135 switch (TREE_CODE (expr))
3137 case REAL_CST:
3138 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3139 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3140 case COMPLEX_CST:
3141 return real_onep (TREE_REALPART (expr))
3142 && real_zerop (TREE_IMAGPART (expr));
3143 case VECTOR_CST:
3144 return (VECTOR_CST_NPATTERNS (expr) == 1
3145 && VECTOR_CST_DUPLICATE_P (expr)
3146 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3147 default:
3148 return false;
3152 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3153 matter for decimal float constants, so don't return 1 for them.
3154 Also return 1 for location wrappers around such a constant. */
3156 bool
3157 real_minus_onep (const_tree expr)
3159 STRIP_ANY_LOCATION_WRAPPER (expr);
3161 switch (TREE_CODE (expr))
3163 case REAL_CST:
3164 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3165 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3166 case COMPLEX_CST:
3167 return real_minus_onep (TREE_REALPART (expr))
3168 && real_zerop (TREE_IMAGPART (expr));
3169 case VECTOR_CST:
3170 return (VECTOR_CST_NPATTERNS (expr) == 1
3171 && VECTOR_CST_DUPLICATE_P (expr)
3172 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3173 default:
3174 return false;
3178 /* Nonzero if EXP is a constant or a cast of a constant. */
3180 bool
3181 really_constant_p (const_tree exp)
3183 /* This is not quite the same as STRIP_NOPS. It does more. */
3184 while (CONVERT_EXPR_P (exp)
3185 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3186 exp = TREE_OPERAND (exp, 0);
3187 return TREE_CONSTANT (exp);
3190 /* Return true if T holds a polynomial pointer difference, storing it in
3191 *VALUE if so. A true return means that T's precision is no greater
3192 than 64 bits, which is the largest address space we support, so *VALUE
3193 never loses precision. However, the signedness of the result does
3194 not necessarily match the signedness of T: sometimes an unsigned type
3195 like sizetype is used to encode a value that is actually negative. */
3197 bool
3198 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
3200 if (!t)
3201 return false;
3202 if (TREE_CODE (t) == INTEGER_CST)
3204 if (!cst_and_fits_in_hwi (t))
3205 return false;
3206 *value = int_cst_value (t);
3207 return true;
3209 if (POLY_INT_CST_P (t))
3211 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3212 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3213 return false;
3214 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3215 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3216 return true;
3218 return false;
3221 poly_int64
3222 tree_to_poly_int64 (const_tree t)
3224 gcc_assert (tree_fits_poly_int64_p (t));
3225 if (POLY_INT_CST_P (t))
3226 return poly_int_cst_value (t).force_shwi ();
3227 return TREE_INT_CST_LOW (t);
3230 poly_uint64
3231 tree_to_poly_uint64 (const_tree t)
3233 gcc_assert (tree_fits_poly_uint64_p (t));
3234 if (POLY_INT_CST_P (t))
3235 return poly_int_cst_value (t).force_uhwi ();
3236 return TREE_INT_CST_LOW (t);
3239 /* Return first list element whose TREE_VALUE is ELEM.
3240 Return 0 if ELEM is not in LIST. */
3242 tree
3243 value_member (tree elem, tree list)
3245 while (list)
3247 if (elem == TREE_VALUE (list))
3248 return list;
3249 list = TREE_CHAIN (list);
3251 return NULL_TREE;
3254 /* Return first list element whose TREE_PURPOSE is ELEM.
3255 Return 0 if ELEM is not in LIST. */
3257 tree
3258 purpose_member (const_tree elem, tree list)
3260 while (list)
3262 if (elem == TREE_PURPOSE (list))
3263 return list;
3264 list = TREE_CHAIN (list);
3266 return NULL_TREE;
3269 /* Return true if ELEM is in V. */
3271 bool
3272 vec_member (const_tree elem, vec<tree, va_gc> *v)
3274 unsigned ix;
3275 tree t;
3276 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3277 if (elem == t)
3278 return true;
3279 return false;
3282 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3283 NULL_TREE. */
3285 tree
3286 chain_index (int idx, tree chain)
3288 for (; chain && idx > 0; --idx)
3289 chain = TREE_CHAIN (chain);
3290 return chain;
3293 /* Return nonzero if ELEM is part of the chain CHAIN. */
3295 bool
3296 chain_member (const_tree elem, const_tree chain)
3298 while (chain)
3300 if (elem == chain)
3301 return true;
3302 chain = DECL_CHAIN (chain);
3305 return false;
3308 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3309 We expect a null pointer to mark the end of the chain.
3310 This is the Lisp primitive `length'. */
3312 int
3313 list_length (const_tree t)
3315 const_tree p = t;
3316 #ifdef ENABLE_TREE_CHECKING
3317 const_tree q = t;
3318 #endif
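  /* Under ENABLE_TREE_CHECKING, Q trails P at half speed (a
     tortoise-and-hare walk), so a circular chain trips the assertion
     in the loop below instead of hanging.  */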
3319 int len = 0;
3321 while (p)
3323 p = TREE_CHAIN (p);
3324 #ifdef ENABLE_TREE_CHECKING
3325 if (len % 2)
3326 q = TREE_CHAIN (q);
3327 gcc_assert (p != q);
3328 #endif
3329 len++;
3332 return len;
3335 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3336 UNION_TYPE TYPE, or NULL_TREE if none. */
3338 tree
3339 first_field (const_tree type)
3341 tree t = TYPE_FIELDS (type);
3342 while (t && TREE_CODE (t) != FIELD_DECL)
3343 t = TREE_CHAIN (t);
3344 return t;
3347 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3348 UNION_TYPE TYPE, or NULL_TREE if none. */
3350 tree
3351 last_field (const_tree type)
3353 tree last = NULL_TREE;
3355 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3357 if (TREE_CODE (fld) != FIELD_DECL)
3358 continue;
3360 last = fld;
3363 return last;
3366 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3367 by modifying the last node in chain 1 to point to chain 2.
3368 This is the Lisp primitive `nconc'. */
3370 tree
3371 chainon (tree op1, tree op2)
3373 tree t1;
3375 if (!op1)
3376 return op2;
3377 if (!op2)
3378 return op1;
3380 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3381 continue;
3382 TREE_CHAIN (t1) = op2;
3384 #ifdef ENABLE_TREE_CHECKING
3386 tree t2;
3387 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3388 gcc_assert (t2 != t1);
3390 #endif
3392 return op1;
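/* As an illustrative sketch, the hypothetical helper below appends a
   single (PURPOSE, VALUE) node to the end of LIST; unlike tree_cons,
   which prepends, this modifies LIST in place via chainon.  */

static inline tree
example_append_tree_list_sketch (tree list, tree purpose, tree value)
{
  return chainon (list, build_tree_list (purpose, value));
}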
3395 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3397 tree
3398 tree_last (tree chain)
3400 tree next;
3401 if (chain)
3402 while ((next = TREE_CHAIN (chain)))
3403 chain = next;
3404 return chain;
3407 /* Reverse the order of elements in the chain T,
3408 and return the new head of the chain (old last element). */
3410 tree
3411 nreverse (tree t)
3413 tree prev = 0, decl, next;
3414 for (decl = t; decl; decl = next)
3416 /* We shouldn't be using this function to reverse BLOCK chains; we
3417 have blocks_nreverse for that. */
3418 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3419 next = TREE_CHAIN (decl);
3420 TREE_CHAIN (decl) = prev;
3421 prev = decl;
3423 return prev;
3426 /* Return a newly created TREE_LIST node whose
3427 purpose and value fields are PARM and VALUE. */
3429 tree
3430 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3432 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3433 TREE_PURPOSE (t) = parm;
3434 TREE_VALUE (t) = value;
3435 return t;
3438 /* Build a chain of TREE_LIST nodes from a vector. */
3440 tree
3441 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3443 tree ret = NULL_TREE;
3444 tree *pp = &ret;
3445 unsigned int i;
3446 tree t;
3447 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3449 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3450 pp = &TREE_CHAIN (*pp);
3452 return ret;
3455 /* Return a newly created TREE_LIST node whose
3456 purpose and value fields are PURPOSE and VALUE
3457 and whose TREE_CHAIN is CHAIN. */
3459 tree
3460 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3462 tree node;
3464 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3465 memset (node, 0, sizeof (struct tree_common));
3467 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3469 TREE_SET_CODE (node, TREE_LIST);
3470 TREE_CHAIN (node) = chain;
3471 TREE_PURPOSE (node) = purpose;
3472 TREE_VALUE (node) = value;
3473 return node;
3476 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3477 trees. */
3479 vec<tree, va_gc> *
3480 ctor_to_vec (tree ctor)
3482 vec<tree, va_gc> *vec;
3483 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3484 unsigned int ix;
3485 tree val;
3487 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3488 vec->quick_push (val);
3490 return vec;
3493 /* Return the size nominally occupied by an object of type TYPE
3494 when it resides in memory. The value is measured in units of bytes,
3495 and its data type is that normally used for type sizes
3496 (which is the first type created by make_signed_type or
3497 make_unsigned_type). */
3499 tree
3500 size_in_bytes_loc (location_t loc, const_tree type)
3502 tree t;
3504 if (type == error_mark_node)
3505 return integer_zero_node;
3507 type = TYPE_MAIN_VARIANT (type);
3508 t = TYPE_SIZE_UNIT (type);
3510 if (t == 0)
3512 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3513 return size_zero_node;
3516 return t;
3519 /* Return the size of TYPE (in bytes) as a wide integer
3520 or return -1 if the size can vary or is larger than an integer. */
3522 HOST_WIDE_INT
3523 int_size_in_bytes (const_tree type)
3525 tree t;
3527 if (type == error_mark_node)
3528 return 0;
3530 type = TYPE_MAIN_VARIANT (type);
3531 t = TYPE_SIZE_UNIT (type);
3533 if (t && tree_fits_uhwi_p (t))
3534 return TREE_INT_CST_LOW (t);
3535 else
3536 return -1;
3539 /* Return the maximum size of TYPE (in bytes) as a wide integer
3540 or return -1 if the size can vary or is larger than an integer. */
3542 HOST_WIDE_INT
3543 max_int_size_in_bytes (const_tree type)
3545 HOST_WIDE_INT size = -1;
3546 tree size_tree;
3548 /* If this is an array type, check for a possible MAX_SIZE attached. */
3550 if (TREE_CODE (type) == ARRAY_TYPE)
3552 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3554 if (size_tree && tree_fits_uhwi_p (size_tree))
3555 size = tree_to_uhwi (size_tree);
3558 /* If we still haven't been able to get a size, see if the language
3559 can compute a maximum size. */
3561 if (size == -1)
3563 size_tree = lang_hooks.types.max_size (type);
3565 if (size_tree && tree_fits_uhwi_p (size_tree))
3566 size = tree_to_uhwi (size_tree);
3569 return size;
3572 /* Return the bit position of FIELD, in bits from the start of the record.
3573 This is a tree of type bitsizetype. */
3575 tree
3576 bit_position (const_tree field)
3578 return bit_from_pos (DECL_FIELD_OFFSET (field),
3579 DECL_FIELD_BIT_OFFSET (field));
3582 /* Return the byte position of FIELD, in bytes from the start of the record.
3583 This is a tree of type sizetype. */
3585 tree
3586 byte_position (const_tree field)
3588 return byte_from_pos (DECL_FIELD_OFFSET (field),
3589 DECL_FIELD_BIT_OFFSET (field));
3592 /* Likewise, but return as an integer. It must be representable in
3593 that way (since it could be a signed value, we don't have the
3594 option of returning -1 like int_size_in_bytes can). */
3596 HOST_WIDE_INT
3597 int_byte_position (const_tree field)
3599 return tree_to_shwi (byte_position (field));
3602 /* Return, as a tree node, the number of elements for TYPE (which is an
3603 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3605 tree
3606 array_type_nelts (const_tree type)
3608 tree index_type, min, max;
3610 /* If they did it with unspecified bounds, then we should have already
3611 given an error about it before we got here. */
3612 if (! TYPE_DOMAIN (type))
3613 return error_mark_node;
3615 index_type = TYPE_DOMAIN (type);
3616 min = TYPE_MIN_VALUE (index_type);
3617 max = TYPE_MAX_VALUE (index_type);
3619 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3620 if (!max)
3622 /* Zero-sized arrays are represented by the C FE as complete types with
3623 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while the C++ FE represents
3624 them as min 0, max -1. */
3625 if (COMPLETE_TYPE_P (type)
3626 && integer_zerop (TYPE_SIZE (type))
3627 && integer_zerop (min))
3628 return build_int_cst (TREE_TYPE (min), -1);
3630 return error_mark_node;
3633 return (integer_zerop (min)
3634 ? max
3635 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
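/* For example, for the C type int[10] the domain is [0, 9] and the
   result is the INTEGER_CST 9; for a C zero-length array (complete,
   zero TYPE_SIZE) the result is -1.  */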
3638 /* If arg is static -- a reference to an object in static storage -- then
3639 return the object. This is not the same as the C meaning of `static'.
3640 If arg isn't static, return NULL. */
3642 tree
3643 staticp (tree arg)
3645 switch (TREE_CODE (arg))
3647 case FUNCTION_DECL:
3648 /* Nested functions are static, even though taking their address will
3649 involve a trampoline as we unnest the nested function and create
3650 the trampoline on the tree level. */
3651 return arg;
3653 case VAR_DECL:
3654 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3655 && ! DECL_THREAD_LOCAL_P (arg)
3656 && ! DECL_DLLIMPORT_P (arg)
3657 ? arg : NULL);
3659 case CONST_DECL:
3660 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3661 ? arg : NULL);
3663 case CONSTRUCTOR:
3664 return TREE_STATIC (arg) ? arg : NULL;
3666 case LABEL_DECL:
3667 case STRING_CST:
3668 return arg;
3670 case COMPONENT_REF:
3671 /* If the thing being referenced is not a field, then it is
3672 something language specific. */
3673 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3675 /* If we are referencing a bitfield, we can't evaluate an
3676 ADDR_EXPR at compile time and so it isn't a constant. */
3677 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3678 return NULL;
3680 return staticp (TREE_OPERAND (arg, 0));
3682 case BIT_FIELD_REF:
3683 return NULL;
3685 case INDIRECT_REF:
3686 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3688 case ARRAY_REF:
3689 case ARRAY_RANGE_REF:
3690 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3691 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3692 return staticp (TREE_OPERAND (arg, 0));
3693 else
3694 return NULL;
3696 case COMPOUND_LITERAL_EXPR:
3697 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3699 default:
3700 return NULL;
3707 /* Return whether OP is a DECL whose address is function-invariant. */
3709 bool
3710 decl_address_invariant_p (const_tree op)
3712 /* The conditions below are slightly less strict than the ones in
3713 staticp. */
3715 switch (TREE_CODE (op))
3717 case PARM_DECL:
3718 case RESULT_DECL:
3719 case LABEL_DECL:
3720 case FUNCTION_DECL:
3721 return true;
3723 case VAR_DECL:
3724 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3725 || DECL_THREAD_LOCAL_P (op)
3726 || DECL_CONTEXT (op) == current_function_decl
3727 || decl_function_context (op) == current_function_decl)
3728 return true;
3729 break;
3731 case CONST_DECL:
3732 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3733 || decl_function_context (op) == current_function_decl)
3734 return true;
3735 break;
3737 default:
3738 break;
3741 return false;
3744 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3746 bool
3747 decl_address_ip_invariant_p (const_tree op)
3749 /* The conditions below are slightly less strict than the ones in
3750 staticp. */
3752 switch (TREE_CODE (op))
3754 case LABEL_DECL:
3755 case FUNCTION_DECL:
3756 case STRING_CST:
3757 return true;
3759 case VAR_DECL:
3760 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3761 && !DECL_DLLIMPORT_P (op))
3762 || DECL_THREAD_LOCAL_P (op))
3763 return true;
3764 break;
3766 case CONST_DECL:
3767 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3768 return true;
3769 break;
3771 default:
3772 break;
3775 return false;
3779 /* Return true if T is function-invariant (internal function, does
3780 not handle arithmetic; that's handled in skip_simple_arithmetic and
3781 tree_invariant_p). */
3783 static bool
3784 tree_invariant_p_1 (tree t)
3786 tree op;
3788 if (TREE_CONSTANT (t)
3789 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3790 return true;
3792 switch (TREE_CODE (t))
3794 case SAVE_EXPR:
3795 return true;
3797 case ADDR_EXPR:
3798 op = TREE_OPERAND (t, 0);
3799 while (handled_component_p (op))
3801 switch (TREE_CODE (op))
3803 case ARRAY_REF:
3804 case ARRAY_RANGE_REF:
3805 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3806 || TREE_OPERAND (op, 2) != NULL_TREE
3807 || TREE_OPERAND (op, 3) != NULL_TREE)
3808 return false;
3809 break;
3811 case COMPONENT_REF:
3812 if (TREE_OPERAND (op, 2) != NULL_TREE)
3813 return false;
3814 break;
3816 default:;
3818 op = TREE_OPERAND (op, 0);
3821 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3823 default:
3824 break;
3827 return false;
3830 /* Return true if T is function-invariant. */
3832 bool
3833 tree_invariant_p (tree t)
3835 tree inner = skip_simple_arithmetic (t);
3836 return tree_invariant_p_1 (inner);
3839 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3840 Do this to any expression which may be used in more than one place,
3841 but must be evaluated only once.
3843 Normally, expand_expr would reevaluate the expression each time.
3844 Calling save_expr produces something that is evaluated and recorded
3845 the first time expand_expr is called on it. Subsequent calls to
3846 expand_expr just reuse the recorded value.
3848 The call to expand_expr that generates code that actually computes
3849 the value is the first call *at compile time*. Subsequent calls
3850 *at compile time* generate code to use the saved value.
3851 This produces the correct result provided that *at run time* control
3852 always flows through the insns made by the first expand_expr
3853 before reaching the other places where the save_expr was evaluated.
3854 You, the caller of save_expr, must make sure this is so.
3856 Constants, and certain read-only nodes, are returned with no
3857 SAVE_EXPR because that is safe. Expressions containing placeholders
3858 are not touched; see tree.def for an explanation of what these
3859 are used for. */
3861 tree
3862 save_expr (tree expr)
3864 tree inner;
3866 /* If the tree evaluates to a constant, then we don't want to hide that
3867 fact (i.e. this allows further folding, and direct checks for constants).
3868 However, a read-only object that has side effects cannot be bypassed.
3869 Since it is no problem to reevaluate literals, we just return the
3870 literal node. */
3871 inner = skip_simple_arithmetic (expr);
3872 if (TREE_CODE (inner) == ERROR_MARK)
3873 return inner;
3875 if (tree_invariant_p_1 (inner))
3876 return expr;
3878 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3879 it means that the size or offset of some field of an object depends on
3880 the value within another field.
3882 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3883 and some variable since it would then need to be both evaluated once and
3884 evaluated more than once. Front-ends must assure this case cannot
3885 happen by surrounding any such subexpressions in their own SAVE_EXPR
3886 and forcing evaluation at the proper time. */
3887 if (contains_placeholder_p (inner))
3888 return expr;
3890 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3892 /* This expression might be placed ahead of a jump to ensure that the
3893 value was computed on both sides of the jump. So make sure it isn't
3894 eliminated as dead. */
3895 TREE_SIDE_EFFECTS (expr) = 1;
3896 return expr;
3899 /* Look inside EXPR into any simple arithmetic operations. Return the
3900 outermost non-arithmetic or non-invariant node. */
3902 tree
3903 skip_simple_arithmetic (tree expr)
3905 /* We don't care about whether this can be used as an lvalue in this
3906 context. */
3907 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3908 expr = TREE_OPERAND (expr, 0);
3910 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3911 a constant, it will be more efficient to not make another SAVE_EXPR since
3912 it will allow better simplification and GCSE will be able to merge the
3913 computations if they actually occur. */
3914 while (true)
3916 if (UNARY_CLASS_P (expr))
3917 expr = TREE_OPERAND (expr, 0);
3918 else if (BINARY_CLASS_P (expr))
3920 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3921 expr = TREE_OPERAND (expr, 0);
3922 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3923 expr = TREE_OPERAND (expr, 1);
3924 else
3925 break;
3927 else
3928 break;
3931 return expr;
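/* As a worked illustration: for (SAVE_EXPR <x>) * 4 + 1, each binary
   operator has one invariant operand, so the walk descends through
   the PLUS_EXPR and MULT_EXPR and returns the SAVE_EXPR; save_expr
   then sees an invariant inner node and returns the expression
   unchanged instead of wrapping it in another SAVE_EXPR.  */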
3934 /* Look inside EXPR into simple arithmetic operations involving constants.
3935 Return the outermost non-arithmetic or non-constant node. */
3937 tree
3938 skip_simple_constant_arithmetic (tree expr)
3940 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3941 expr = TREE_OPERAND (expr, 0);
3943 while (true)
3945 if (UNARY_CLASS_P (expr))
3946 expr = TREE_OPERAND (expr, 0);
3947 else if (BINARY_CLASS_P (expr))
3949 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3950 expr = TREE_OPERAND (expr, 0);
3951 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3952 expr = TREE_OPERAND (expr, 1);
3953 else
3954 break;
3956 else
3957 break;
3960 return expr;
3963 /* Return which tree structure is used by T. */
3965 enum tree_node_structure_enum
3966 tree_node_structure (const_tree t)
3968 const enum tree_code code = TREE_CODE (t);
3969 return tree_node_structure_for_code (code);
3972 /* Set various status flags when building a CALL_EXPR object T. */
3974 static void
3975 process_call_operands (tree t)
3977 bool side_effects = TREE_SIDE_EFFECTS (t);
3978 bool read_only = false;
3979 int i = call_expr_flags (t);
3981 /* Calls have side-effects, except those to const or pure functions. */
3982 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3983 side_effects = true;
3984 /* Propagate TREE_READONLY of arguments for const functions. */
3985 if (i & ECF_CONST)
3986 read_only = true;
3988 if (!side_effects || read_only)
3989 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3991 tree op = TREE_OPERAND (t, i);
3992 if (op && TREE_SIDE_EFFECTS (op))
3993 side_effects = true;
3994 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3995 read_only = false;
3998 TREE_SIDE_EFFECTS (t) = side_effects;
3999 TREE_READONLY (t) = read_only;
4002 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4003 size or offset that depends on a field within a record. */
4005 bool
4006 contains_placeholder_p (const_tree exp)
4008 enum tree_code code;
4010 if (!exp)
4011 return 0;
4013 code = TREE_CODE (exp);
4014 if (code == PLACEHOLDER_EXPR)
4015 return 1;
4017 switch (TREE_CODE_CLASS (code))
4019 case tcc_reference:
4020 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4021 position computations since they will be converted into a
4022 WITH_RECORD_EXPR involving the reference, which we assume
4023 here will be valid. */
4024 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4026 case tcc_exceptional:
4027 if (code == TREE_LIST)
4028 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
4029 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
4030 break;
4032 case tcc_unary:
4033 case tcc_binary:
4034 case tcc_comparison:
4035 case tcc_expression:
4036 switch (code)
4038 case COMPOUND_EXPR:
4039 /* Ignoring the first operand isn't quite right, but works best. */
4040 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
4042 case COND_EXPR:
4043 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4044 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
4045 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
4047 case SAVE_EXPR:
4048 /* The save_expr function never wraps anything containing
4049 a PLACEHOLDER_EXPR. */
4050 return 0;
4052 default:
4053 break;
4056 switch (TREE_CODE_LENGTH (code))
4058 case 1:
4059 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4060 case 2:
4061 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4062 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
4063 default:
4064 return 0;
4067 case tcc_vl_exp:
4068 switch (code)
4070 case CALL_EXPR:
4072 const_tree arg;
4073 const_call_expr_arg_iterator iter;
4074 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
4075 if (CONTAINS_PLACEHOLDER_P (arg))
4076 return 1;
4077 return 0;
4079 default:
4080 return 0;
4083 default:
4084 return 0;
4086 return 0;
4089 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4090 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4091 field positions. */
4093 static bool
4094 type_contains_placeholder_1 (const_tree type)
4096 /* If the size contains a placeholder or the parent type (component type in
4097 the case of arrays) involves a placeholder, this type does. */
4098 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
4099 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
4100 || (!POINTER_TYPE_P (type)
4101 && TREE_TYPE (type)
4102 && type_contains_placeholder_p (TREE_TYPE (type))))
4103 return true;
4105 /* Now do type-specific checks. Note that the last part of the check above
4106 greatly limits what we have to do below. */
4107 switch (TREE_CODE (type))
4109 case VOID_TYPE:
4110 case OPAQUE_TYPE:
4111 case COMPLEX_TYPE:
4112 case ENUMERAL_TYPE:
4113 case BOOLEAN_TYPE:
4114 case POINTER_TYPE:
4115 case OFFSET_TYPE:
4116 case REFERENCE_TYPE:
4117 case METHOD_TYPE:
4118 case FUNCTION_TYPE:
4119 case VECTOR_TYPE:
4120 case NULLPTR_TYPE:
4121 return false;
4123 case INTEGER_TYPE:
4124 case REAL_TYPE:
4125 case FIXED_POINT_TYPE:
4126 /* Here we just check the bounds. */
4127 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
4128 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
4130 case ARRAY_TYPE:
4131 /* We have already checked the component type above, so just check
4132 the domain type. Flexible array members have a null domain. */
4133 return TYPE_DOMAIN (type) ?
4134 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4136 case RECORD_TYPE:
4137 case UNION_TYPE:
4138 case QUAL_UNION_TYPE:
4140 tree field;
4142 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4143 if (TREE_CODE (field) == FIELD_DECL
4144 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4145 || (TREE_CODE (type) == QUAL_UNION_TYPE
4146 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4147 || type_contains_placeholder_p (TREE_TYPE (field))))
4148 return true;
4150 return false;
4153 default:
4154 gcc_unreachable ();
4158 /* Wrapper around above function used to cache its result. */
4160 bool
4161 type_contains_placeholder_p (tree type)
4163 bool result;
4165 /* If the contains_placeholder_bits field has been initialized,
4166 then we know the answer. */
4167 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4168 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4170 /* Indicate that we've seen this type node, and the answer is false.
4171 This is what we want to return if we run into recursion via fields. */
4172 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4174 /* Compute the real value. */
4175 result = type_contains_placeholder_1 (type);
4177 /* Store the real value. */
4178 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4180 return result;
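/* Note on the cache encoding used above (value names are informal):
   TYPE_CONTAINS_PLACEHOLDER_INTERNAL stores

     0 - not computed yet
     1 - computed, answer is false
     2 - computed, answer is true

   hence the `> 0' test and the `- 1' / `+ 1' adjustments, and the reason a
   recursive query via fields sees a provisional "false".  */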
4183 /* Push tree EXP onto vector QUEUE if it is not already present. */
4185 static void
4186 push_without_duplicates (tree exp, vec<tree> *queue)
4188 unsigned int i;
4189 tree iter;
4191 FOR_EACH_VEC_ELT (*queue, i, iter)
4192 if (simple_cst_equal (iter, exp) == 1)
4193 break;
4195 if (!iter)
4196 queue->safe_push (exp);
4199 /* Given a tree EXP, find all occurrences of references to fields
4200 in a PLACEHOLDER_EXPR and place them in vector REFS without
4201 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4202 we assume here that EXP contains only arithmetic expressions
4203 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4204 argument list. */
4206 void
4207 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4209 enum tree_code code = TREE_CODE (exp);
4210 tree inner;
4211 int i;
4213 /* We handle TREE_LIST and COMPONENT_REF separately. */
4214 if (code == TREE_LIST)
4216 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4217 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4219 else if (code == COMPONENT_REF)
4221 for (inner = TREE_OPERAND (exp, 0);
4222 REFERENCE_CLASS_P (inner);
4223 inner = TREE_OPERAND (inner, 0))
4226 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4227 push_without_duplicates (exp, refs);
4228 else
4229 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4231 else
4232 switch (TREE_CODE_CLASS (code))
4234 case tcc_constant:
4235 break;
4237 case tcc_declaration:
4238 /* Variables allocated to static storage can stay. */
4239 if (!TREE_STATIC (exp))
4240 push_without_duplicates (exp, refs);
4241 break;
4243 case tcc_expression:
4244 /* This is the pattern built in ada/make_aligning_type. */
4245 if (code == ADDR_EXPR
4246 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4248 push_without_duplicates (exp, refs);
4249 break;
4252 /* Fall through. */
4254 case tcc_exceptional:
4255 case tcc_unary:
4256 case tcc_binary:
4257 case tcc_comparison:
4258 case tcc_reference:
4259 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4260 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4261 break;
4263 case tcc_vl_exp:
4264 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4265 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4266 break;
4268 default:
4269 gcc_unreachable ();
4273 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4274 return a tree with all occurrences of references to F in a
4275 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4276 CONST_DECLs. Note that we assume here that EXP contains only
4277 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4278 occurring only in their argument list. */
4280 tree
4281 substitute_in_expr (tree exp, tree f, tree r)
4283 enum tree_code code = TREE_CODE (exp);
4284 tree op0, op1, op2, op3;
4285 tree new_tree;
4287 /* We handle TREE_LIST and COMPONENT_REF separately. */
4288 if (code == TREE_LIST)
4290 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4291 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4292 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4293 return exp;
4295 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4297 else if (code == COMPONENT_REF)
4299 tree inner;
4301 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4302 and it is the right field, replace it with R. */
4303 for (inner = TREE_OPERAND (exp, 0);
4304 REFERENCE_CLASS_P (inner);
4305 inner = TREE_OPERAND (inner, 0))
4308 /* The field. */
4309 op1 = TREE_OPERAND (exp, 1);
4311 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4312 return r;
4314 /* If this expression hasn't been completed yet, leave it alone. */
4315 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4316 return exp;
4318 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4319 if (op0 == TREE_OPERAND (exp, 0))
4320 return exp;
4322 new_tree
4323 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4325 else
4326 switch (TREE_CODE_CLASS (code))
4328 case tcc_constant:
4329 return exp;
4331 case tcc_declaration:
4332 if (exp == f)
4333 return r;
4334 else
4335 return exp;
4337 case tcc_expression:
4338 if (exp == f)
4339 return r;
4341 /* Fall through. */
4343 case tcc_exceptional:
4344 case tcc_unary:
4345 case tcc_binary:
4346 case tcc_comparison:
4347 case tcc_reference:
4348 switch (TREE_CODE_LENGTH (code))
4350 case 0:
4351 return exp;
4353 case 1:
4354 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4355 if (op0 == TREE_OPERAND (exp, 0))
4356 return exp;
4358 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4359 break;
4361 case 2:
4362 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4363 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4365 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4366 return exp;
4368 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4369 break;
4371 case 3:
4372 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4373 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4374 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4376 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4377 && op2 == TREE_OPERAND (exp, 2))
4378 return exp;
4380 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4381 break;
4383 case 4:
4384 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4385 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4386 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4387 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4389 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4390 && op2 == TREE_OPERAND (exp, 2)
4391 && op3 == TREE_OPERAND (exp, 3))
4392 return exp;
4394 new_tree
4395 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4396 break;
4398 default:
4399 gcc_unreachable ();
4401 break;
4403 case tcc_vl_exp:
4405 int i;
4407 new_tree = NULL_TREE;
4409 /* If we are trying to replace F with a constant or with another
4410 instance of one of the arguments of the call, inline back
4411 functions which do nothing else than computing a value from
4412 the arguments they are passed. This makes it possible to
4413 fold partially or entirely the replacement expression. */
4414 if (code == CALL_EXPR)
4416 bool maybe_inline = false;
4417 if (CONSTANT_CLASS_P (r))
4418 maybe_inline = true;
4419 else
4420 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4421 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4423 maybe_inline = true;
4424 break;
4426 if (maybe_inline)
4428 tree t = maybe_inline_call_in_expr (exp);
4429 if (t)
4430 return SUBSTITUTE_IN_EXPR (t, f, r);
4434 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4436 tree op = TREE_OPERAND (exp, i);
4437 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4438 if (new_op != op)
4440 if (!new_tree)
4441 new_tree = copy_node (exp);
4442 TREE_OPERAND (new_tree, i) = new_op;
4446 if (new_tree)
4448 new_tree = fold (new_tree);
4449 if (TREE_CODE (new_tree) == CALL_EXPR)
4450 process_call_operands (new_tree);
4452 else
4453 return exp;
4455 break;
4457 default:
4458 gcc_unreachable ();
4461 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4463 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4464 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4466 return new_tree;
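/* Illustrative sketch (not part of GCC): typical use of the routine above.
   If SIZE is an expression that references a discriminant field F through a
   PLACEHOLDER_EXPR (see contains_placeholder_p), a front end can specialize
   it for a known discriminant value:

     tree fixed_size
       = SUBSTITUTE_IN_EXPR (size, f, build_int_cst (TREE_TYPE (f), 10));

   where `size' and `f' are hypothetical.  The result folds down to a
   constant when nothing else in SIZE is variable.  */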
4469 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4470 for it within OBJ, a tree that is an object or a chain of references. */
4472 tree
4473 substitute_placeholder_in_expr (tree exp, tree obj)
4475 enum tree_code code = TREE_CODE (exp);
4476 tree op0, op1, op2, op3;
4477 tree new_tree;
4479 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4480 in the chain of OBJ. */
4481 if (code == PLACEHOLDER_EXPR)
4483 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4484 tree elt;
4486 for (elt = obj; elt != 0;
4487 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4488 || TREE_CODE (elt) == COND_EXPR)
4489 ? TREE_OPERAND (elt, 1)
4490 : (REFERENCE_CLASS_P (elt)
4491 || UNARY_CLASS_P (elt)
4492 || BINARY_CLASS_P (elt)
4493 || VL_EXP_CLASS_P (elt)
4494 || EXPRESSION_CLASS_P (elt))
4495 ? TREE_OPERAND (elt, 0) : 0))
4496 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4497 return elt;
4499 for (elt = obj; elt != 0;
4500 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4501 || TREE_CODE (elt) == COND_EXPR)
4502 ? TREE_OPERAND (elt, 1)
4503 : (REFERENCE_CLASS_P (elt)
4504 || UNARY_CLASS_P (elt)
4505 || BINARY_CLASS_P (elt)
4506 || VL_EXP_CLASS_P (elt)
4507 || EXPRESSION_CLASS_P (elt))
4508 ? TREE_OPERAND (elt, 0) : 0))
4509 if (POINTER_TYPE_P (TREE_TYPE (elt))
4510 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4511 == need_type))
4512 return fold_build1 (INDIRECT_REF, need_type, elt);
4514 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4515 survives until RTL generation, there will be an error. */
4516 return exp;
4519 /* TREE_LIST is special because we need to look at TREE_VALUE
4520 and TREE_CHAIN, not TREE_OPERANDS. */
4521 else if (code == TREE_LIST)
4523 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4524 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4525 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4526 return exp;
4528 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4530 else
4531 switch (TREE_CODE_CLASS (code))
4533 case tcc_constant:
4534 case tcc_declaration:
4535 return exp;
4537 case tcc_exceptional:
4538 case tcc_unary:
4539 case tcc_binary:
4540 case tcc_comparison:
4541 case tcc_expression:
4542 case tcc_reference:
4543 case tcc_statement:
4544 switch (TREE_CODE_LENGTH (code))
4546 case 0:
4547 return exp;
4549 case 1:
4550 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4551 if (op0 == TREE_OPERAND (exp, 0))
4552 return exp;
4554 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4555 break;
4557 case 2:
4558 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4559 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4561 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4562 return exp;
4564 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4565 break;
4567 case 3:
4568 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4569 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4570 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4572 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4573 && op2 == TREE_OPERAND (exp, 2))
4574 return exp;
4576 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4577 break;
4579 case 4:
4580 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4581 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4582 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4583 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4585 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4586 && op2 == TREE_OPERAND (exp, 2)
4587 && op3 == TREE_OPERAND (exp, 3))
4588 return exp;
4590 new_tree
4591 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4592 break;
4594 default:
4595 gcc_unreachable ();
4597 break;
4599 case tcc_vl_exp:
4601 int i;
4603 new_tree = NULL_TREE;
4605 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4607 tree op = TREE_OPERAND (exp, i);
4608 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4609 if (new_op != op)
4611 if (!new_tree)
4612 new_tree = copy_node (exp);
4613 TREE_OPERAND (new_tree, i) = new_op;
4617 if (new_tree)
4619 new_tree = fold (new_tree);
4620 if (TREE_CODE (new_tree) == CALL_EXPR)
4621 process_call_operands (new_tree);
4623 else
4624 return exp;
4626 break;
4628 default:
4629 gcc_unreachable ();
4632 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4634 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4635 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4637 return new_tree;
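/* Illustrative sketch (not part of GCC): the classic use of the routine
   above is to compute the size of a self-referential type for one specific
   object, letting the object itself stand for the PLACEHOLDER_EXPR:

     tree size_in_bytes
       = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (TREE_TYPE (obj)),
                                         obj);

   where `obj' is a hypothetical reference to an object of such a type.  */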
4641 /* Subroutine of stabilize_reference; this is called for subtrees of
4642 references. Any expression with side-effects must be put in a SAVE_EXPR
4643 to ensure that it is only evaluated once.
4645 We don't put SAVE_EXPR nodes around everything, because assigning very
4646 simple expressions to temporaries causes us to miss good opportunities
4647 for optimizations. Among other things, the opportunity to fold in the
4648 addition of a constant into an addressing mode often gets lost, e.g.
4649 "y[i+1] += x;". In general, we take the approach that we should not make
4650 an assignment unless we are forced into it - i.e., that any non-side effect
4651 operator should be allowed, and that cse should take care of coalescing
4652 multiple utterances of the same expression should that prove fruitful. */
4654 static tree
4655 stabilize_reference_1 (tree e)
4657 tree result;
4658 enum tree_code code = TREE_CODE (e);
4660 /* We cannot ignore const expressions because it might be a reference
4661 to a const array but whose index contains side-effects. But we can
4662 ignore things that are actual constant or that already have been
4663 handled by this function. */
4665 if (tree_invariant_p (e))
4666 return e;
4668 switch (TREE_CODE_CLASS (code))
4670 case tcc_exceptional:
4671 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4672 have side-effects. */
4673 if (code == STATEMENT_LIST)
4674 return save_expr (e);
4675 /* FALLTHRU */
4676 case tcc_type:
4677 case tcc_declaration:
4678 case tcc_comparison:
4679 case tcc_statement:
4680 case tcc_expression:
4681 case tcc_reference:
4682 case tcc_vl_exp:
4683 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4684 so that it will only be evaluated once. */
4685 /* The reference (r) and comparison (<) classes could be handled as
4686 below, but it is generally faster to only evaluate them once. */
4687 if (TREE_SIDE_EFFECTS (e))
4688 return save_expr (e);
4689 return e;
4691 case tcc_constant:
4692 /* Constants need no processing. In fact, we should never reach
4693 here. */
4694 return e;
4696 case tcc_binary:
4697 /* Division is slow and tends to be compiled with jumps,
4698 especially the division by powers of 2 that is often
4699 found inside of an array reference. So do it just once. */
4700 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4701 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4702 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4703 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4704 return save_expr (e);
4705 /* Recursively stabilize each operand. */
4706 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4707 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4708 break;
4710 case tcc_unary:
4711 /* Recursively stabilize each operand. */
4712 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4713 break;
4715 default:
4716 gcc_unreachable ();
4719 TREE_TYPE (result) = TREE_TYPE (e);
4720 TREE_READONLY (result) = TREE_READONLY (e);
4721 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4722 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4724 return result;
4727 /* Stabilize a reference so that we can use it any number of times
4728 without causing its operands to be evaluated more than once.
4729 Returns the stabilized reference. This works by means of save_expr,
4730 so see the caveats in the comments about save_expr.
4732 Also allows conversion expressions whose operands are references.
4733 Any other kind of expression is returned unchanged. */
4735 tree
4736 stabilize_reference (tree ref)
4738 tree result;
4739 enum tree_code code = TREE_CODE (ref);
4741 switch (code)
4743 case VAR_DECL:
4744 case PARM_DECL:
4745 case RESULT_DECL:
4746 /* No action is needed in this case. */
4747 return ref;
4749 CASE_CONVERT:
4750 case FLOAT_EXPR:
4751 case FIX_TRUNC_EXPR:
4752 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4753 break;
4755 case INDIRECT_REF:
4756 result = build_nt (INDIRECT_REF,
4757 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4758 break;
4760 case COMPONENT_REF:
4761 result = build_nt (COMPONENT_REF,
4762 stabilize_reference (TREE_OPERAND (ref, 0)),
4763 TREE_OPERAND (ref, 1), NULL_TREE);
4764 break;
4766 case BIT_FIELD_REF:
4767 result = build_nt (BIT_FIELD_REF,
4768 stabilize_reference (TREE_OPERAND (ref, 0)),
4769 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4770 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4771 break;
4773 case ARRAY_REF:
4774 result = build_nt (ARRAY_REF,
4775 stabilize_reference (TREE_OPERAND (ref, 0)),
4776 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4777 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4778 break;
4780 case ARRAY_RANGE_REF:
4781 result = build_nt (ARRAY_RANGE_REF,
4782 stabilize_reference (TREE_OPERAND (ref, 0)),
4783 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4784 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4785 break;
4787 case COMPOUND_EXPR:
4788 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4789 it wouldn't be ignored. This matters when dealing with
4790 volatiles. */
4791 return stabilize_reference_1 (ref);
4793 /* If arg isn't a kind of lvalue we recognize, make no change.
4794 Caller should recognize the error for an invalid lvalue. */
4795 default:
4796 return ref;
4798 case ERROR_MARK:
4799 return error_mark_node;
4802 TREE_TYPE (result) = TREE_TYPE (ref);
4803 TREE_READONLY (result) = TREE_READONLY (ref);
4804 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4805 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4806 protected_set_expr_location (result, EXPR_LOCATION (ref));
4808 return result;
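/* Illustrative sketch (not part of GCC): what stabilization buys a caller
   that must reuse a reference, e.g. when expanding `ref += 1' into a read
   followed by a write:

     tree stable = stabilize_reference (ref);
     tree rhs    = build2 (PLUS_EXPR, TREE_TYPE (stable), stable,
                           build_int_cst (TREE_TYPE (stable), 1));
     tree stmt   = build2 (MODIFY_EXPR, TREE_TYPE (stable), stable, rhs);

   If REF is something like a[i++], the side-effecting index ends up wrapped
   in a SAVE_EXPR, so using STABLE twice evaluates i++ only once.  `ref' is
   hypothetical.  */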
4811 /* Low-level constructors for expressions. */
4813 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4814 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4816 void
4817 recompute_tree_invariant_for_addr_expr (tree t)
4819 tree node;
4820 bool tc = true, se = false;
4822 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4824 /* We started out assuming this address is both invariant and constant, but
4825 does not have side effects. Now go down any handled components and see if
4826 any of them involve offsets that are either non-constant or non-invariant.
4827 Also check for side-effects.
4829 ??? Note that this code makes no attempt to deal with the case where
4830 taking the address of something causes a copy due to misalignment. */
4832 #define UPDATE_FLAGS(NODE) \
4833 do { tree _node = (NODE); \
4834 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4835 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4837 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4838 node = TREE_OPERAND (node, 0))
4840 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4841 array reference (probably made temporarily by the G++ front end),
4842 so ignore all the operands. */
4843 if ((TREE_CODE (node) == ARRAY_REF
4844 || TREE_CODE (node) == ARRAY_RANGE_REF)
4845 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4847 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4848 if (TREE_OPERAND (node, 2))
4849 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4850 if (TREE_OPERAND (node, 3))
4851 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4853 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4854 FIELD_DECL, apparently. The G++ front end can put something else
4855 there, at least temporarily. */
4856 else if (TREE_CODE (node) == COMPONENT_REF
4857 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4859 if (TREE_OPERAND (node, 2))
4860 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4864 node = lang_hooks.expr_to_decl (node, &tc, &se);
4866 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4867 the address, since &(*a)->b is a form of addition. If it's a constant, the
4868 address is constant too. If it's a decl, its address is constant if the
4869 decl is static. Everything else is not constant and, furthermore,
4870 taking the address of a volatile variable is not volatile. */
4871 if (TREE_CODE (node) == INDIRECT_REF
4872 || TREE_CODE (node) == MEM_REF)
4873 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4874 else if (CONSTANT_CLASS_P (node))
4876 else if (DECL_P (node))
4877 tc &= (staticp (node) != NULL_TREE);
4878 else
4880 tc = false;
4881 se |= TREE_SIDE_EFFECTS (node);
4885 TREE_CONSTANT (t) = tc;
4886 TREE_SIDE_EFFECTS (t) = se;
4887 #undef UPDATE_FLAGS
4890 /* Build an expression of code CODE, data type TYPE, and operands as
4891 specified. Expressions and reference nodes can be created this way.
4892 Constants, decls, types and misc nodes cannot be.
4894 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4895 enough for all extant tree codes. */
4897 tree
4898 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4900 tree t;
4902 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4904 t = make_node (code PASS_MEM_STAT);
4905 TREE_TYPE (t) = tt;
4907 return t;
4910 tree
4911 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4913 int length = sizeof (struct tree_exp);
4914 tree t;
4916 record_node_allocation_statistics (code, length);
4918 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4920 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4922 memset (t, 0, sizeof (struct tree_common));
4924 TREE_SET_CODE (t, code);
4926 TREE_TYPE (t) = type;
4927 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4928 TREE_OPERAND (t, 0) = node;
4929 if (node && !TYPE_P (node))
4931 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4932 TREE_READONLY (t) = TREE_READONLY (node);
4935 if (TREE_CODE_CLASS (code) == tcc_statement)
4937 if (code != DEBUG_BEGIN_STMT)
4938 TREE_SIDE_EFFECTS (t) = 1;
4940 else switch (code)
4942 case VA_ARG_EXPR:
4943 /* All of these have side-effects, no matter what their
4944 operands are. */
4945 TREE_SIDE_EFFECTS (t) = 1;
4946 TREE_READONLY (t) = 0;
4947 break;
4949 case INDIRECT_REF:
4950 /* Whether a dereference is readonly has nothing to do with whether
4951 its operand is readonly. */
4952 TREE_READONLY (t) = 0;
4953 break;
4955 case ADDR_EXPR:
4956 if (node)
4957 recompute_tree_invariant_for_addr_expr (t);
4958 break;
4960 default:
4961 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4962 && node && !TYPE_P (node)
4963 && TREE_CONSTANT (node))
4964 TREE_CONSTANT (t) = 1;
4965 if (TREE_CODE_CLASS (code) == tcc_reference
4966 && node && TREE_THIS_VOLATILE (node))
4967 TREE_THIS_VOLATILE (t) = 1;
4968 break;
4971 return t;
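/* Illustrative sketch (not part of GCC): build1 in action.  Negating an
   integer expression and taking an address both go through it:

     tree neg  = build1 (NEGATE_EXPR, integer_type_node, expr);
     tree addr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (var)),
                         var);

   where `expr' and `var' are hypothetical.  In the ADDR_EXPR case the
   TREE_CONSTANT and TREE_SIDE_EFFECTS flags are recomputed as described
   for recompute_tree_invariant_for_addr_expr above.  */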
4974 #define PROCESS_ARG(N) \
4975 do { \
4976 TREE_OPERAND (t, N) = arg##N; \
4977 if (arg##N &&!TYPE_P (arg##N)) \
4979 if (TREE_SIDE_EFFECTS (arg##N)) \
4980 side_effects = 1; \
4981 if (!TREE_READONLY (arg##N) \
4982 && !CONSTANT_CLASS_P (arg##N)) \
4983 (void) (read_only = 0); \
4984 if (!TREE_CONSTANT (arg##N)) \
4985 (void) (constant = 0); \
4987 } while (0)
4989 tree
4990 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4992 bool constant, read_only, side_effects, div_by_zero;
4993 tree t;
4995 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4997 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4998 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4999 /* When sizetype precision doesn't match that of pointers
5000 we need to be able to build explicit extensions or truncations
5001 of the offset argument. */
5002 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
5003 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
5004 && TREE_CODE (arg1) == INTEGER_CST);
5006 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
5007 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
5008 && ptrofftype_p (TREE_TYPE (arg1)));
5010 t = make_node (code PASS_MEM_STAT);
5011 TREE_TYPE (t) = tt;
5013 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
5014 result based on those same flags for the arguments. But if the
5015 arguments aren't really even `tree' expressions, we shouldn't be trying
5016 to do this. */
5018 /* Expressions without side effects may be constant if their
5019 arguments are as well. */
5020 constant = (TREE_CODE_CLASS (code) == tcc_comparison
5021 || TREE_CODE_CLASS (code) == tcc_binary);
5022 read_only = 1;
5023 side_effects = TREE_SIDE_EFFECTS (t);
5025 switch (code)
5027 case TRUNC_DIV_EXPR:
5028 case CEIL_DIV_EXPR:
5029 case FLOOR_DIV_EXPR:
5030 case ROUND_DIV_EXPR:
5031 case EXACT_DIV_EXPR:
5032 case CEIL_MOD_EXPR:
5033 case FLOOR_MOD_EXPR:
5034 case ROUND_MOD_EXPR:
5035 case TRUNC_MOD_EXPR:
5036 div_by_zero = integer_zerop (arg1);
5037 break;
5038 default:
5039 div_by_zero = false;
5042 PROCESS_ARG (0);
5043 PROCESS_ARG (1);
5045 TREE_SIDE_EFFECTS (t) = side_effects;
5046 if (code == MEM_REF)
5048 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5050 tree o = TREE_OPERAND (arg0, 0);
5051 TREE_READONLY (t) = TREE_READONLY (o);
5052 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5055 else
5057 TREE_READONLY (t) = read_only;
5058 /* Don't mark X / 0 as constant. */
5059 TREE_CONSTANT (t) = constant && !div_by_zero;
5060 TREE_THIS_VOLATILE (t)
5061 = (TREE_CODE_CLASS (code) == tcc_reference
5062 && arg0 && TREE_THIS_VOLATILE (arg0));
5065 return t;
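/* Illustrative sketch (not part of GCC): the POINTER_PLUS_EXPR assertion
   above requires the offset operand to have a pointer-offset type, so a
   caller typically converts it first:

     tree ptr_plus = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
                             fold_convert (sizetype, byte_offset));

   where `ptr' and `byte_offset' are hypothetical.  */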
5069 tree
5070 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
5071 tree arg2 MEM_STAT_DECL)
5073 bool constant, read_only, side_effects;
5074 tree t;
5076 gcc_assert (TREE_CODE_LENGTH (code) == 3);
5077 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5079 t = make_node (code PASS_MEM_STAT);
5080 TREE_TYPE (t) = tt;
5082 read_only = 1;
5084 /* As a special exception, if COND_EXPR has NULL branches, we
5085 assume that it is a gimple statement and always consider
5086 it to have side effects. */
5087 if (code == COND_EXPR
5088 && tt == void_type_node
5089 && arg1 == NULL_TREE
5090 && arg2 == NULL_TREE)
5091 side_effects = true;
5092 else
5093 side_effects = TREE_SIDE_EFFECTS (t);
5095 PROCESS_ARG (0);
5096 PROCESS_ARG (1);
5097 PROCESS_ARG (2);
5099 if (code == COND_EXPR)
5100 TREE_READONLY (t) = read_only;
5102 TREE_SIDE_EFFECTS (t) = side_effects;
5103 TREE_THIS_VOLATILE (t)
5104 = (TREE_CODE_CLASS (code) == tcc_reference
5105 && arg0 && TREE_THIS_VOLATILE (arg0));
5107 return t;
5110 tree
5111 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
5112 tree arg2, tree arg3 MEM_STAT_DECL)
5114 bool constant, read_only, side_effects;
5115 tree t;
5117 gcc_assert (TREE_CODE_LENGTH (code) == 4);
5119 t = make_node (code PASS_MEM_STAT);
5120 TREE_TYPE (t) = tt;
5122 side_effects = TREE_SIDE_EFFECTS (t);
5124 PROCESS_ARG (0);
5125 PROCESS_ARG (1);
5126 PROCESS_ARG (2);
5127 PROCESS_ARG (3);
5129 TREE_SIDE_EFFECTS (t) = side_effects;
5130 TREE_THIS_VOLATILE (t)
5131 = (TREE_CODE_CLASS (code) == tcc_reference
5132 && arg0 && TREE_THIS_VOLATILE (arg0));
5134 return t;
5137 tree
5138 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5139 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5141 bool constant, read_only, side_effects;
5142 tree t;
5144 gcc_assert (TREE_CODE_LENGTH (code) == 5);
5146 t = make_node (code PASS_MEM_STAT);
5147 TREE_TYPE (t) = tt;
5149 side_effects = TREE_SIDE_EFFECTS (t);
5151 PROCESS_ARG (0);
5152 PROCESS_ARG (1);
5153 PROCESS_ARG (2);
5154 PROCESS_ARG (3);
5155 PROCESS_ARG (4);
5157 TREE_SIDE_EFFECTS (t) = side_effects;
5158 if (code == TARGET_MEM_REF)
5160 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5162 tree o = TREE_OPERAND (arg0, 0);
5163 TREE_READONLY (t) = TREE_READONLY (o);
5164 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5167 else
5168 TREE_THIS_VOLATILE (t)
5169 = (TREE_CODE_CLASS (code) == tcc_reference
5170 && arg0 && TREE_THIS_VOLATILE (arg0));
5172 return t;
5175 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
5176 on the pointer PTR. */
5178 tree
5179 build_simple_mem_ref_loc (location_t loc, tree ptr)
5181 poly_int64 offset = 0;
5182 tree ptype = TREE_TYPE (ptr);
5183 tree tem;
5184 /* For convenience allow addresses that collapse to a simple base
5185 and offset. */
5186 if (TREE_CODE (ptr) == ADDR_EXPR
5187 && (handled_component_p (TREE_OPERAND (ptr, 0))
5188 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5190 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5191 gcc_assert (ptr);
5192 if (TREE_CODE (ptr) == MEM_REF)
5194 offset += mem_ref_offset (ptr).force_shwi ();
5195 ptr = TREE_OPERAND (ptr, 0);
5197 else
5198 ptr = build_fold_addr_expr (ptr);
5199 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5201 tem = build2 (MEM_REF, TREE_TYPE (ptype),
5202 ptr, build_int_cst (ptype, offset));
5203 SET_EXPR_LOCATION (tem, loc);
5204 return tem;
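/* Illustrative sketch (not part of GCC): dereferencing a pointer, or
   collapsing a handled-component address, with the helper above:

     tree deref = build_simple_mem_ref (ptr);

   where `ptr' is a hypothetical gimple register or invariant address; the
   build_simple_mem_ref macro simply passes UNKNOWN_LOCATION to the function
   above.  */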
5207 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5209 poly_offset_int
5210 mem_ref_offset (const_tree t)
5212 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5213 SIGNED);
5216 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5217 offsetted by OFFSET units. */
5219 tree
5220 build_invariant_address (tree type, tree base, poly_int64 offset)
5222 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5223 build_fold_addr_expr (base),
5224 build_int_cst (ptr_type_node, offset));
5225 tree addr = build1 (ADDR_EXPR, type, ref);
5226 recompute_tree_invariant_for_addr_expr (addr);
5227 return addr;
5230 /* Similar except don't specify the TREE_TYPE
5231 and leave the TREE_SIDE_EFFECTS as 0.
5232 It is permissible for arguments to be null,
5233 or even garbage if their values do not matter. */
5235 tree
5236 build_nt (enum tree_code code, ...)
5238 tree t;
5239 int length;
5240 int i;
5241 va_list p;
5243 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5245 va_start (p, code);
5247 t = make_node (code);
5248 length = TREE_CODE_LENGTH (code);
5250 for (i = 0; i < length; i++)
5251 TREE_OPERAND (t, i) = va_arg (p, tree);
5253 va_end (p);
5254 return t;
5257 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5258 tree vec. */
5260 tree
5261 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5263 tree ret, t;
5264 unsigned int ix;
5266 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5267 CALL_EXPR_FN (ret) = fn;
5268 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5269 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5270 CALL_EXPR_ARG (ret, ix) = t;
5271 return ret;
5274 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5275 and data type TYPE.
5276 We do NOT enter this node in any sort of symbol table.
5278 LOC is the location of the decl.
5280 layout_decl is used to set up the decl's storage layout.
5281 Other slots are initialized to 0 or null pointers. */
5283 tree
5284 build_decl (location_t loc, enum tree_code code, tree name,
5285 tree type MEM_STAT_DECL)
5287 tree t;
5289 t = make_node (code PASS_MEM_STAT);
5290 DECL_SOURCE_LOCATION (t) = loc;
5292 /* if (type == error_mark_node)
5293 type = integer_type_node; */
5294 /* That is not done, deliberately, so that having error_mark_node
5295 as the type can suppress useless errors in the use of this variable. */
5297 DECL_NAME (t) = name;
5298 TREE_TYPE (t) = type;
5300 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5301 layout_decl (t, 0);
5303 return t;
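/* Illustrative sketch (not part of GCC): creating a temporary variable with
   build_decl; layout is computed automatically for VAR_DECLs:

     tree var = build_decl (input_location, VAR_DECL,
                            get_identifier ("tmp"), integer_type_node);
     DECL_ARTIFICIAL (var) = 1;
     TREE_USED (var) = 1;

   where the name "tmp" is only for illustration.  */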
5306 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5308 tree
5309 build_debug_expr_decl (tree type)
5311 tree vexpr = make_node (DEBUG_EXPR_DECL);
5312 DECL_ARTIFICIAL (vexpr) = 1;
5313 TREE_TYPE (vexpr) = type;
5314 SET_DECL_MODE (vexpr, TYPE_MODE (type));
5315 return vexpr;
5318 /* Builds and returns function declaration with NAME and TYPE. */
5320 tree
5321 build_fn_decl (const char *name, tree type)
5323 tree id = get_identifier (name);
5324 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5326 DECL_EXTERNAL (decl) = 1;
5327 TREE_PUBLIC (decl) = 1;
5328 DECL_ARTIFICIAL (decl) = 1;
5329 TREE_NOTHROW (decl) = 1;
5331 return decl;
5334 vec<tree, va_gc> *all_translation_units;
5336 /* Builds a new translation-unit decl with name NAME, queues it in the
5337 global list of translation-unit decls and returns it. */
5339 tree
5340 build_translation_unit_decl (tree name)
5342 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5343 name, NULL_TREE);
5344 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5345 vec_safe_push (all_translation_units, tu);
5346 return tu;
5350 /* BLOCK nodes are used to represent the structure of binding contours
5351 and declarations, once those contours have been exited and their contents
5352 compiled. This information is used for outputting debugging info. */
5354 tree
5355 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5357 tree block = make_node (BLOCK);
5359 BLOCK_VARS (block) = vars;
5360 BLOCK_SUBBLOCKS (block) = subblocks;
5361 BLOCK_SUPERCONTEXT (block) = supercontext;
5362 BLOCK_CHAIN (block) = chain;
5363 return block;
5367 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5369 LOC is the location to use in tree T. */
5371 void
5372 protected_set_expr_location (tree t, location_t loc)
5374 if (CAN_HAVE_LOCATION_P (t))
5375 SET_EXPR_LOCATION (t, loc);
5376 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5378 t = expr_single (t);
5379 if (t && CAN_HAVE_LOCATION_P (t))
5380 SET_EXPR_LOCATION (t, loc);
5384 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5385 UNKNOWN_LOCATION. */
5387 void
5388 protected_set_expr_location_if_unset (tree t, location_t loc)
5390 t = expr_single (t);
5391 if (t && !EXPR_HAS_LOCATION (t))
5392 protected_set_expr_location (t, loc);
5395 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5396 of the various TYPE_QUAL values. */
5398 static void
5399 set_type_quals (tree type, int type_quals)
5401 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5402 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5403 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5404 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5405 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5408 /* Returns true iff CAND and BASE have equivalent language-specific
5409 qualifiers. */
5411 bool
5412 check_lang_type (const_tree cand, const_tree base)
5414 if (lang_hooks.types.type_hash_eq == NULL)
5415 return true;
5416 /* type_hash_eq currently only applies to these types. */
5417 if (TREE_CODE (cand) != FUNCTION_TYPE
5418 && TREE_CODE (cand) != METHOD_TYPE)
5419 return true;
5420 return lang_hooks.types.type_hash_eq (cand, base);
5423 /* This function checks to see if TYPE matches the size of one of the built-in
5424 atomic types, and returns that core atomic type. */
5426 static tree
5427 find_atomic_core_type (const_tree type)
5429 tree base_atomic_type;
5431 /* Only handle complete types. */
5432 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5433 return NULL_TREE;
5435 switch (tree_to_uhwi (TYPE_SIZE (type)))
5437 case 8:
5438 base_atomic_type = atomicQI_type_node;
5439 break;
5441 case 16:
5442 base_atomic_type = atomicHI_type_node;
5443 break;
5445 case 32:
5446 base_atomic_type = atomicSI_type_node;
5447 break;
5449 case 64:
5450 base_atomic_type = atomicDI_type_node;
5451 break;
5453 case 128:
5454 base_atomic_type = atomicTI_type_node;
5455 break;
5457 default:
5458 base_atomic_type = NULL_TREE;
5461 return base_atomic_type;
5464 /* Returns true iff unqualified CAND and BASE are equivalent. */
5466 bool
5467 check_base_type (const_tree cand, const_tree base)
5469 if (TYPE_NAME (cand) != TYPE_NAME (base)
5470 /* Apparently this is needed for Objective-C. */
5471 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5472 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5473 TYPE_ATTRIBUTES (base)))
5474 return false;
5475 /* Check alignment. */
5476 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5477 && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5478 return true;
5479 /* Atomic types increase minimal alignment. We must do so as well
5480 or we get duplicated canonical types. See PR88686. */
5481 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5483 /* See if this object can map to a basic atomic type. */
5484 tree atomic_type = find_atomic_core_type (cand);
5485 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5486 return true;
5488 return false;
5491 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5493 bool
5494 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5496 return (TYPE_QUALS (cand) == type_quals
5497 && check_base_type (cand, base)
5498 && check_lang_type (cand, base));
5501 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5503 static bool
5504 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5506 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5507 && TYPE_NAME (cand) == TYPE_NAME (base)
5508 /* Apparently this is needed for Objective-C. */
5509 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5510 /* Check alignment. */
5511 && TYPE_ALIGN (cand) == align
5512 /* Check this is a user-aligned type as build_aligned_type
5513 would create. */
5514 && TYPE_USER_ALIGN (cand)
5515 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5516 TYPE_ATTRIBUTES (base))
5517 && check_lang_type (cand, base));
5520 /* Return a version of the TYPE, qualified as indicated by the
5521 TYPE_QUALS, if one exists. If no qualified version exists yet,
5522 return NULL_TREE. */
5524 tree
5525 get_qualified_type (tree type, int type_quals)
5527 if (TYPE_QUALS (type) == type_quals)
5528 return type;
5530 tree mv = TYPE_MAIN_VARIANT (type);
5531 if (check_qualified_type (mv, type, type_quals))
5532 return mv;
5534 /* Search the chain of variants to see if there is already one there just
5535 like the one we need to have. If so, use that existing one. We must
5536 preserve the TYPE_NAME, since there is code that depends on this. */
5537 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
5538 if (check_qualified_type (*tp, type, type_quals))
5540 /* Put the found variant at the head of the variant list so
5541 frequently searched variants get found faster. The C++ FE
5542 benefits greatly from this. */
5543 tree t = *tp;
5544 *tp = TYPE_NEXT_VARIANT (t);
5545 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
5546 TYPE_NEXT_VARIANT (mv) = t;
5547 return t;
5550 return NULL_TREE;
5553 /* Like get_qualified_type, but creates the type if it does not
5554 exist. This function never returns NULL_TREE. */
5556 tree
5557 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5559 tree t;
5561 /* See if we already have the appropriate qualified variant. */
5562 t = get_qualified_type (type, type_quals);
5564 /* If not, build it. */
5565 if (!t)
5567 t = build_variant_type_copy (type PASS_MEM_STAT);
5568 set_type_quals (t, type_quals);
5570 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5572 /* See if this object can map to a basic atomic type. */
5573 tree atomic_type = find_atomic_core_type (type);
5574 if (atomic_type)
5576 /* Ensure the alignment of this type is compatible with
5577 the required alignment of the atomic type. */
5578 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5579 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5583 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5584 /* Propagate structural equality. */
5585 SET_TYPE_STRUCTURAL_EQUALITY (t);
5586 else if (TYPE_CANONICAL (type) != type)
5587 /* Build the underlying canonical type, since it is different
5588 from TYPE. */
5590 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5591 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5593 else
5594 /* T is its own canonical type. */
5595 TYPE_CANONICAL (t) = t;
5599 return t;
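/* Illustrative sketch (not part of GCC): obtaining `const volatile T' from T
   with the function above; an existing variant is reused when possible:

     tree cv_type
       = build_qualified_type (type, TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   where `type' stands for any previously built type.  */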
5602 /* Create a variant of TYPE with alignment ALIGN. */
5604 tree
5605 build_aligned_type (tree type, unsigned int align)
5607 tree t;
5609 if (TYPE_PACKED (type)
5610 || TYPE_ALIGN (type) == align)
5611 return type;
5613 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5614 if (check_aligned_type (t, type, align))
5615 return t;
5617 t = build_variant_type_copy (type);
5618 SET_TYPE_ALIGN (t, align);
5619 TYPE_USER_ALIGN (t) = 1;
5621 return t;
5624 /* Create a new distinct copy of TYPE. The new type is made its own
5625 MAIN_VARIANT. If TYPE requires structural equality checks, the
5626 resulting type requires structural equality checks; otherwise, its
5627 TYPE_CANONICAL points to itself. */
5629 tree
5630 build_distinct_type_copy (tree type MEM_STAT_DECL)
5632 tree t = copy_node (type PASS_MEM_STAT);
5634 TYPE_POINTER_TO (t) = 0;
5635 TYPE_REFERENCE_TO (t) = 0;
5637 /* Set the canonical type either to a new equivalence class, or
5638 propagate the need for structural equality checks. */
5639 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5640 SET_TYPE_STRUCTURAL_EQUALITY (t);
5641 else
5642 TYPE_CANONICAL (t) = t;
5644 /* Make it its own variant. */
5645 TYPE_MAIN_VARIANT (t) = t;
5646 TYPE_NEXT_VARIANT (t) = 0;
5648 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5649 whose TREE_TYPE is not t. This can also happen in the Ada
5650 frontend when using subtypes. */
5652 return t;
5655 /* Create a new variant of TYPE, equivalent but distinct. This is so
5656 the caller can modify it. TYPE_CANONICAL for the return type will
5657 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5658 are considered equal by the language itself (or that both types
5659 require structural equality checks). */
5661 tree
5662 build_variant_type_copy (tree type MEM_STAT_DECL)
5664 tree t, m = TYPE_MAIN_VARIANT (type);
5666 t = build_distinct_type_copy (type PASS_MEM_STAT);
5668 /* Since we're building a variant, assume that it is a non-semantic
5669 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5670 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5671 /* Type variants have no alias set defined. */
5672 TYPE_ALIAS_SET (t) = -1;
5674 /* Add the new type to the chain of variants of TYPE. */
5675 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5676 TYPE_NEXT_VARIANT (m) = t;
5677 TYPE_MAIN_VARIANT (t) = m;
5679 return t;
5682 /* Return true if the from trees of both tree maps are equal. */
5684 int
5685 tree_map_base_eq (const void *va, const void *vb)
5687 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5688 *const b = (const struct tree_map_base *) vb;
5689 return (a->from == b->from);
5692 /* Hash a from tree in a tree_base_map. */
5694 unsigned int
5695 tree_map_base_hash (const void *item)
5697 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5700 /* Return true if this tree map structure is marked for garbage collection
5701 purposes. We simply return true if the from tree is marked, so that this
5702 structure goes away when the from tree goes away. */
5704 int
5705 tree_map_base_marked_p (const void *p)
5707 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5710 /* Hash a from tree in a tree_map. */
5712 unsigned int
5713 tree_map_hash (const void *item)
5715 return (((const struct tree_map *) item)->hash);
5718 /* Hash a from tree in a tree_decl_map. */
5720 unsigned int
5721 tree_decl_map_hash (const void *item)
5723 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5726 /* Return the initialization priority for DECL. */
5728 priority_type
5729 decl_init_priority_lookup (tree decl)
5731 symtab_node *snode = symtab_node::get (decl);
5733 if (!snode)
5734 return DEFAULT_INIT_PRIORITY;
5735 return
5736 snode->get_init_priority ();
5739 /* Return the finalization priority for DECL. */
5741 priority_type
5742 decl_fini_priority_lookup (tree decl)
5744 cgraph_node *node = cgraph_node::get (decl);
5746 if (!node)
5747 return DEFAULT_INIT_PRIORITY;
5748 return
5749 node->get_fini_priority ();
5752 /* Set the initialization priority for DECL to PRIORITY. */
5754 void
5755 decl_init_priority_insert (tree decl, priority_type priority)
5757 struct symtab_node *snode;
5759 if (priority == DEFAULT_INIT_PRIORITY)
5761 snode = symtab_node::get (decl);
5762 if (!snode)
5763 return;
5765 else if (VAR_P (decl))
5766 snode = varpool_node::get_create (decl);
5767 else
5768 snode = cgraph_node::get_create (decl);
5769 snode->set_init_priority (priority);
5772 /* Set the finalization priority for DECL to PRIORITY. */
5774 void
5775 decl_fini_priority_insert (tree decl, priority_type priority)
5777 struct cgraph_node *node;
5779 if (priority == DEFAULT_INIT_PRIORITY)
5781 node = cgraph_node::get (decl);
5782 if (!node)
5783 return;
5785 else
5786 node = cgraph_node::get_create (decl);
5787 node->set_fini_priority (priority);
5790 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5792 static void
5793 print_debug_expr_statistics (void)
5795 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5796 (long) debug_expr_for_decl->size (),
5797 (long) debug_expr_for_decl->elements (),
5798 debug_expr_for_decl->collisions ());
5801 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5803 static void
5804 print_value_expr_statistics (void)
5806 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5807 (long) value_expr_for_decl->size (),
5808 (long) value_expr_for_decl->elements (),
5809 value_expr_for_decl->collisions ());
5812 /* Lookup a debug expression for FROM, and return it if we find one. */
5814 tree
5815 decl_debug_expr_lookup (tree from)
5817 struct tree_decl_map *h, in;
5818 in.base.from = from;
5820 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5821 if (h)
5822 return h->to;
5823 return NULL_TREE;
5826 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5828 void
5829 decl_debug_expr_insert (tree from, tree to)
5831 struct tree_decl_map *h;
5833 h = ggc_alloc<tree_decl_map> ();
5834 h->base.from = from;
5835 h->to = to;
5836 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5839 /* Lookup a value expression for FROM, and return it if we find one. */
5841 tree
5842 decl_value_expr_lookup (tree from)
5844 struct tree_decl_map *h, in;
5845 in.base.from = from;
5847 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5848 if (h)
5849 return h->to;
5850 return NULL_TREE;
5853 /* Insert a mapping FROM->TO in the value expression hashtable. */
5855 void
5856 decl_value_expr_insert (tree from, tree to)
5858 struct tree_decl_map *h;
5860 h = ggc_alloc<tree_decl_map> ();
5861 h->base.from = from;
5862 h->to = to;
5863 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5866 /* Lookup a vector of debug arguments for FROM, and return it if we
5867 find one. */
5869 vec<tree, va_gc> **
5870 decl_debug_args_lookup (tree from)
5872 struct tree_vec_map *h, in;
5874 if (!DECL_HAS_DEBUG_ARGS_P (from))
5875 return NULL;
5876 gcc_checking_assert (debug_args_for_decl != NULL);
5877 in.base.from = from;
5878 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5879 if (h)
5880 return &h->to;
5881 return NULL;
5884 /* Insert a mapping FROM->empty vector of debug arguments in the debug
5885 arguments hashtable. */
5887 vec<tree, va_gc> **
5888 decl_debug_args_insert (tree from)
5890 struct tree_vec_map *h;
5891 tree_vec_map **loc;
5893 if (DECL_HAS_DEBUG_ARGS_P (from))
5894 return decl_debug_args_lookup (from);
5895 if (debug_args_for_decl == NULL)
5896 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5897 h = ggc_alloc<tree_vec_map> ();
5898 h->base.from = from;
5899 h->to = NULL;
5900 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5901 *loc = h;
5902 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5903 return &h->to;
5906 /* Hashing of types so that we don't make duplicates.
5907 The entry point is `type_hash_canon'. */
5909 /* Generate the default hash code for TYPE. This is designed for
5910 speed, rather than maximum entropy. */
5912 hashval_t
5913 type_hash_canon_hash (tree type)
5915 inchash::hash hstate;
5917 hstate.add_int (TREE_CODE (type));
5919 if (TREE_TYPE (type))
5920 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
5922 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
5923 /* Just the identifier is adequate to distinguish. */
5924 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
5926 switch (TREE_CODE (type))
5928 case METHOD_TYPE:
5929 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
5930 /* FALLTHROUGH. */
5931 case FUNCTION_TYPE:
5932 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
5933 if (TREE_VALUE (t) != error_mark_node)
5934 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
5935 break;
5937 case OFFSET_TYPE:
5938 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
5939 break;
5941 case ARRAY_TYPE:
5943 if (TYPE_DOMAIN (type))
5944 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
5945 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
5947 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
5948 hstate.add_object (typeless);
5951 break;
5953 case INTEGER_TYPE:
5955 tree t = TYPE_MAX_VALUE (type);
5956 if (!t)
5957 t = TYPE_MIN_VALUE (type);
5958 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
5959 hstate.add_object (TREE_INT_CST_ELT (t, i));
5960 break;
5963 case REAL_TYPE:
5964 case FIXED_POINT_TYPE:
5966 unsigned prec = TYPE_PRECISION (type);
5967 hstate.add_object (prec);
5968 break;
5971 case VECTOR_TYPE:
5972 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
5973 break;
5975 default:
5976 break;
5979 return hstate.end ();
5982 /* These are the Hashtable callback functions. */
5984 /* Returns true iff the types are equivalent. */
5986 bool
5987 type_cache_hasher::equal (type_hash *a, type_hash *b)
5989 /* First test the things that are the same for all types. */
5990 if (a->hash != b->hash
5991 || TREE_CODE (a->type) != TREE_CODE (b->type)
5992 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
5993 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
5994 TYPE_ATTRIBUTES (b->type))
5995 || (TREE_CODE (a->type) != COMPLEX_TYPE
5996 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
5997 return 0;
5999 /* Be careful about comparing arrays before and after the element type
6000 has been completed; don't compare TYPE_ALIGN unless both types are
6001 complete. */
6002 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6003 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6004 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6005 return 0;
6007 switch (TREE_CODE (a->type))
6009 case VOID_TYPE:
6010 case OPAQUE_TYPE:
6011 case COMPLEX_TYPE:
6012 case POINTER_TYPE:
6013 case REFERENCE_TYPE:
6014 case NULLPTR_TYPE:
6015 return 1;
6017 case VECTOR_TYPE:
6018 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6019 TYPE_VECTOR_SUBPARTS (b->type));
6021 case ENUMERAL_TYPE:
6022 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6023 && !(TYPE_VALUES (a->type)
6024 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6025 && TYPE_VALUES (b->type)
6026 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6027 && type_list_equal (TYPE_VALUES (a->type),
6028 TYPE_VALUES (b->type))))
6029 return 0;
6031 /* fall through */
6033 case INTEGER_TYPE:
6034 case REAL_TYPE:
6035 case BOOLEAN_TYPE:
6036 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6037 return false;
6038 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6039 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6040 TYPE_MAX_VALUE (b->type)))
6041 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6042 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6043 TYPE_MIN_VALUE (b->type))));
6045 case FIXED_POINT_TYPE:
6046 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6048 case OFFSET_TYPE:
6049 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6051 case METHOD_TYPE:
6052 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6053 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6054 || (TYPE_ARG_TYPES (a->type)
6055 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6056 && TYPE_ARG_TYPES (b->type)
6057 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6058 && type_list_equal (TYPE_ARG_TYPES (a->type),
6059 TYPE_ARG_TYPES (b->type)))))
6060 break;
6061 return 0;
6062 case ARRAY_TYPE:
6063 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6064 where the flag should be inherited from the element type
6065 and can change after ARRAY_TYPEs are created; on non-aggregates
6066 compare it and hash it, scalars will never have that flag set
6067 and we need to differentiate between arrays created by different
6068 front-ends or middle-end created arrays. */
6069 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6070 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6071 || (TYPE_TYPELESS_STORAGE (a->type)
6072 == TYPE_TYPELESS_STORAGE (b->type))));
6074 case RECORD_TYPE:
6075 case UNION_TYPE:
6076 case QUAL_UNION_TYPE:
6077 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6078 || (TYPE_FIELDS (a->type)
6079 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6080 && TYPE_FIELDS (b->type)
6081 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6082 && type_list_equal (TYPE_FIELDS (a->type),
6083 TYPE_FIELDS (b->type))));
6085 case FUNCTION_TYPE:
6086 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6087 || (TYPE_ARG_TYPES (a->type)
6088 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6089 && TYPE_ARG_TYPES (b->type)
6090 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6091 && type_list_equal (TYPE_ARG_TYPES (a->type),
6092 TYPE_ARG_TYPES (b->type))))
6093 break;
6094 return 0;
6096 default:
6097 return 0;
6100 if (lang_hooks.types.type_hash_eq != NULL)
6101 return lang_hooks.types.type_hash_eq (a->type, b->type);
6103 return 1;
6106 /* Given TYPE, and HASHCODE its hash code, return the canonical
6107 object for an identical type if one already exists.
6108 Otherwise, return TYPE, and record it as the canonical object.
6110 To use this function, first create a type of the sort you want.
6111 Then compute its hash code from the fields of the type that
6112 make it different from other similar types.
6113 Then call this function and use the value. */
6115 tree
6116 type_hash_canon (unsigned int hashcode, tree type)
6118 type_hash in;
6119 type_hash **loc;
6121 /* The hash table only contains main variants, so ensure that's what we're
6122 being passed. */
6123 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6125 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6126 must call that routine before comparing TYPE_ALIGNs. */
6127 layout_type (type);
6129 in.hash = hashcode;
6130 in.type = type;
6132 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6133 if (*loc)
6135 tree t1 = ((type_hash *) *loc)->type;
6136 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6137 && t1 != type);
6138 if (TYPE_UID (type) + 1 == next_type_uid)
6139 --next_type_uid;
6140 /* Free also min/max values and the cache for integer
6141 types. This can't be done in free_node, as LTO frees
6142 those on its own. */
6143 if (TREE_CODE (type) == INTEGER_TYPE)
6145 if (TYPE_MIN_VALUE (type)
6146 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6148 /* Zero is always in TYPE_CACHED_VALUES. */
6149 if (! TYPE_UNSIGNED (type))
6150 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6151 ggc_free (TYPE_MIN_VALUE (type));
6153 if (TYPE_MAX_VALUE (type)
6154 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6156 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6157 ggc_free (TYPE_MAX_VALUE (type));
6159 if (TYPE_CACHED_VALUES_P (type))
6160 ggc_free (TYPE_CACHED_VALUES (type));
6162 free_node (type);
6163 return t1;
6165 else
6167 struct type_hash *h;
6169 h = ggc_alloc<type_hash> ();
6170 h->hash = hashcode;
6171 h->type = type;
6172 *loc = h;
6174 return type;
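/* A minimal illustrative sketch (guarded out of the build, helper name
   made up): how a freshly built integral type is typically interned
   through type_hash_canon using the entry points defined in this file.  */
#if 0
static tree
example_intern_int24_type (void)
{
  /* Build the candidate node first; it must be its own main variant.  */
  tree t = make_node (INTEGER_TYPE);
  TYPE_PRECISION (t) = 24;
  fixup_signed_type (t);
  /* Hash it and canonicalize: the result is either T itself or an
     already recorded identical type, in which case T has been freed.  */
  hashval_t hash = type_hash_canon_hash (t);
  return type_hash_canon (hash, t);
}
#endif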
6178 static void
6179 print_type_hash_statistics (void)
6181 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6182 (long) type_hash_table->size (),
6183 (long) type_hash_table->elements (),
6184 type_hash_table->collisions ());
6187 /* Given two lists of types
6188 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6189 return true if the lists contain the same types in the same order.
6190 Also, the TREE_PURPOSEs must match. */
6192 bool
6193 type_list_equal (const_tree l1, const_tree l2)
6195 const_tree t1, t2;
6197 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6198 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6199 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6200 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6201 && (TREE_TYPE (TREE_PURPOSE (t1))
6202 == TREE_TYPE (TREE_PURPOSE (t2))))))
6203 return false;
6205 return t1 == t2;
6208 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6209 given by FNTYPE. If the argument list accepts variable arguments,
6210 then this function counts only the ordinary arguments. */
6212 int
6213 type_num_arguments (const_tree fntype)
6215 int i = 0;
6217 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6218 /* If the function does not take a variable number of arguments,
6219 the last element in the list will have type `void'. */
6220 if (VOID_TYPE_P (TREE_VALUE (t)))
6221 break;
6222 else
6223 ++i;
6225 return i;
6228 /* Return the type of the function TYPE's argument ARGNO if known.
6229 For vararg functions where ARGNO refers to one of the variadic
6230 arguments, return null. Otherwise, return void_type_node for
6231 out-of-bounds ARGNO. */
6233 tree
6234 type_argument_type (const_tree fntype, unsigned argno)
6236 /* Treat zero the same as an out-of-bounds argument number. */
6237 if (!argno)
6238 return void_type_node;
6240 function_args_iterator iter;
6242 tree argtype;
6243 unsigned i = 1;
6244 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6246 /* A vararg function's argument list ends in a null. Otherwise,
6247 an ordinary function's argument list ends with void. Return
6248 null if ARGNO refers to a vararg argument, void_type_node if
6249 it's out of bounds, and the formal argument type otherwise. */
6250 if (!argtype)
6251 break;
6253 if (i == argno || VOID_TYPE_P (argtype))
6254 return argtype;
6256 ++i;
6259 return NULL_TREE;
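/* A minimal illustrative sketch (guarded out of the build, helper name
   made up): querying the two routines above for a FUNCTION_TYPE such as
   that of "int f (int, double, ...)".  */
#if 0
static void
example_query_arguments (tree fntype)
{
  /* Only the named arguments are counted, so this yields 2 for f above.  */
  int nargs = type_num_arguments (fntype);
  /* Argument numbers are 1-based; 0 counts as out of bounds.  */
  tree first = type_argument_type (fntype, 1);
  /* Past the named arguments: NULL_TREE for a varargs type like f,
     void_type_node for an ordinary prototype.  */
  tree past_end = type_argument_type (fntype, nargs + 1);
  (void) first;
  (void) past_end;
}
#endif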
6262 /* Nonzero if integer constants T1 and T2
6263 represent the same constant value. */
6265 int
6266 tree_int_cst_equal (const_tree t1, const_tree t2)
6268 if (t1 == t2)
6269 return 1;
6271 if (t1 == 0 || t2 == 0)
6272 return 0;
6274 STRIP_ANY_LOCATION_WRAPPER (t1);
6275 STRIP_ANY_LOCATION_WRAPPER (t2);
6277 if (TREE_CODE (t1) == INTEGER_CST
6278 && TREE_CODE (t2) == INTEGER_CST
6279 && wi::to_widest (t1) == wi::to_widest (t2))
6280 return 1;
6282 return 0;
6285 /* Return true if T is an INTEGER_CST whose numerical value (extended
6286 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6288 bool
6289 tree_fits_shwi_p (const_tree t)
6291 return (t != NULL_TREE
6292 && TREE_CODE (t) == INTEGER_CST
6293 && wi::fits_shwi_p (wi::to_widest (t)));
6296 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6297 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6299 bool
6300 tree_fits_poly_int64_p (const_tree t)
6302 if (t == NULL_TREE)
6303 return false;
6304 if (POLY_INT_CST_P (t))
6306 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6307 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6308 return false;
6309 return true;
6311 return (TREE_CODE (t) == INTEGER_CST
6312 && wi::fits_shwi_p (wi::to_widest (t)));
6315 /* Return true if T is an INTEGER_CST whose numerical value (extended
6316 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6318 bool
6319 tree_fits_uhwi_p (const_tree t)
6321 return (t != NULL_TREE
6322 && TREE_CODE (t) == INTEGER_CST
6323 && wi::fits_uhwi_p (wi::to_widest (t)));
6326 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6327 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6329 bool
6330 tree_fits_poly_uint64_p (const_tree t)
6332 if (t == NULL_TREE)
6333 return false;
6334 if (POLY_INT_CST_P (t))
6336 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6337 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6338 return false;
6339 return true;
6341 return (TREE_CODE (t) == INTEGER_CST
6342 && wi::fits_uhwi_p (wi::to_widest (t)));
6345 /* T is an INTEGER_CST whose numerical value (extended according to
6346 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6347 HOST_WIDE_INT. */
6349 HOST_WIDE_INT
6350 tree_to_shwi (const_tree t)
6352 gcc_assert (tree_fits_shwi_p (t));
6353 return TREE_INT_CST_LOW (t);
6356 /* T is an INTEGER_CST whose numerical value (extended according to
6357 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6358 HOST_WIDE_INT. */
6360 unsigned HOST_WIDE_INT
6361 tree_to_uhwi (const_tree t)
6363 gcc_assert (tree_fits_uhwi_p (t));
6364 return TREE_INT_CST_LOW (t);
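/* A minimal illustrative sketch (guarded out of the build, helper name
   made up): the usual pattern is to test with tree_fits_*_p before
   calling the matching tree_to_* accessor, which asserts that test.  */
#if 0
static bool
example_extract_uhwi (tree size, unsigned HOST_WIDE_INT *out)
{
  if (size && tree_fits_uhwi_p (size))
    {
      *out = tree_to_uhwi (size);
      return true;
    }
  /* Not an INTEGER_CST, or does not fit an unsigned HOST_WIDE_INT.  */
  return false;
}
#endif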
6367 /* Return the most significant (sign) bit of T. */
6369 int
6370 tree_int_cst_sign_bit (const_tree t)
6372 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6374 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6377 /* Return an indication of the sign of the integer constant T.
6378 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6379 Note that -1 will never be returned if T's type is unsigned. */
6381 int
6382 tree_int_cst_sgn (const_tree t)
6384 if (wi::to_wide (t) == 0)
6385 return 0;
6386 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6387 return 1;
6388 else if (wi::neg_p (wi::to_wide (t)))
6389 return -1;
6390 else
6391 return 1;
6394 /* Return the minimum number of bits needed to represent VALUE in a
6395 signed or unsigned type; SGN says which. */
6397 unsigned int
6398 tree_int_cst_min_precision (tree value, signop sgn)
6400 /* If the value is negative, compute its negative minus 1. The latter
6401 adjustment is because the absolute value of the largest negative value
6402 is one larger than the largest positive value. This is equivalent to
6403 a bit-wise negation, so use that operation instead. */
6405 if (tree_int_cst_sgn (value) < 0)
6406 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6408 /* Return the number of bits needed, taking into account the fact
6409 that we need one more bit for a signed than unsigned type.
6410 If value is 0 or -1, the minimum precision is 1 no matter
6411 whether SGN is SIGNED or UNSIGNED. */
6413 if (integer_zerop (value))
6414 return 1;
6415 else
6416 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
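/* A minimal illustrative sketch (guarded out of the build, helper name
   made up): 5 is 101 in binary, so it needs 3 bits as an unsigned value
   and 4 bits (one extra sign bit) as a signed value, while -3 needs
   3 bits in two's complement.  */
#if 0
static void
example_min_precision (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  tree minus_three = build_int_cst (integer_type_node, -3);
  gcc_assert (tree_int_cst_min_precision (five, UNSIGNED) == 3);
  gcc_assert (tree_int_cst_min_precision (five, SIGNED) == 4);
  gcc_assert (tree_int_cst_min_precision (minus_three, SIGNED) == 3);
}
#endif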
6419 /* Return truthvalue of whether T1 is the same tree structure as T2.
6420 Return 1 if they are the same.
6421 Return 0 if they are understandably different.
6422 Return -1 if either contains tree structure not understood by
6423 this function. */
6425 int
6426 simple_cst_equal (const_tree t1, const_tree t2)
6428 enum tree_code code1, code2;
6429 int cmp;
6430 int i;
6432 if (t1 == t2)
6433 return 1;
6434 if (t1 == 0 || t2 == 0)
6435 return 0;
6437 /* For location wrappers to be the same, they must be at the same
6438 source location (and wrap the same thing). */
6439 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6441 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6442 return 0;
6443 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6446 code1 = TREE_CODE (t1);
6447 code2 = TREE_CODE (t2);
6449 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6451 if (CONVERT_EXPR_CODE_P (code2)
6452 || code2 == NON_LVALUE_EXPR)
6453 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6454 else
6455 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6458 else if (CONVERT_EXPR_CODE_P (code2)
6459 || code2 == NON_LVALUE_EXPR)
6460 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6462 if (code1 != code2)
6463 return 0;
6465 switch (code1)
6467 case INTEGER_CST:
6468 return wi::to_widest (t1) == wi::to_widest (t2);
6470 case REAL_CST:
6471 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6473 case FIXED_CST:
6474 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6476 case STRING_CST:
6477 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6478 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6479 TREE_STRING_LENGTH (t1)));
6481 case CONSTRUCTOR:
6483 unsigned HOST_WIDE_INT idx;
6484 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6485 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6487 if (vec_safe_length (v1) != vec_safe_length (v2))
6488 return false;
6490 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6491 /* ??? Should we handle also fields here? */
6492 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6493 return false;
6494 return true;
6497 case SAVE_EXPR:
6498 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6500 case CALL_EXPR:
6501 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6502 if (cmp <= 0)
6503 return cmp;
6504 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6505 return 0;
6507 const_tree arg1, arg2;
6508 const_call_expr_arg_iterator iter1, iter2;
6509 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6510 arg2 = first_const_call_expr_arg (t2, &iter2);
6511 arg1 && arg2;
6512 arg1 = next_const_call_expr_arg (&iter1),
6513 arg2 = next_const_call_expr_arg (&iter2))
6515 cmp = simple_cst_equal (arg1, arg2);
6516 if (cmp <= 0)
6517 return cmp;
6519 return arg1 == arg2;
6522 case TARGET_EXPR:
6523 /* Special case: if either target is an unallocated VAR_DECL,
6524 it means that it's going to be unified with whatever the
6525 TARGET_EXPR is really supposed to initialize, so treat it
6526 as being equivalent to anything. */
6527 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6528 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6529 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6530 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6531 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6532 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6533 cmp = 1;
6534 else
6535 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6537 if (cmp <= 0)
6538 return cmp;
6540 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6542 case WITH_CLEANUP_EXPR:
6543 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6544 if (cmp <= 0)
6545 return cmp;
6547 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6549 case COMPONENT_REF:
6550 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6551 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6553 return 0;
6555 case VAR_DECL:
6556 case PARM_DECL:
6557 case CONST_DECL:
6558 case FUNCTION_DECL:
6559 return 0;
6561 default:
6562 if (POLY_INT_CST_P (t1))
6563 /* A false return means maybe_ne rather than known_ne. */
6564 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6565 TYPE_SIGN (TREE_TYPE (t1))),
6566 poly_widest_int::from (poly_int_cst_value (t2),
6567 TYPE_SIGN (TREE_TYPE (t2))));
6568 break;
6571 /* This general rule works for most tree codes. All exceptions should be
6572 handled above. If this is a language-specific tree code, we can't
6573 trust what might be in the operand, so say we don't know
6574 the situation. */
6575 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6576 return -1;
6578 switch (TREE_CODE_CLASS (code1))
6580 case tcc_unary:
6581 case tcc_binary:
6582 case tcc_comparison:
6583 case tcc_expression:
6584 case tcc_reference:
6585 case tcc_statement:
6586 cmp = 1;
6587 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6589 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6590 if (cmp <= 0)
6591 return cmp;
6594 return cmp;
6596 default:
6597 return -1;
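/* A minimal illustrative sketch (guarded out of the build, helper name
   made up): the result above is tri-state, so only a return of 1 may be
   treated as "known equal".  */
#if 0
static bool
example_known_equal_p (tree t1, tree t2)
{
  int cmp = simple_cst_equal (t1, t2);
  /* cmp == 0 means known different; cmp == -1 means "cannot tell".  */
  return cmp == 1;
}
#endif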
6601 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6602 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6603 than U, respectively. */
6605 int
6606 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6608 if (tree_int_cst_sgn (t) < 0)
6609 return -1;
6610 else if (!tree_fits_uhwi_p (t))
6611 return 1;
6612 else if (TREE_INT_CST_LOW (t) == u)
6613 return 0;
6614 else if (TREE_INT_CST_LOW (t) < u)
6615 return -1;
6616 else
6617 return 1;
6620 /* Return true if SIZE represents a constant size that is in bounds of
6621 what the middle-end and the backend accept (covering not more than
6622 half of the address space).
6623 When PERR is non-null, set *PERR on failure to the description of
6624 why SIZE is not valid. */
6626 bool
6627 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6629 if (POLY_INT_CST_P (size))
6631 if (TREE_OVERFLOW (size))
6632 return false;
6633 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6634 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6635 return false;
6636 return true;
6639 cst_size_error error;
6640 if (!perr)
6641 perr = &error;
6643 if (TREE_CODE (size) != INTEGER_CST)
6645 *perr = cst_size_not_constant;
6646 return false;
6649 if (TREE_OVERFLOW_P (size))
6651 *perr = cst_size_overflow;
6652 return false;
6655 if (tree_int_cst_sgn (size) < 0)
6657 *perr = cst_size_negative;
6658 return false;
6660 if (!tree_fits_uhwi_p (size)
6661 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6662 < wi::to_widest (size) * 2))
6664 *perr = cst_size_too_big;
6665 return false;
6668 return true;
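/* A minimal illustrative sketch (guarded out of the build, helper name
   made up): when the size is rejected, the optional out-parameter says
   why, which callers typically feed into a diagnostic.  */
#if 0
static bool
example_check_object_size (tree size)
{
  cst_size_error err;
  if (valid_constant_size_p (size, &err))
    return true;
  /* ERR is now one of cst_size_not_constant, cst_size_negative,
     cst_size_overflow or cst_size_too_big.  */
  return false;
}
#endif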
6671 /* Return the precision of the type, or for a complex or vector type the
6672 precision of the type of its elements. */
6674 unsigned int
6675 element_precision (const_tree type)
6677 if (!TYPE_P (type))
6678 type = TREE_TYPE (type);
6679 enum tree_code code = TREE_CODE (type);
6680 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6681 type = TREE_TYPE (type);
6683 return TYPE_PRECISION (type);
6686 /* Return true if CODE represents an associative tree code. Otherwise
6687 return false. */
6688 bool
6689 associative_tree_code (enum tree_code code)
6691 switch (code)
6693 case BIT_IOR_EXPR:
6694 case BIT_AND_EXPR:
6695 case BIT_XOR_EXPR:
6696 case PLUS_EXPR:
6697 case MULT_EXPR:
6698 case MIN_EXPR:
6699 case MAX_EXPR:
6700 return true;
6702 default:
6703 break;
6705 return false;
6708 /* Return true if CODE represents a commutative tree code. Otherwise
6709 return false. */
6710 bool
6711 commutative_tree_code (enum tree_code code)
6713 switch (code)
6715 case PLUS_EXPR:
6716 case MULT_EXPR:
6717 case MULT_HIGHPART_EXPR:
6718 case MIN_EXPR:
6719 case MAX_EXPR:
6720 case BIT_IOR_EXPR:
6721 case BIT_XOR_EXPR:
6722 case BIT_AND_EXPR:
6723 case NE_EXPR:
6724 case EQ_EXPR:
6725 case UNORDERED_EXPR:
6726 case ORDERED_EXPR:
6727 case UNEQ_EXPR:
6728 case LTGT_EXPR:
6729 case TRUTH_AND_EXPR:
6730 case TRUTH_XOR_EXPR:
6731 case TRUTH_OR_EXPR:
6732 case WIDEN_MULT_EXPR:
6733 case VEC_WIDEN_MULT_HI_EXPR:
6734 case VEC_WIDEN_MULT_LO_EXPR:
6735 case VEC_WIDEN_MULT_EVEN_EXPR:
6736 case VEC_WIDEN_MULT_ODD_EXPR:
6737 return true;
6739 default:
6740 break;
6742 return false;
6745 /* Return true if CODE represents a ternary tree code for which the
6746 first two operands are commutative. Otherwise return false. */
6747 bool
6748 commutative_ternary_tree_code (enum tree_code code)
6750 switch (code)
6752 case WIDEN_MULT_PLUS_EXPR:
6753 case WIDEN_MULT_MINUS_EXPR:
6754 case DOT_PROD_EXPR:
6755 return true;
6757 default:
6758 break;
6760 return false;
6763 /* Returns true if CODE can overflow. */
6765 bool
6766 operation_can_overflow (enum tree_code code)
6768 switch (code)
6770 case PLUS_EXPR:
6771 case MINUS_EXPR:
6772 case MULT_EXPR:
6773 case LSHIFT_EXPR:
6774 /* Can overflow in various ways. */
6775 return true;
6776 case TRUNC_DIV_EXPR:
6777 case EXACT_DIV_EXPR:
6778 case FLOOR_DIV_EXPR:
6779 case CEIL_DIV_EXPR:
6780 /* For INT_MIN / -1. */
6781 return true;
6782 case NEGATE_EXPR:
6783 case ABS_EXPR:
6784 /* For -INT_MIN. */
6785 return true;
6786 default:
6787 /* These operators cannot overflow. */
6788 return false;
6792 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6793 if -ftrapv doesn't generate trapping insns for CODE. */
6795 bool
6796 operation_no_trapping_overflow (tree type, enum tree_code code)
6798 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6800 /* We don't generate instructions that trap on overflow for complex or vector
6801 types. */
6802 if (!INTEGRAL_TYPE_P (type))
6803 return true;
6805 if (!TYPE_OVERFLOW_TRAPS (type))
6806 return true;
6808 switch (code)
6810 case PLUS_EXPR:
6811 case MINUS_EXPR:
6812 case MULT_EXPR:
6813 case NEGATE_EXPR:
6814 case ABS_EXPR:
6815 /* These operators can overflow, and -ftrapv generates trapping code for
6816 these. */
6817 return false;
6818 case TRUNC_DIV_EXPR:
6819 case EXACT_DIV_EXPR:
6820 case FLOOR_DIV_EXPR:
6821 case CEIL_DIV_EXPR:
6822 case LSHIFT_EXPR:
6823 /* These operators can overflow, but -ftrapv does not generate trapping
6824 code for these. */
6825 return true;
6826 default:
6827 /* These operators cannot overflow. */
6828 return true;
6832 /* Constructors for pointer, array and function types.
6833 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6834 constructed by language-dependent code, not here.) */
6836 /* Construct, lay out and return the type of pointers to TO_TYPE with
6837 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6838 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6839 indicate this type can reference all of memory. If such a type has
6840 already been constructed, reuse it. */
6842 tree
6843 build_pointer_type_for_mode (tree to_type, machine_mode mode,
6844 bool can_alias_all)
6846 tree t;
6847 bool could_alias = can_alias_all;
6849 if (to_type == error_mark_node)
6850 return error_mark_node;
6852 if (mode == VOIDmode)
6854 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6855 mode = targetm.addr_space.pointer_mode (as);
6858 /* If the pointed-to type has the may_alias attribute set, force
6859 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6860 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6861 can_alias_all = true;
6863 /* In some cases, languages will have things that aren't a POINTER_TYPE
6864 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6865 In that case, return that type without regard to the rest of our
6866 operands.
6868 ??? This is a kludge, but consistent with the way this function has
6869 always operated and there doesn't seem to be a good way to avoid this
6870 at the moment. */
6871 if (TYPE_POINTER_TO (to_type) != 0
6872 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
6873 return TYPE_POINTER_TO (to_type);
6875 /* First, if we already have a type for pointers to TO_TYPE and it's
6876 the proper mode, use it. */
6877 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
6878 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6879 return t;
6881 t = make_node (POINTER_TYPE);
6883 TREE_TYPE (t) = to_type;
6884 SET_TYPE_MODE (t, mode);
6885 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6886 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
6887 TYPE_POINTER_TO (to_type) = t;
6889 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6890 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6891 SET_TYPE_STRUCTURAL_EQUALITY (t);
6892 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6893 TYPE_CANONICAL (t)
6894 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
6895 mode, false);
6897 /* Lay out the type. This function has many callers that are concerned
6898 with expression-construction, and this simplifies them all. */
6899 layout_type (t);
6901 return t;
6904 /* By default build pointers in ptr_mode. */
6906 tree
6907 build_pointer_type (tree to_type)
6909 return build_pointer_type_for_mode (to_type, VOIDmode, false);
6912 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
6914 tree
6915 build_reference_type_for_mode (tree to_type, machine_mode mode,
6916 bool can_alias_all)
6918 tree t;
6919 bool could_alias = can_alias_all;
6921 if (to_type == error_mark_node)
6922 return error_mark_node;
6924 if (mode == VOIDmode)
6926 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6927 mode = targetm.addr_space.pointer_mode (as);
6930 /* If the pointed-to type has the may_alias attribute set, force
6931 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6932 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6933 can_alias_all = true;
6935 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
6936 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
6937 In that case, return that type without regard to the rest of our
6938 operands.
6940 ??? This is a kludge, but consistent with the way this function has
6941 always operated and there doesn't seem to be a good way to avoid this
6942 at the moment. */
6943 if (TYPE_REFERENCE_TO (to_type) != 0
6944 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
6945 return TYPE_REFERENCE_TO (to_type);
6947 /* First, if we already have a type for references to TO_TYPE and it's
6948 the proper mode, use it. */
6949 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
6950 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6951 return t;
6953 t = make_node (REFERENCE_TYPE);
6955 TREE_TYPE (t) = to_type;
6956 SET_TYPE_MODE (t, mode);
6957 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6958 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
6959 TYPE_REFERENCE_TO (to_type) = t;
6961 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6962 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6963 SET_TYPE_STRUCTURAL_EQUALITY (t);
6964 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6965 TYPE_CANONICAL (t)
6966 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
6967 mode, false);
6969 layout_type (t);
6971 return t;
6975 /* Build the node for the type of references-to-TO_TYPE by default
6976 in ptr_mode. */
6978 tree
6979 build_reference_type (tree to_type)
6981 return build_reference_type_for_mode (to_type, VOIDmode, false);
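/* A minimal illustrative sketch (guarded out of the build, helper name
   made up): pointer and reference nodes are chained off the pointed-to
   type, so repeated requests reuse the same node.  */
#if 0
static void
example_pointer_and_reference_types (void)
{
  tree int_ptr = build_pointer_type (integer_type_node);	/* int *  */
  tree int_ptr_ptr = build_pointer_type (int_ptr);		/* int ** */
  tree int_ref = build_reference_type (integer_type_node);	/* int &  */
  /* A second request in the default mode returns the cached node.  */
  gcc_assert (int_ptr == build_pointer_type (integer_type_node));
  (void) int_ptr_ptr;
  (void) int_ref;
}
#endif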
6984 #define MAX_INT_CACHED_PREC \
6985 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6986 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
6988 /* Builds a signed or unsigned integer type of precision PRECISION.
6989 Used for C bitfields whose precision does not match that of
6990 built-in target types. */
6991 tree
6992 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
6993 int unsignedp)
6995 tree itype, ret;
6997 if (unsignedp)
6998 unsignedp = MAX_INT_CACHED_PREC + 1;
7000 if (precision <= MAX_INT_CACHED_PREC)
7002 itype = nonstandard_integer_type_cache[precision + unsignedp];
7003 if (itype)
7004 return itype;
7007 itype = make_node (INTEGER_TYPE);
7008 TYPE_PRECISION (itype) = precision;
7010 if (unsignedp)
7011 fixup_unsigned_type (itype);
7012 else
7013 fixup_signed_type (itype);
7015 inchash::hash hstate;
7016 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7017 ret = type_hash_canon (hstate.end (), itype);
7018 if (precision <= MAX_INT_CACHED_PREC)
7019 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7021 return ret;
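/* A minimal illustrative sketch (guarded out of the build, helper name
   made up): a 24-bit unsigned type such as one used for a C bit-field.
   Results up to MAX_INT_CACHED_PREC bits are cached, so asking twice
   returns the same node.  */
#if 0
static void
example_nonstandard_int (void)
{
  tree uint24 = build_nonstandard_integer_type (24, /*unsignedp=*/1);
  gcc_assert (TYPE_PRECISION (uint24) == 24 && TYPE_UNSIGNED (uint24));
  gcc_assert (uint24 == build_nonstandard_integer_type (24, 1));
}
#endif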
7024 #define MAX_BOOL_CACHED_PREC \
7025 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7026 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7028 /* Builds a boolean type of precision PRECISION.
7029 Used for boolean vectors to choose proper vector element size. */
7030 tree
7031 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7033 tree type;
7035 if (precision <= MAX_BOOL_CACHED_PREC)
7037 type = nonstandard_boolean_type_cache[precision];
7038 if (type)
7039 return type;
7042 type = make_node (BOOLEAN_TYPE);
7043 TYPE_PRECISION (type) = precision;
7044 fixup_signed_type (type);
7046 if (precision <= MAX_BOOL_CACHED_PREC)
7047 nonstandard_boolean_type_cache[precision] = type;
7049 return type;
7052 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7053 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7054 is true, reuse such a type that has already been constructed. */
7056 static tree
7057 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7059 tree itype = make_node (INTEGER_TYPE);
7061 TREE_TYPE (itype) = type;
7063 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7064 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7066 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7067 SET_TYPE_MODE (itype, TYPE_MODE (type));
7068 TYPE_SIZE (itype) = TYPE_SIZE (type);
7069 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7070 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7071 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7072 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7074 if (!shared)
7075 return itype;
7077 if ((TYPE_MIN_VALUE (itype)
7078 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7079 || (TYPE_MAX_VALUE (itype)
7080 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7082 /* Since we cannot reliably merge this type, we need to compare it using
7083 structural equality checks. */
7084 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7085 return itype;
7088 hashval_t hash = type_hash_canon_hash (itype);
7089 itype = type_hash_canon (hash, itype);
7091 return itype;
7094 /* Wrapper around build_range_type_1 with SHARED set to true. */
7096 tree
7097 build_range_type (tree type, tree lowval, tree highval)
7099 return build_range_type_1 (type, lowval, highval, true);
7102 /* Wrapper around build_range_type_1 with SHARED set to false. */
7104 tree
7105 build_nonshared_range_type (tree type, tree lowval, tree highval)
7107 return build_range_type_1 (type, lowval, highval, false);
7110 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7111 MAXVAL should be the maximum value in the domain
7112 (one less than the length of the array).
7114 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7115 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7116 The limit exists because the result is a signed type and we don't handle
7117 sizes that use more than one HOST_WIDE_INT. */
7119 tree
7120 build_index_type (tree maxval)
7122 return build_range_type (sizetype, size_zero_node, maxval);
7125 /* Return true if the debug information for TYPE, a subtype, should be emitted
7126 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7127 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7128 debug info and doesn't reflect the source code. */
7130 bool
7131 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7133 tree base_type = TREE_TYPE (type), low, high;
7135 /* Subrange types have a base type which is an integral type. */
7136 if (!INTEGRAL_TYPE_P (base_type))
7137 return false;
7139 /* Get the real bounds of the subtype. */
7140 if (lang_hooks.types.get_subrange_bounds)
7141 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7142 else
7144 low = TYPE_MIN_VALUE (type);
7145 high = TYPE_MAX_VALUE (type);
7148 /* If the type and its base type have the same representation and the same
7149 name, then the type is not a subrange but a copy of the base type. */
7150 if ((TREE_CODE (base_type) == INTEGER_TYPE
7151 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7152 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7153 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7154 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7155 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7156 return false;
7158 if (lowval)
7159 *lowval = low;
7160 if (highval)
7161 *highval = high;
7162 return true;
7165 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7166 and number of elements specified by the range of values of INDEX_TYPE.
7167 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7168 If SHARED is true, reuse such a type that has already been constructed.
7169 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7171 tree
7172 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7173 bool shared, bool set_canonical)
7175 tree t;
7177 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7179 error ("arrays of functions are not meaningful");
7180 elt_type = integer_type_node;
7183 t = make_node (ARRAY_TYPE);
7184 TREE_TYPE (t) = elt_type;
7185 TYPE_DOMAIN (t) = index_type;
7186 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7187 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7188 layout_type (t);
7190 if (shared)
7192 hashval_t hash = type_hash_canon_hash (t);
7193 t = type_hash_canon (hash, t);
7196 if (TYPE_CANONICAL (t) == t && set_canonical)
7198 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7199 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7200 || in_lto_p)
7201 SET_TYPE_STRUCTURAL_EQUALITY (t);
7202 else if (TYPE_CANONICAL (elt_type) != elt_type
7203 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7204 TYPE_CANONICAL (t)
7205 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7206 index_type
7207 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7208 typeless_storage, shared, set_canonical);
7211 return t;
7214 /* Wrapper around build_array_type_1 with SHARED set to true. */
7216 tree
7217 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7219 return
7220 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7223 /* Wrapper around build_array_type_1 with SHARED set to false. */
7225 tree
7226 build_nonshared_array_type (tree elt_type, tree index_type)
7228 return build_array_type_1 (elt_type, index_type, false, false, true);
7231 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7232 sizetype. */
7234 tree
7235 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7237 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
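/* A minimal illustrative sketch (guarded out of the build, helper name
   made up): two ways of building the type int[10].  The domain is the
   index range [0, 9] in sizetype, and shared array types are
   hash-consed, so both requests should yield the same node.  */
#if 0
static void
example_array_types (void)
{
  tree domain = build_index_type (size_int (9));
  tree a1 = build_array_type (integer_type_node, domain);
  tree a2 = build_array_type_nelts (integer_type_node, 10);
  gcc_assert (a1 == a2);
}
#endif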
7240 /* Recursively examines the array elements of TYPE, until a non-array
7241 element type is found. */
7243 tree
7244 strip_array_types (tree type)
7246 while (TREE_CODE (type) == ARRAY_TYPE)
7247 type = TREE_TYPE (type);
7249 return type;
7252 /* Computes the canonical argument types from the argument type list
7253 ARGTYPES.
7255 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7256 on entry to this function, or if any of the ARGTYPES are
7257 structural.
7259 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7260 true on entry to this function, or if any of the ARGTYPES are
7261 non-canonical.
7263 Returns a canonical argument list, which may be ARGTYPES when the
7264 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7265 true) or would not differ from ARGTYPES. */
7267 static tree
7268 maybe_canonicalize_argtypes (tree argtypes,
7269 bool *any_structural_p,
7270 bool *any_noncanonical_p)
7272 tree arg;
7273 bool any_noncanonical_argtypes_p = false;
7275 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7277 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7278 /* Fail gracefully by stating that the type is structural. */
7279 *any_structural_p = true;
7280 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7281 *any_structural_p = true;
7282 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7283 || TREE_PURPOSE (arg))
7284 /* If the argument has a default argument, we consider it
7285 non-canonical even though the type itself is canonical.
7286 That way, different variants of function and method types
7287 with default arguments will all point to the variant with
7288 no defaults as their canonical type. */
7289 any_noncanonical_argtypes_p = true;
7292 if (*any_structural_p)
7293 return argtypes;
7295 if (any_noncanonical_argtypes_p)
7297 /* Build the canonical list of argument types. */
7298 tree canon_argtypes = NULL_TREE;
7299 bool is_void = false;
7301 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7303 if (arg == void_list_node)
7304 is_void = true;
7305 else
7306 canon_argtypes = tree_cons (NULL_TREE,
7307 TYPE_CANONICAL (TREE_VALUE (arg)),
7308 canon_argtypes);
7311 canon_argtypes = nreverse (canon_argtypes);
7312 if (is_void)
7313 canon_argtypes = chainon (canon_argtypes, void_list_node);
7315 /* There is a non-canonical type. */
7316 *any_noncanonical_p = true;
7317 return canon_argtypes;
7320 /* The canonical argument types are the same as ARGTYPES. */
7321 return argtypes;
7324 /* Construct, lay out and return
7325 the type of functions returning type VALUE_TYPE
7326 given arguments of types ARG_TYPES.
7327 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7328 are data type nodes for the arguments of the function.
7329 If such a type has already been constructed, reuse it. */
7331 tree
7332 build_function_type (tree value_type, tree arg_types)
7334 tree t;
7335 inchash::hash hstate;
7336 bool any_structural_p, any_noncanonical_p;
7337 tree canon_argtypes;
7339 gcc_assert (arg_types != error_mark_node);
7341 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7343 error ("function return type cannot be function");
7344 value_type = integer_type_node;
7347 /* Make a node of the sort we want. */
7348 t = make_node (FUNCTION_TYPE);
7349 TREE_TYPE (t) = value_type;
7350 TYPE_ARG_TYPES (t) = arg_types;
7352 /* If we already have such a type, use the old one. */
7353 hashval_t hash = type_hash_canon_hash (t);
7354 t = type_hash_canon (hash, t);
7356 /* Set up the canonical type. */
7357 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7358 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7359 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7360 &any_structural_p,
7361 &any_noncanonical_p);
7362 if (any_structural_p)
7363 SET_TYPE_STRUCTURAL_EQUALITY (t);
7364 else if (any_noncanonical_p)
7365 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7366 canon_argtypes);
7368 if (!COMPLETE_TYPE_P (t))
7369 layout_type (t);
7370 return t;
7373 /* Build a function type. The RETURN_TYPE is the type returned by the
7374 function. If VAARGS is set, no void_type_node is appended to the
7375 list. ARGP must always be terminated by a NULL_TREE. */
7377 static tree
7378 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7380 tree t, args, last;
7382 t = va_arg (argp, tree);
7383 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7384 args = tree_cons (NULL_TREE, t, args);
7386 if (vaargs)
7388 last = args;
7389 if (args != NULL_TREE)
7390 args = nreverse (args);
7391 gcc_assert (last != void_list_node);
7393 else if (args == NULL_TREE)
7394 args = void_list_node;
7395 else
7397 last = args;
7398 args = nreverse (args);
7399 TREE_CHAIN (last) = void_list_node;
7401 args = build_function_type (return_type, args);
7403 return args;
7406 /* Build a function type. The RETURN_TYPE is the type returned by the
7407 function. If additional arguments are provided, they are
7408 additional argument types. The list of argument types must always
7409 be terminated by NULL_TREE. */
7411 tree
7412 build_function_type_list (tree return_type, ...)
7414 tree args;
7415 va_list p;
7417 va_start (p, return_type);
7418 args = build_function_type_list_1 (false, return_type, p);
7419 va_end (p);
7420 return args;
7423 /* Build a variable argument function type. The RETURN_TYPE is the
7424 type returned by the function. If additional arguments are provided,
7425 they are additional argument types. The list of argument types must
7426 always be terminated by NULL_TREE. */
7428 tree
7429 build_varargs_function_type_list (tree return_type, ...)
7431 tree args;
7432 va_list p;
7434 va_start (p, return_type);
7435 args = build_function_type_list_1 (true, return_type, p);
7436 va_end (p);
7438 return args;
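/* A minimal illustrative sketch (guarded out of the build, helper name
   made up): the fixed-argument builder appends the terminating
   void_list_node, while the varargs builder leaves it off, which is what
   marks the resulting type as taking "...".  */
#if 0
static void
example_function_types (void)
{
  tree char_ptr = build_pointer_type (char_type_node);
  /* double f (int, char *)  */
  tree f1 = build_function_type_list (double_type_node, integer_type_node,
				      char_ptr, NULL_TREE);
  /* int g (char *, ...)  */
  tree f2 = build_varargs_function_type_list (integer_type_node, char_ptr,
					      NULL_TREE);
  gcc_assert (stdarg_p (f2) && !stdarg_p (f1));
}
#endif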
7441 /* Build a function type. RETURN_TYPE is the type returned by the
7442 function; VAARGS indicates whether the function takes varargs. The
7443 function takes N named arguments, the types of which are provided in
7444 ARG_TYPES. */
7446 static tree
7447 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7448 tree *arg_types)
7450 int i;
7451 tree t = vaargs ? NULL_TREE : void_list_node;
7453 for (i = n - 1; i >= 0; i--)
7454 t = tree_cons (NULL_TREE, arg_types[i], t);
7456 return build_function_type (return_type, t);
7459 /* Build a function type. RETURN_TYPE is the type returned by the
7460 function. The function takes N named arguments, the types of which
7461 are provided in ARG_TYPES. */
7463 tree
7464 build_function_type_array (tree return_type, int n, tree *arg_types)
7466 return build_function_type_array_1 (false, return_type, n, arg_types);
7469 /* Build a variable argument function type. RETURN_TYPE is the type
7470 returned by the function. The function takes N named arguments, the
7471 types of which are provided in ARG_TYPES. */
7473 tree
7474 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7476 return build_function_type_array_1 (true, return_type, n, arg_types);
7479 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7480 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7481 for the method. An implicit additional parameter (of type
7482 pointer-to-BASETYPE) is added to the ARGTYPES. */
7484 tree
7485 build_method_type_directly (tree basetype,
7486 tree rettype,
7487 tree argtypes)
7489 tree t;
7490 tree ptype;
7491 bool any_structural_p, any_noncanonical_p;
7492 tree canon_argtypes;
7494 /* Make a node of the sort we want. */
7495 t = make_node (METHOD_TYPE);
7497 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7498 TREE_TYPE (t) = rettype;
7499 ptype = build_pointer_type (basetype);
7501 /* The actual arglist for this function includes a "hidden" argument
7502 which is "this". Put it into the list of argument types. */
7503 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7504 TYPE_ARG_TYPES (t) = argtypes;
7506 /* If we already have such a type, use the old one. */
7507 hashval_t hash = type_hash_canon_hash (t);
7508 t = type_hash_canon (hash, t);
7510 /* Set up the canonical type. */
7511 any_structural_p
7512 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7513 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7514 any_noncanonical_p
7515 = (TYPE_CANONICAL (basetype) != basetype
7516 || TYPE_CANONICAL (rettype) != rettype);
7517 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7518 &any_structural_p,
7519 &any_noncanonical_p);
7520 if (any_structural_p)
7521 SET_TYPE_STRUCTURAL_EQUALITY (t);
7522 else if (any_noncanonical_p)
7523 TYPE_CANONICAL (t)
7524 = build_method_type_directly (TYPE_CANONICAL (basetype),
7525 TYPE_CANONICAL (rettype),
7526 canon_argtypes);
7527 if (!COMPLETE_TYPE_P (t))
7528 layout_type (t);
7530 return t;
7533 /* Construct, lay out and return the type of methods belonging to class
7534 BASETYPE and whose arguments and values are described by TYPE.
7535 If that type exists already, reuse it.
7536 TYPE must be a FUNCTION_TYPE node. */
7538 tree
7539 build_method_type (tree basetype, tree type)
7541 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7543 return build_method_type_directly (basetype,
7544 TREE_TYPE (type),
7545 TYPE_ARG_TYPES (type));
7548 /* Construct, lay out and return the type of offsets to a value
7549 of type TYPE, within an object of type BASETYPE.
7550 If a suitable offset type exists already, reuse it. */
7552 tree
7553 build_offset_type (tree basetype, tree type)
7555 tree t;
7557 /* Make a node of the sort we want. */
7558 t = make_node (OFFSET_TYPE);
7560 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7561 TREE_TYPE (t) = type;
7563 /* If we already have such a type, use the old one. */
7564 hashval_t hash = type_hash_canon_hash (t);
7565 t = type_hash_canon (hash, t);
7567 if (!COMPLETE_TYPE_P (t))
7568 layout_type (t);
7570 if (TYPE_CANONICAL (t) == t)
7572 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7573 || TYPE_STRUCTURAL_EQUALITY_P (type))
7574 SET_TYPE_STRUCTURAL_EQUALITY (t);
7575 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
7576 || TYPE_CANONICAL (type) != type)
7577 TYPE_CANONICAL (t)
7578 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
7579 TYPE_CANONICAL (type));
7582 return t;
7585 /* Create a complex type whose components are COMPONENT_TYPE.
7587 If NAMED is true, the type is given a TYPE_NAME. We do not always
7588 do so because this creates a DECL node and thus make the DECL_UIDs
7589 dependent on the type canonicalization hashtable, which is GC-ed,
7590 so the DECL_UIDs would not be stable wrt garbage collection. */
7592 tree
7593 build_complex_type (tree component_type, bool named)
7595 gcc_assert (INTEGRAL_TYPE_P (component_type)
7596 || SCALAR_FLOAT_TYPE_P (component_type)
7597 || FIXED_POINT_TYPE_P (component_type));
7599 /* Make a node of the sort we want. */
7600 tree probe = make_node (COMPLEX_TYPE);
7602 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
7604 /* If we already have such a type, use the old one. */
7605 hashval_t hash = type_hash_canon_hash (probe);
7606 tree t = type_hash_canon (hash, probe);
7608 if (t == probe)
7610 /* We created a new type. The hash insertion will have laid
7611 out the type. We need to check the canonicalization and
7612 maybe set the name. */
7613 gcc_checking_assert (COMPLETE_TYPE_P (t)
7614 && !TYPE_NAME (t)
7615 && TYPE_CANONICAL (t) == t);
7617 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
7618 SET_TYPE_STRUCTURAL_EQUALITY (t);
7619 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
7620 TYPE_CANONICAL (t)
7621 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
7623 /* We need to create a name, since complex is a fundamental type. */
7624 if (named)
7626 const char *name = NULL;
7628 if (TREE_TYPE (t) == char_type_node)
7629 name = "complex char";
7630 else if (TREE_TYPE (t) == signed_char_type_node)
7631 name = "complex signed char";
7632 else if (TREE_TYPE (t) == unsigned_char_type_node)
7633 name = "complex unsigned char";
7634 else if (TREE_TYPE (t) == short_integer_type_node)
7635 name = "complex short int";
7636 else if (TREE_TYPE (t) == short_unsigned_type_node)
7637 name = "complex short unsigned int";
7638 else if (TREE_TYPE (t) == integer_type_node)
7639 name = "complex int";
7640 else if (TREE_TYPE (t) == unsigned_type_node)
7641 name = "complex unsigned int";
7642 else if (TREE_TYPE (t) == long_integer_type_node)
7643 name = "complex long int";
7644 else if (TREE_TYPE (t) == long_unsigned_type_node)
7645 name = "complex long unsigned int";
7646 else if (TREE_TYPE (t) == long_long_integer_type_node)
7647 name = "complex long long int";
7648 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
7649 name = "complex long long unsigned int";
7651 if (name != NULL)
7652 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
7653 get_identifier (name), t);
7657 return build_qualified_type (t, TYPE_QUALS (component_type));
7660 /* If TYPE is a real or complex floating-point type and the target
7661 does not directly support arithmetic on TYPE then return the wider
7662 type to be used for arithmetic on TYPE. Otherwise, return
7663 NULL_TREE. */
7665 tree
7666 excess_precision_type (tree type)
7668 /* The target can give two different responses to the question of
7669 which excess precision mode it would like depending on whether we
7670 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7672 enum excess_precision_type requested_type
7673 = (flag_excess_precision == EXCESS_PRECISION_FAST
7674 ? EXCESS_PRECISION_TYPE_FAST
7675 : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
7676 ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));
7678 enum flt_eval_method target_flt_eval_method
7679 = targetm.c.excess_precision (requested_type);
7681 /* The target should not ask for unpredictable float evaluation (though
7682 it might advertise that implicitly the evaluation is unpredictable,
7683 but we don't care about that here, it will have been reported
7684 elsewhere). If it does ask for unpredictable evaluation, we have
7685 nothing to do here. */
7686 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7688 /* Nothing to do. The target has asked for all types we know about
7689 to be computed with their native precision and range. */
7690 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7691 return NULL_TREE;
7693 /* The target will promote this type in a target-dependent way, so excess
7694 precision ought to leave it alone. */
7695 if (targetm.promoted_type (type) != NULL_TREE)
7696 return NULL_TREE;
7698 machine_mode float16_type_mode = (float16_type_node
7699 ? TYPE_MODE (float16_type_node)
7700 : VOIDmode);
7701 machine_mode float_type_mode = TYPE_MODE (float_type_node);
7702 machine_mode double_type_mode = TYPE_MODE (double_type_node);
7704 switch (TREE_CODE (type))
7706 case REAL_TYPE:
7708 machine_mode type_mode = TYPE_MODE (type);
7709 switch (target_flt_eval_method)
7711 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7712 if (type_mode == float16_type_mode)
7713 return float_type_node;
7714 break;
7715 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7716 if (type_mode == float16_type_mode
7717 || type_mode == float_type_mode)
7718 return double_type_node;
7719 break;
7720 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7721 if (type_mode == float16_type_mode
7722 || type_mode == float_type_mode
7723 || type_mode == double_type_mode)
7724 return long_double_type_node;
7725 break;
7726 default:
7727 gcc_unreachable ();
7729 break;
7731 case COMPLEX_TYPE:
7733 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7734 return NULL_TREE;
7735 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7736 switch (target_flt_eval_method)
7738 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7739 if (type_mode == float16_type_mode)
7740 return complex_float_type_node;
7741 break;
7742 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7743 if (type_mode == float16_type_mode
7744 || type_mode == float_type_mode)
7745 return complex_double_type_node;
7746 break;
7747 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7748 if (type_mode == float16_type_mode
7749 || type_mode == float_type_mode
7750 || type_mode == double_type_mode)
7751 return complex_long_double_type_node;
7752 break;
7753 default:
7754 gcc_unreachable ();
7756 break;
7758 default:
7759 break;
7762 return NULL_TREE;
7765 /* Return OP, stripped of any conversions to wider types as much as is safe.
7766 Converting the value back to OP's type makes a value equivalent to OP.
7768 If FOR_TYPE is nonzero, we return a value which, if converted to
7769 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7771 OP must have integer, real or enumeral type. Pointers are not allowed!
7773 There are some cases where the obvious value we could return
7774 would regenerate to OP if converted to OP's type,
7775 but would not extend like OP to wider types.
7776 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7777 For example, if OP is (unsigned short)(signed char)-1,
7778 we avoid returning (signed char)-1 if FOR_TYPE is int,
7779 even though extending that to an unsigned short would regenerate OP,
7780 since the result of extending (signed char)-1 to (int)
7781 is different from (int) OP. */
7783 tree
7784 get_unwidened (tree op, tree for_type)
7786 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7787 tree type = TREE_TYPE (op);
7788 unsigned final_prec
7789 = TYPE_PRECISION (for_type != 0 ? for_type : type);
7790 int uns
7791 = (for_type != 0 && for_type != type
7792 && final_prec > TYPE_PRECISION (type)
7793 && TYPE_UNSIGNED (type));
7794 tree win = op;
7796 while (CONVERT_EXPR_P (op))
7798 int bitschange;
7800 /* TYPE_PRECISION on vector types has different meaning
7801 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7802 so avoid them here. */
7803 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
7804 break;
7806 bitschange = TYPE_PRECISION (TREE_TYPE (op))
7807 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
7809 /* Truncations are many-one so cannot be removed.
7810 Unless we are later going to truncate down even farther. */
7811 if (bitschange < 0
7812 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
7813 break;
7815 /* See what's inside this conversion. If we decide to strip it,
7816 we will set WIN. */
7817 op = TREE_OPERAND (op, 0);
7819 /* If we have not stripped any zero-extensions (uns is 0),
7820 we can strip any kind of extension.
7821 If we have previously stripped a zero-extension,
7822 only zero-extensions can safely be stripped.
7823 Any extension can be stripped if the bits it would produce
7824 are all going to be discarded later by truncating to FOR_TYPE. */
7826 if (bitschange > 0)
7828 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
7829 win = op;
7830 /* TYPE_UNSIGNED says whether this is a zero-extension.
7831 Let's avoid computing it if it does not affect WIN
7832 and if UNS will not be needed again. */
7833 if ((uns
7834 || CONVERT_EXPR_P (op))
7835 && TYPE_UNSIGNED (TREE_TYPE (op)))
7837 uns = 1;
7838 win = op;
7843 /* If we finally reach a constant, see if it fits in something smaller and
7844 in that case convert it. */
7845 if (TREE_CODE (win) == INTEGER_CST)
7847 tree wtype = TREE_TYPE (win);
7848 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
7849 if (for_type)
7850 prec = MAX (prec, final_prec);
7851 if (prec < TYPE_PRECISION (wtype))
7853 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
7854 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
7855 win = fold_convert (t, win);
7859 return win;
7862 /* Return OP or a simpler expression for a narrower value
7863 which can be sign-extended or zero-extended to give back OP.
7864 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
7865 or 0 if the value should be sign-extended. */
7867 tree
7868 get_narrower (tree op, int *unsignedp_ptr)
7870 int uns = 0;
7871 int first = 1;
7872 tree win = op;
7873 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
7875 if (TREE_CODE (op) == COMPOUND_EXPR)
7878 op = TREE_OPERAND (op, 1);
7879 while (TREE_CODE (op) == COMPOUND_EXPR);
7880 tree ret = get_narrower (op, unsignedp_ptr);
7881 if (ret == op)
7882 return win;
7883 auto_vec <tree, 16> v;
7884 unsigned int i;
7885 for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
7886 op = TREE_OPERAND (op, 1))
7887 v.safe_push (op);
7888 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
7889 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
7890 TREE_TYPE (ret), TREE_OPERAND (op, 0),
7891 ret);
7892 return ret;
7894 while (TREE_CODE (op) == NOP_EXPR)
7896 int bitschange
7897 = (TYPE_PRECISION (TREE_TYPE (op))
7898 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
7900 /* Truncations are many-one so cannot be removed. */
7901 if (bitschange < 0)
7902 break;
7904 /* See what's inside this conversion. If we decide to strip it,
7905 we will set WIN. */
7907 if (bitschange > 0)
7909 op = TREE_OPERAND (op, 0);
7910 /* An extension: the outermost one can be stripped,
7911 but remember whether it is zero or sign extension. */
7912 if (first)
7913 uns = TYPE_UNSIGNED (TREE_TYPE (op));
7914 /* Otherwise, if a sign extension has been stripped,
7915 only sign extensions can now be stripped;
7916 if a zero extension has been stripped, only zero-extensions. */
7917 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
7918 break;
7919 first = 0;
7921 else /* bitschange == 0 */
7923 /* A change in nominal type can always be stripped, but we must
7924 preserve the unsignedness. */
7925 if (first)
7926 uns = TYPE_UNSIGNED (TREE_TYPE (op));
7927 first = 0;
7928 op = TREE_OPERAND (op, 0);
7929 /* Keep trying to narrow, but don't assign op to win if it
7930 would turn an integral type into something else. */
7931 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
7932 continue;
7935 win = op;
7938 if (TREE_CODE (op) == COMPONENT_REF
7939 /* Since type_for_size always gives an integer type. */
7940 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
7941 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
7942 /* Ensure field is laid out already. */
7943 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
7944 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
7946 unsigned HOST_WIDE_INT innerprec
7947 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
7948 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
7949 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
7950 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
7952 /* We can get this structure field in a narrower type that fits it,
7953 but the resulting extension to its nominal type (a fullword type)
7954 must satisfy the same conditions as for other extensions.
7956 Do this only for fields that are aligned (not bit-fields),
7957 because when bit-field insns will be used there is no
7958 advantage in doing this. */
7960 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
7961 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
7962 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
7963 && type != 0)
7965 if (first)
7966 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
7967 win = fold_convert (type, op);
7971 *unsignedp_ptr = uns;
7972 return win;
7975 /* Return true if integer constant C has a value that is permissible
7976 for TYPE, an integral type. */
7978 bool
7979 int_fits_type_p (const_tree c, const_tree type)
7981 tree type_low_bound, type_high_bound;
7982 bool ok_for_low_bound, ok_for_high_bound;
7983 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
7985 /* Non-standard boolean types can have arbitrary precision but various
7986 transformations assume that they can only take values 0 and +/-1. */
7987 if (TREE_CODE (type) == BOOLEAN_TYPE)
7988 return wi::fits_to_boolean_p (wi::to_wide (c), type);
7990 retry:
7991 type_low_bound = TYPE_MIN_VALUE (type);
7992 type_high_bound = TYPE_MAX_VALUE (type);
7994 /* If at least one bound of the type is a constant integer, we can check
7995 ourselves and maybe make a decision. If no such decision is possible, but
7996 this type is a subtype, try checking against that. Otherwise, use
7997 fits_to_tree_p, which checks against the precision.
7999 Compute the status for each possibly constant bound, and return if we see
8000 one does not match. Use ok_for_xxx_bound for this purpose: false means
8001 "unknown whether the constant fits" and true means "constant known to
8002 fit"; a constant known not to fit causes an immediate return of false. */
8004 /* Check if c >= type_low_bound. */
8005 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8007 if (tree_int_cst_lt (c, type_low_bound))
8008 return false;
8009 ok_for_low_bound = true;
8011 else
8012 ok_for_low_bound = false;
8014 /* Check if c <= type_high_bound. */
8015 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8017 if (tree_int_cst_lt (type_high_bound, c))
8018 return false;
8019 ok_for_high_bound = true;
8021 else
8022 ok_for_high_bound = false;
8024 /* If the constant fits both bounds, the result is known. */
8025 if (ok_for_low_bound && ok_for_high_bound)
8026 return true;
8028 /* Perform some generic filtering which may allow making a decision
8029 even if the bounds are not constant. First, negative integers
8030 never fit in unsigned types. */
8031 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8032 return false;
8034 /* Second, narrower types always fit in wider ones. */
8035 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8036 return true;
8038 /* Third, unsigned integers with top bit set never fit signed types. */
8039 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8041 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8042 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8044 /* When a tree_cst is converted to a wide-int, the precision
8045 is taken from the type. However, if the precision of the
8046 mode underneath the type is smaller than that, it is
8047 possible that the value will not fit. The test below
8048 fails if any bit is set between the sign bit of the
8049 underlying mode and the top bit of the type. */
8050 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8051 return false;
8053 else if (wi::neg_p (wi::to_wide (c)))
8054 return false;
8057 /* If we haven't been able to decide at this point, there is nothing more we
8058 can check ourselves here. Look at the base type if we have one and it
8059 has the same precision. */
8060 if (TREE_CODE (type) == INTEGER_TYPE
8061 && TREE_TYPE (type) != 0
8062 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8064 type = TREE_TYPE (type);
8065 goto retry;
8068 /* Or to fits_to_tree_p, if nothing else. */
8069 return wi::fits_to_tree_p (wi::to_wide (c), type);
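/* Illustrative sketch, not part of the original source: a hypothetical
   helper showing the usual caller-side pattern for int_fits_type_p,
   i.e. verifying that an INTEGER_CST can be represented in an integral
   type before relying on a lossless conversion.  */

ATTRIBUTE_UNUSED static bool
example_constant_representable_p (tree cst, tree type)
{
  gcc_checking_assert (TREE_CODE (cst) == INTEGER_CST
                       && INTEGRAL_TYPE_P (type));
  return int_fits_type_p (cst, type);
}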
8072 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8073 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8074 represented (assuming two's-complement arithmetic) within the bit
8075 precision of the type are returned instead. */
8077 void
8078 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8080 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8081 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8082 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8083 else
8085 if (TYPE_UNSIGNED (type))
8086 mpz_set_ui (min, 0);
8087 else
8089 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8090 wi::to_mpz (mn, min, SIGNED);
8094 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8095 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8096 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8097 else
8099 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8100 wi::to_mpz (mn, max, TYPE_SIGN (type));
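/* Illustrative sketch (hypothetical helper, not part of the original
   source): the expected calling convention for get_type_static_bounds,
   with the caller owning and releasing the mpz_t storage.  */

ATTRIBUTE_UNUSED static void
example_query_type_bounds (tree type)
{
  mpz_t lo, hi;
  mpz_init (lo);
  mpz_init (hi);
  get_type_static_bounds (type, lo, hi);
  /* ... consume LO and HI here ...  */
  mpz_clear (lo);
  mpz_clear (hi);
}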
8104 /* Return true if VAR is an automatic variable. */
8106 bool
8107 auto_var_p (const_tree var)
8109 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8110 || TREE_CODE (var) == PARM_DECL)
8111 && ! TREE_STATIC (var))
8112 || TREE_CODE (var) == RESULT_DECL);
8115 /* Return true if VAR is an automatic variable defined in function FN. */
8117 bool
8118 auto_var_in_fn_p (const_tree var, const_tree fn)
8120 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8121 && (auto_var_p (var)
8122 || TREE_CODE (var) == LABEL_DECL));
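/* Illustrative sketch (hypothetical helper, not part of the original
   source): combining auto_var_in_fn_p with current_function_decl to
   detect locals of the function currently being compiled.  */

ATTRIBUTE_UNUSED static bool
example_local_of_current_function_p (tree decl)
{
  return current_function_decl != NULL_TREE
         && auto_var_in_fn_p (decl, current_function_decl);
}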
8125 /* Subprogram of following function. Called by walk_tree.
8127 Return *TP if it is an automatic variable or parameter of the
8128 function passed in as DATA. */
8130 static tree
8131 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8133 tree fn = (tree) data;
8135 if (TYPE_P (*tp))
8136 *walk_subtrees = 0;
8138 else if (DECL_P (*tp)
8139 && auto_var_in_fn_p (*tp, fn))
8140 return *tp;
8142 return NULL_TREE;
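/* Illustrative sketch (hypothetical helper): the walk_tree idiom that
   RETURN_TRUE_IF_VAR below relies on, i.e. checking whether a size
   expression mentions an automatic variable or parameter of FN.  */

ATTRIBUTE_UNUSED static bool
example_size_mentions_local_p (tree size_expr, tree fn)
{
  return walk_tree (&size_expr, find_var_from_fn, fn, NULL) != NULL_TREE;
}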
8145 /* Returns true if T is, contains, or refers to a type with variable
8146 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8147 arguments, but not the return type. If FN is nonzero, only return
8148 true if a modifier of the type or position of FN is a variable or
8149 parameter inside FN.
8151 This concept is more general than that of C99 'variably modified types':
8152 in C99, a struct type is never variably modified because a VLA may not
8153 appear as a structure member. However, in GNU C, code like:
8155 struct S { int i[f()]; };
8157 is valid, and other languages may define similar constructs. */
8159 bool
8160 variably_modified_type_p (tree type, tree fn)
8162 tree t;
8164 /* Test if T is either variable (if FN is zero) or an expression containing
8165 a variable in FN. If TYPE isn't gimplified, return true also if
8166 gimplify_one_sizepos would gimplify the expression into a local
8167 variable. */
8168 #define RETURN_TRUE_IF_VAR(T) \
8169 do { tree _t = (T); \
8170 if (_t != NULL_TREE \
8171 && _t != error_mark_node \
8172 && !CONSTANT_CLASS_P (_t) \
8173 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8174 && (!fn \
8175 || (!TYPE_SIZES_GIMPLIFIED (type) \
8176 && (TREE_CODE (_t) != VAR_DECL \
8177 && !CONTAINS_PLACEHOLDER_P (_t))) \
8178 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8179 return true; } while (0)
8181 if (type == error_mark_node)
8182 return false;
8184 /* If TYPE itself has variable size, it is variably modified. */
8185 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8186 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8188 switch (TREE_CODE (type))
8190 case POINTER_TYPE:
8191 case REFERENCE_TYPE:
8192 case VECTOR_TYPE:
8193 /* Ada can have pointer types referring to themselves indirectly. */
8194 if (TREE_VISITED (type))
8195 return false;
8196 TREE_VISITED (type) = true;
8197 if (variably_modified_type_p (TREE_TYPE (type), fn))
8199 TREE_VISITED (type) = false;
8200 return true;
8202 TREE_VISITED (type) = false;
8203 break;
8205 case FUNCTION_TYPE:
8206 case METHOD_TYPE:
8207 /* If TYPE is a function type, it is variably modified if the
8208 return type is variably modified. */
8209 if (variably_modified_type_p (TREE_TYPE (type), fn))
8210 return true;
8211 break;
8213 case INTEGER_TYPE:
8214 case REAL_TYPE:
8215 case FIXED_POINT_TYPE:
8216 case ENUMERAL_TYPE:
8217 case BOOLEAN_TYPE:
8218 /* Scalar types are variably modified if their end points
8219 aren't constant. */
8220 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8221 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8222 break;
8224 case RECORD_TYPE:
8225 case UNION_TYPE:
8226 case QUAL_UNION_TYPE:
8227 /* We can't see if any of the fields are variably-modified by the
8228 definition we normally use, since that would produce infinite
8229 recursion via pointers. */
8230 /* This is variably modified if some field's type is. */
8231 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8232 if (TREE_CODE (t) == FIELD_DECL)
8234 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8235 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8236 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8238 /* If the type is a qualified union, then the DECL_QUALIFIER
8239 of fields can also be an expression containing a variable. */
8240 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8241 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8243 /* If the field is a qualified union, then it's only a container
8244 for what's inside so we look into it. That's necessary in LTO
8245 mode because the sizes of the field tested above have been set
8246 to PLACEHOLDER_EXPRs by free_lang_data. */
8247 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
8248 && variably_modified_type_p (TREE_TYPE (t), fn))
8249 return true;
8251 break;
8253 case ARRAY_TYPE:
8254 /* Do not call ourselves to avoid infinite recursion. This is
8255 variably modified if the element type is. */
8256 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8257 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8258 break;
8260 default:
8261 break;
8264 /* The current language may have other cases to check, but in general,
8265 all other types are not variably modified. */
8266 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8268 #undef RETURN_TRUE_IF_VAR
8271 /* Given a DECL or TYPE, return the scope in which it was declared, or
8272 NULL_TREE if there is no containing scope. */
8274 tree
8275 get_containing_scope (const_tree t)
8277 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8280 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8282 const_tree
8283 get_ultimate_context (const_tree decl)
8285 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8287 if (TREE_CODE (decl) == BLOCK)
8288 decl = BLOCK_SUPERCONTEXT (decl);
8289 else
8290 decl = get_containing_scope (decl);
8292 return decl;
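/* Illustrative sketch (hypothetical helper): get_ultimate_context is
   typically used to test whether a decl is ultimately owned by some
   translation unit at all.  */

ATTRIBUTE_UNUSED static bool
example_has_translation_unit_context_p (const_tree decl)
{
  return get_ultimate_context (decl) != NULL_TREE;
}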
8295 /* Return the innermost context enclosing DECL that is
8296 a FUNCTION_DECL, or zero if none. */
8298 tree
8299 decl_function_context (const_tree decl)
8301 tree context;
8303 if (TREE_CODE (decl) == ERROR_MARK)
8304 return 0;
8306 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8307 where we look up the function at runtime. Such functions always take
8308 a first argument of type 'pointer to real context'.
8310 C++ should really be fixed to use DECL_CONTEXT for the real context,
8311 and use something else for the "virtual context". */
8312 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8313 context
8314 = TYPE_MAIN_VARIANT
8315 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8316 else
8317 context = DECL_CONTEXT (decl);
8319 while (context && TREE_CODE (context) != FUNCTION_DECL)
8321 if (TREE_CODE (context) == BLOCK)
8322 context = BLOCK_SUPERCONTEXT (context);
8323 else
8324 context = get_containing_scope (context);
8327 return context;
8330 /* Return the innermost context enclosing DECL that is
8331 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8332 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8334 tree
8335 decl_type_context (const_tree decl)
8337 tree context = DECL_CONTEXT (decl);
8339 while (context)
8340 switch (TREE_CODE (context))
8342 case NAMESPACE_DECL:
8343 case TRANSLATION_UNIT_DECL:
8344 return NULL_TREE;
8346 case RECORD_TYPE:
8347 case UNION_TYPE:
8348 case QUAL_UNION_TYPE:
8349 return context;
8351 case TYPE_DECL:
8352 case FUNCTION_DECL:
8353 context = DECL_CONTEXT (context);
8354 break;
8356 case BLOCK:
8357 context = BLOCK_SUPERCONTEXT (context);
8358 break;
8360 default:
8361 gcc_unreachable ();
8364 return NULL_TREE;
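/* Illustrative sketch (hypothetical helpers): the two context walkers
   above answer complementary questions about where a decl lives.  */

ATTRIBUTE_UNUSED static bool
example_nested_in_function_p (const_tree decl)
{
  return decl_function_context (decl) != NULL_TREE;
}

ATTRIBUTE_UNUSED static bool
example_aggregate_member_p (const_tree decl)
{
  return decl_type_context (decl) != NULL_TREE;
}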
8367 /* CALL is a CALL_EXPR. Return the declaration for the function
8368 called, or NULL_TREE if the called function cannot be
8369 determined. */
8371 tree
8372 get_callee_fndecl (const_tree call)
8374 tree addr;
8376 if (call == error_mark_node)
8377 return error_mark_node;
8379 /* It's invalid to call this function with anything but a
8380 CALL_EXPR. */
8381 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8383 /* The first operand to the CALL is the address of the function
8384 called. */
8385 addr = CALL_EXPR_FN (call);
8387 /* If there is no function, return early. */
8388 if (addr == NULL_TREE)
8389 return NULL_TREE;
8391 STRIP_NOPS (addr);
8393 /* If this is a readonly function pointer, extract its initial value. */
8394 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8395 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8396 && DECL_INITIAL (addr))
8397 addr = DECL_INITIAL (addr);
8399 /* If the address is just `&f' for some function `f', then we know
8400 that `f' is being called. */
8401 if (TREE_CODE (addr) == ADDR_EXPR
8402 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8403 return TREE_OPERAND (addr, 0);
8405 /* We couldn't figure out what was being called. */
8406 return NULL_TREE;
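/* Illustrative sketch (hypothetical helper): the common middle-end
   pattern of pairing get_callee_fndecl with fndecl_built_in_p to
   recognize a direct call to a specific builtin.  CALL must be a
   CALL_EXPR.  */

ATTRIBUTE_UNUSED static bool
example_direct_memcpy_call_p (tree call)
{
  tree fndecl = get_callee_fndecl (call);
  return fndecl && fndecl_built_in_p (fndecl, BUILT_IN_MEMCPY);
}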
8409 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8410 return the associated function code, otherwise return CFN_LAST. */
8412 combined_fn
8413 get_call_combined_fn (const_tree call)
8415 /* It's invalid to call this function with anything but a CALL_EXPR. */
8416 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8418 if (!CALL_EXPR_FN (call))
8419 return as_combined_fn (CALL_EXPR_IFN (call));
8421 tree fndecl = get_callee_fndecl (call);
8422 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8423 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8425 return CFN_LAST;
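/* Illustrative sketch (hypothetical helper): combined_fn folds normal
   builtins and internal functions into one enumeration, so CFN_LAST
   means "neither"; specific operations can then be matched against
   values such as CFN_BUILT_IN_SQRT or CFN_SQRT.  */

ATTRIBUTE_UNUSED static bool
example_call_is_recognized_fn_p (const_tree call)
{
  return get_call_combined_fn (call) != CFN_LAST;
}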
8428 /* Comparator of indices based on tree_node_counts. */
8430 static int
8431 tree_nodes_cmp (const void *p1, const void *p2)
8433 const unsigned *n1 = (const unsigned *)p1;
8434 const unsigned *n2 = (const unsigned *)p2;
8436 return tree_node_counts[*n1] - tree_node_counts[*n2];
8439 /* Comparator of indices based on tree_code_counts. */
8441 static int
8442 tree_codes_cmp (const void *p1, const void *p2)
8444 const unsigned *n1 = (const unsigned *)p1;
8445 const unsigned *n2 = (const unsigned *)p2;
8447 return tree_code_counts[*n1] - tree_code_counts[*n2];
8450 #define TREE_MEM_USAGE_SPACES 40
8452 /* Print debugging information about tree nodes generated during the compile,
8453 and any language-specific information. */
8455 void
8456 dump_tree_statistics (void)
8458 if (GATHER_STATISTICS)
8460 uint64_t total_nodes, total_bytes;
8461 fprintf (stderr, "\nKind Nodes Bytes\n");
8462 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8463 total_nodes = total_bytes = 0;
8466 auto_vec<unsigned> indices (all_kinds);
8467 for (unsigned i = 0; i < all_kinds; i++)
8468 indices.quick_push (i);
8469 indices.qsort (tree_nodes_cmp);
8471 for (unsigned i = 0; i < (int) all_kinds; i++)
8473 unsigned j = indices[i];
8474 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8475 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8476 SIZE_AMOUNT (tree_node_sizes[j]));
8477 total_nodes += tree_node_counts[j];
8478 total_bytes += tree_node_sizes[j];
8480 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8481 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8482 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8483 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8487 fprintf (stderr, "Code Nodes\n");
8488 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8490 auto_vec<unsigned> indices (MAX_TREE_CODES);
8491 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8492 indices.quick_push (i);
8493 indices.qsort (tree_codes_cmp);
8495 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8497 unsigned j = indices[i];
8498 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8499 get_tree_code_name ((enum tree_code) j),
8500 SIZE_AMOUNT (tree_code_counts[j]));
8502 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8503 fprintf (stderr, "\n");
8504 ssanames_print_statistics ();
8505 fprintf (stderr, "\n");
8506 phinodes_print_statistics ();
8507 fprintf (stderr, "\n");
8510 else
8511 fprintf (stderr, "(No per-node statistics)\n");
8513 print_type_hash_statistics ();
8514 print_debug_expr_statistics ();
8515 print_value_expr_statistics ();
8516 lang_hooks.print_statistics ();
8519 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8521 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8523 unsigned
8524 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8526 /* This relies on the raw feedback's top 4 bits being zero. */
8527 #define FEEDBACK(X) ((X) * 0x04c11db7)
8528 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8529 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8530 static const unsigned syndromes[16] =
8532 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8533 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8534 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8535 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8537 #undef FEEDBACK
8538 #undef SYNDROME
8540 value <<= (32 - bytes * 8);
8541 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8543 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8545 chksum = (chksum << 4) ^ feedback;
8548 return chksum;
8551 /* Generate a crc32 of a string. */
8553 unsigned
8554 crc32_string (unsigned chksum, const char *string)
8557 chksum = crc32_byte (chksum, *string);
8558 while (*string++);
8559 return chksum;
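/* Illustrative sketch (hypothetical helper): chaining the CRC helpers
   the way get_file_function_name does below, seeding with 0 and then
   mixing in a string and a raw byte.  */

ATTRIBUTE_UNUSED static unsigned
example_mixed_checksum (const char *name, unsigned char tag)
{
  unsigned chksum = crc32_string (0, name);
  return crc32_byte (chksum, tag);
}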
8562 /* P is a string that will be used in a symbol. Mask out any characters
8563 that are not valid in that context. */
8565 void
8566 clean_symbol_name (char *p)
8568 for (; *p; p++)
8569 if (! (ISALNUM (*p)
8570 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8571 || *p == '$'
8572 #endif
8573 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8574 || *p == '.'
8575 #endif
8577 *p = '_';
8580 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8582 /* Create a unique anonymous identifier. The identifier is still a
8583 valid assembly label. */
8585 tree
8586 make_anon_name ()
8588 const char *fmt =
8589 #if !defined (NO_DOT_IN_LABEL)
8591 #elif !defined (NO_DOLLAR_IN_LABEL)
8593 #else
8595 #endif
8596 "_anon_%d";
8598 char buf[24];
8599 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8600 gcc_checking_assert (len < int (sizeof (buf)));
8602 tree id = get_identifier_with_length (buf, len);
8603 IDENTIFIER_ANON_P (id) = true;
8605 return id;
8608 /* Generate a name for a special-purpose function.
8609 The generated name may need to be unique across the whole link.
8610 Changes to this function may also require corresponding changes to
8611 xstrdup_mask_random.
8612 TYPE is some string to identify the purpose of this function to the
8613 linker or collect2; it must start with an uppercase letter,
8614 one of:
8615 I - for constructors
8616 D - for destructors
8617 N - for C++ anonymous namespaces
8618 F - for DWARF unwind frame information. */
8620 tree
8621 get_file_function_name (const char *type)
8623 char *buf;
8624 const char *p;
8625 char *q;
8627 /* If we already have a name we know to be unique, just use that. */
8628 if (first_global_object_name)
8629 p = q = ASTRDUP (first_global_object_name);
8630 /* If the target is handling the constructors/destructors, they
8631 will be local to this file and the name is only necessary for
8632 debugging purposes.
8633 We also assign sub_I and sub_D suffixes to constructors called from
8634 the global static constructors. These are always local. */
8635 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8636 || (startswith (type, "sub_")
8637 && (type[4] == 'I' || type[4] == 'D')))
8639 const char *file = main_input_filename;
8640 if (! file)
8641 file = LOCATION_FILE (input_location);
8642 /* Just use the file's basename, because the full pathname
8643 might be quite long. */
8644 p = q = ASTRDUP (lbasename (file));
8646 else
8648 /* Otherwise, the name must be unique across the entire link.
8649 We don't have anything that we know to be unique to this translation
8650 unit, so use what we do have and throw in some randomness. */
8651 unsigned len;
8652 const char *name = weak_global_object_name;
8653 const char *file = main_input_filename;
8655 if (! name)
8656 name = "";
8657 if (! file)
8658 file = LOCATION_FILE (input_location);
8660 len = strlen (file);
8661 q = (char *) alloca (9 + 19 + len + 1);
8662 memcpy (q, file, len + 1);
8664 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8665 crc32_string (0, name), get_random_seed (false));
8667 p = q;
8670 clean_symbol_name (q);
8671 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8672 + strlen (type));
8674 /* Set up the name of the file-level functions we may need.
8675 Use a global object (which is already required to be unique over
8676 the program) rather than the file name (which imposes extra
8677 constraints). */
8678 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8680 return get_identifier (buf);
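/* Illustrative sketch (hypothetical helper): requesting a constructor
   name; assuming first_global_object_name is "foo", the result is the
   identifier "_GLOBAL__I_foo" per FILE_FUNCTION_FORMAT.  */

ATTRIBUTE_UNUSED static tree
example_static_ctor_identifier (void)
{
  return get_file_function_name ("I");
}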
8683 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8685 /* Complain that the tree code of NODE does not match the expected 0
8686 terminated list of trailing codes. The trailing code list can be
8687 empty, for a more vague error message. FILE, LINE, and FUNCTION
8688 are of the caller. */
8690 void
8691 tree_check_failed (const_tree node, const char *file,
8692 int line, const char *function, ...)
8694 va_list args;
8695 const char *buffer;
8696 unsigned length = 0;
8697 enum tree_code code;
8699 va_start (args, function);
8700 while ((code = (enum tree_code) va_arg (args, int)))
8701 length += 4 + strlen (get_tree_code_name (code));
8702 va_end (args);
8703 if (length)
8705 char *tmp;
8706 va_start (args, function);
8707 length += strlen ("expected ");
8708 buffer = tmp = (char *) alloca (length);
8709 length = 0;
8710 while ((code = (enum tree_code) va_arg (args, int)))
8712 const char *prefix = length ? " or " : "expected ";
8714 strcpy (tmp + length, prefix);
8715 length += strlen (prefix);
8716 strcpy (tmp + length, get_tree_code_name (code));
8717 length += strlen (get_tree_code_name (code));
8719 va_end (args);
8721 else
8722 buffer = "unexpected node";
8724 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8725 buffer, get_tree_code_name (TREE_CODE (node)),
8726 function, trim_filename (file), line);
8729 /* Complain that the tree code of NODE does match the expected 0
8730 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8731 the caller. */
8733 void
8734 tree_not_check_failed (const_tree node, const char *file,
8735 int line, const char *function, ...)
8737 va_list args;
8738 char *buffer;
8739 unsigned length = 0;
8740 enum tree_code code;
8742 va_start (args, function);
8743 while ((code = (enum tree_code) va_arg (args, int)))
8744 length += 4 + strlen (get_tree_code_name (code));
8745 va_end (args);
8746 va_start (args, function);
8747 buffer = (char *) alloca (length);
8748 length = 0;
8749 while ((code = (enum tree_code) va_arg (args, int)))
8751 if (length)
8753 strcpy (buffer + length, " or ");
8754 length += 4;
8756 strcpy (buffer + length, get_tree_code_name (code));
8757 length += strlen (get_tree_code_name (code));
8759 va_end (args);
8761 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8762 buffer, get_tree_code_name (TREE_CODE (node)),
8763 function, trim_filename (file), line);
8766 /* Similar to tree_check_failed, except that we check for a class of tree
8767 code, given in CL. */
8769 void
8770 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8771 const char *file, int line, const char *function)
8773 internal_error
8774 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8775 TREE_CODE_CLASS_STRING (cl),
8776 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8777 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8780 /* Similar to tree_check_failed, except that instead of specifying a
8781 dozen codes, use the knowledge that they're all sequential. */
8783 void
8784 tree_range_check_failed (const_tree node, const char *file, int line,
8785 const char *function, enum tree_code c1,
8786 enum tree_code c2)
8788 char *buffer;
8789 unsigned length = 0;
8790 unsigned int c;
8792 for (c = c1; c <= c2; ++c)
8793 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
8795 length += strlen ("expected ");
8796 buffer = (char *) alloca (length);
8797 length = 0;
8799 for (c = c1; c <= c2; ++c)
8801 const char *prefix = length ? " or " : "expected ";
8803 strcpy (buffer + length, prefix);
8804 length += strlen (prefix);
8805 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
8806 length += strlen (get_tree_code_name ((enum tree_code) c));
8809 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8810 buffer, get_tree_code_name (TREE_CODE (node)),
8811 function, trim_filename (file), line);
8815 /* Similar to tree_check_failed, except that we check that a tree does
8816 not belong to the specified class, given in CL. */
8818 void
8819 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
8820 const char *file, int line, const char *function)
8822 internal_error
8823 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8824 TREE_CODE_CLASS_STRING (cl),
8825 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8826 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8830 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
8832 void
8833 omp_clause_check_failed (const_tree node, const char *file, int line,
8834 const char *function, enum omp_clause_code code)
8836 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
8837 "in %s, at %s:%d",
8838 omp_clause_code_name[code],
8839 get_tree_code_name (TREE_CODE (node)),
8840 function, trim_filename (file), line);
8844 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
8846 void
8847 omp_clause_range_check_failed (const_tree node, const char *file, int line,
8848 const char *function, enum omp_clause_code c1,
8849 enum omp_clause_code c2)
8851 char *buffer;
8852 unsigned length = 0;
8853 unsigned int c;
8855 for (c = c1; c <= c2; ++c)
8856 length += 4 + strlen (omp_clause_code_name[c]);
8858 length += strlen ("expected ");
8859 buffer = (char *) alloca (length);
8860 length = 0;
8862 for (c = c1; c <= c2; ++c)
8864 const char *prefix = length ? " or " : "expected ";
8866 strcpy (buffer + length, prefix);
8867 length += strlen (prefix);
8868 strcpy (buffer + length, omp_clause_code_name[c]);
8869 length += strlen (omp_clause_code_name[c]);
8872 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8873 buffer, omp_clause_code_name[TREE_CODE (node)],
8874 function, trim_filename (file), line);
8878 #undef DEFTREESTRUCT
8879 #define DEFTREESTRUCT(VAL, NAME) NAME,
8881 static const char *ts_enum_names[] = {
8882 #include "treestruct.def"
8884 #undef DEFTREESTRUCT
8886 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
8888 /* Similar to tree_class_check_failed, except that we check for
8889 whether CODE contains the tree structure identified by EN. */
8891 void
8892 tree_contains_struct_check_failed (const_tree node,
8893 const enum tree_node_structure_enum en,
8894 const char *file, int line,
8895 const char *function)
8897 internal_error
8898 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
8899 TS_ENUM_NAME (en),
8900 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8904 /* Similar to above, except that the check is for the bounds of a
8905 TREE_INT_CST's (dynamically sized) vector of elements. */
8907 void
8908 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
8909 const char *function)
8911 internal_error
8912 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
8913 "at %s:%d",
8914 idx + 1, len, function, trim_filename (file), line);
8917 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
8918 (dynamically sized) vector. */
8920 void
8921 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
8922 const char *function)
8924 internal_error
8925 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
8926 idx + 1, len, function, trim_filename (file), line);
8929 /* Similar to above, except that the check is for the bounds of the operand
8930 vector of an expression node EXP. */
8932 void
8933 tree_operand_check_failed (int idx, const_tree exp, const char *file,
8934 int line, const char *function)
8936 enum tree_code code = TREE_CODE (exp);
8937 internal_error
8938 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
8939 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
8940 function, trim_filename (file), line);
8943 /* Similar to above, except that the check is for the number of
8944 operands of an OMP_CLAUSE node. */
8946 void
8947 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
8948 int line, const char *function)
8950 internal_error
8951 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
8952 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
8953 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
8954 trim_filename (file), line);
8956 #endif /* ENABLE_TREE_CHECKING */
8958 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
8959 and mapped to the machine mode MODE. Initialize its fields and build
8960 the information necessary for debugging output. */
8962 static tree
8963 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
8965 tree t;
8966 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
8968 t = make_node (VECTOR_TYPE);
8969 TREE_TYPE (t) = mv_innertype;
8970 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
8971 SET_TYPE_MODE (t, mode);
8973 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
8974 SET_TYPE_STRUCTURAL_EQUALITY (t);
8975 else if ((TYPE_CANONICAL (mv_innertype) != innertype
8976 || mode != VOIDmode)
8977 && !VECTOR_BOOLEAN_TYPE_P (t))
8978 TYPE_CANONICAL (t)
8979 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
8981 layout_type (t);
8983 hashval_t hash = type_hash_canon_hash (t);
8984 t = type_hash_canon (hash, t);
8986 /* We have built a main variant, based on the main variant of the
8987 inner type. Use it to build the variant we return. */
8988 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
8989 && TREE_TYPE (t) != innertype)
8990 return build_type_attribute_qual_variant (t,
8991 TYPE_ATTRIBUTES (innertype),
8992 TYPE_QUALS (innertype));
8994 return t;
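/* Illustrative sketch (hypothetical helper): make_vector_type is static;
   external callers reach it through wrappers such as build_vector_type,
   e.g. to build a 4-element vector of SImode integers.  */

ATTRIBUTE_UNUSED static tree
example_v4si_vector_type (void)
{
  return build_vector_type (intSI_type_node, 4);
}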
8997 static tree
8998 make_or_reuse_type (unsigned size, int unsignedp)
9000 int i;
9002 if (size == INT_TYPE_SIZE)
9003 return unsignedp ? unsigned_type_node : integer_type_node;
9004 if (size == CHAR_TYPE_SIZE)
9005 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9006 if (size == SHORT_TYPE_SIZE)
9007 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9008 if (size == LONG_TYPE_SIZE)
9009 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9010 if (size == LONG_LONG_TYPE_SIZE)
9011 return (unsignedp ? long_long_unsigned_type_node
9012 : long_long_integer_type_node);
9014 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9015 if (size == int_n_data[i].bitsize
9016 && int_n_enabled_p[i])
9017 return (unsignedp ? int_n_trees[i].unsigned_type
9018 : int_n_trees[i].signed_type);
9020 if (unsignedp)
9021 return make_unsigned_type (size);
9022 else
9023 return make_signed_type (size);
9026 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9028 static tree
9029 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9031 if (satp)
9033 if (size == SHORT_FRACT_TYPE_SIZE)
9034 return unsignedp ? sat_unsigned_short_fract_type_node
9035 : sat_short_fract_type_node;
9036 if (size == FRACT_TYPE_SIZE)
9037 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9038 if (size == LONG_FRACT_TYPE_SIZE)
9039 return unsignedp ? sat_unsigned_long_fract_type_node
9040 : sat_long_fract_type_node;
9041 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9042 return unsignedp ? sat_unsigned_long_long_fract_type_node
9043 : sat_long_long_fract_type_node;
9045 else
9047 if (size == SHORT_FRACT_TYPE_SIZE)
9048 return unsignedp ? unsigned_short_fract_type_node
9049 : short_fract_type_node;
9050 if (size == FRACT_TYPE_SIZE)
9051 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9052 if (size == LONG_FRACT_TYPE_SIZE)
9053 return unsignedp ? unsigned_long_fract_type_node
9054 : long_fract_type_node;
9055 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9056 return unsignedp ? unsigned_long_long_fract_type_node
9057 : long_long_fract_type_node;
9060 return make_fract_type (size, unsignedp, satp);
9063 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9065 static tree
9066 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9068 if (satp)
9070 if (size == SHORT_ACCUM_TYPE_SIZE)
9071 return unsignedp ? sat_unsigned_short_accum_type_node
9072 : sat_short_accum_type_node;
9073 if (size == ACCUM_TYPE_SIZE)
9074 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9075 if (size == LONG_ACCUM_TYPE_SIZE)
9076 return unsignedp ? sat_unsigned_long_accum_type_node
9077 : sat_long_accum_type_node;
9078 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9079 return unsignedp ? sat_unsigned_long_long_accum_type_node
9080 : sat_long_long_accum_type_node;
9082 else
9084 if (size == SHORT_ACCUM_TYPE_SIZE)
9085 return unsignedp ? unsigned_short_accum_type_node
9086 : short_accum_type_node;
9087 if (size == ACCUM_TYPE_SIZE)
9088 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9089 if (size == LONG_ACCUM_TYPE_SIZE)
9090 return unsignedp ? unsigned_long_accum_type_node
9091 : long_accum_type_node;
9092 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9093 return unsignedp ? unsigned_long_long_accum_type_node
9094 : long_long_accum_type_node;
9097 return make_accum_type (size, unsignedp, satp);
9101 /* Create an atomic variant node for TYPE. This routine is called
9102 during initialization of data types to create the 5 basic atomic
9103 types. The generic build_variant_type function requires these to
9104 already be set up in order to function properly, so cannot be
9105 called from there. If ALIGN is non-zero, then ensure alignment is
9106 overridden to this value. */
9108 static tree
9109 build_atomic_base (tree type, unsigned int align)
9111 tree t;
9113 /* Make sure it's not already registered. */
9114 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9115 return t;
9117 t = build_variant_type_copy (type);
9118 set_type_quals (t, TYPE_QUAL_ATOMIC);
9120 if (align)
9121 SET_TYPE_ALIGN (t, align);
9123 return t;
9126 /* Information about the _FloatN and _FloatNx types. This must be in
9127 the same order as the corresponding TI_* enum values. */
9128 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9130 { 16, false },
9131 { 32, false },
9132 { 64, false },
9133 { 128, false },
9134 { 32, true },
9135 { 64, true },
9136 { 128, true },
9140 /* Create nodes for all integer types (and error_mark_node) using the sizes
9141 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9143 void
9144 build_common_tree_nodes (bool signed_char)
9146 int i;
9148 error_mark_node = make_node (ERROR_MARK);
9149 TREE_TYPE (error_mark_node) = error_mark_node;
9151 initialize_sizetypes ();
9153 /* Define both `signed char' and `unsigned char'. */
9154 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9155 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9156 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9157 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9159 /* Define `char', which is like either `signed char' or `unsigned char'
9160 but not the same as either. */
9161 char_type_node
9162 = (signed_char
9163 ? make_signed_type (CHAR_TYPE_SIZE)
9164 : make_unsigned_type (CHAR_TYPE_SIZE));
9165 TYPE_STRING_FLAG (char_type_node) = 1;
9167 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9168 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9169 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9170 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9171 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9172 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9173 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9174 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9176 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9178 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9179 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9181 if (int_n_enabled_p[i])
9183 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9184 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9188 /* Define a boolean type. This type only represents boolean values but
9189 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9190 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9191 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9192 TYPE_PRECISION (boolean_type_node) = 1;
9193 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9195 /* Define what type to use for size_t. */
9196 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9197 size_type_node = unsigned_type_node;
9198 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9199 size_type_node = long_unsigned_type_node;
9200 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9201 size_type_node = long_long_unsigned_type_node;
9202 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9203 size_type_node = short_unsigned_type_node;
9204 else
9206 int i;
9208 size_type_node = NULL_TREE;
9209 for (i = 0; i < NUM_INT_N_ENTS; i++)
9210 if (int_n_enabled_p[i])
9212 char name[50], altname[50];
9213 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9214 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9216 if (strcmp (name, SIZE_TYPE) == 0
9217 || strcmp (altname, SIZE_TYPE) == 0)
9219 size_type_node = int_n_trees[i].unsigned_type;
9222 if (size_type_node == NULL_TREE)
9223 gcc_unreachable ();
9226 /* Define what type to use for ptrdiff_t. */
9227 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9228 ptrdiff_type_node = integer_type_node;
9229 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9230 ptrdiff_type_node = long_integer_type_node;
9231 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9232 ptrdiff_type_node = long_long_integer_type_node;
9233 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9234 ptrdiff_type_node = short_integer_type_node;
9235 else
9237 ptrdiff_type_node = NULL_TREE;
9238 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9239 if (int_n_enabled_p[i])
9241 char name[50], altname[50];
9242 sprintf (name, "__int%d", int_n_data[i].bitsize);
9243 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9245 if (strcmp (name, PTRDIFF_TYPE) == 0
9246 || strcmp (altname, PTRDIFF_TYPE) == 0)
9247 ptrdiff_type_node = int_n_trees[i].signed_type;
9249 if (ptrdiff_type_node == NULL_TREE)
9250 gcc_unreachable ();
9253 /* Fill in the rest of the sized types. Reuse existing type nodes
9254 when possible. */
9255 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9256 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9257 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9258 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9259 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9261 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9262 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9263 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9264 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9265 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9267 /* Don't call build_qualified_type for atomics. That routine does
9268 special processing for atomics, and until they are initialized
9269 it's better not to make that call.
9271 Check to see if there is a target override for atomic types. */
9273 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9274 targetm.atomic_align_for_mode (QImode));
9275 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9276 targetm.atomic_align_for_mode (HImode));
9277 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9278 targetm.atomic_align_for_mode (SImode));
9279 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9280 targetm.atomic_align_for_mode (DImode));
9281 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9282 targetm.atomic_align_for_mode (TImode));
9284 access_public_node = get_identifier ("public");
9285 access_protected_node = get_identifier ("protected");
9286 access_private_node = get_identifier ("private");
9288 /* Define these next since types below may use them. */
9289 integer_zero_node = build_int_cst (integer_type_node, 0);
9290 integer_one_node = build_int_cst (integer_type_node, 1);
9291 integer_three_node = build_int_cst (integer_type_node, 3);
9292 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9294 size_zero_node = size_int (0);
9295 size_one_node = size_int (1);
9296 bitsize_zero_node = bitsize_int (0);
9297 bitsize_one_node = bitsize_int (1);
9298 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9300 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9301 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9303 void_type_node = make_node (VOID_TYPE);
9304 layout_type (void_type_node);
9306 /* We are not going to have real types in C with less than byte alignment,
9307 so we might as well not have any types that claim to have it. */
9308 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9309 TYPE_USER_ALIGN (void_type_node) = 0;
9311 void_node = make_node (VOID_CST);
9312 TREE_TYPE (void_node) = void_type_node;
9314 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9315 layout_type (TREE_TYPE (null_pointer_node));
9317 ptr_type_node = build_pointer_type (void_type_node);
9318 const_ptr_type_node
9319 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9320 for (unsigned i = 0;
9321 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
9322 ++i)
9323 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9325 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9327 float_type_node = make_node (REAL_TYPE);
9328 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9329 layout_type (float_type_node);
9331 double_type_node = make_node (REAL_TYPE);
9332 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9333 layout_type (double_type_node);
9335 long_double_type_node = make_node (REAL_TYPE);
9336 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9337 layout_type (long_double_type_node);
9339 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9341 int n = floatn_nx_types[i].n;
9342 bool extended = floatn_nx_types[i].extended;
9343 scalar_float_mode mode;
9344 if (!targetm.floatn_mode (n, extended).exists (&mode))
9345 continue;
9346 int precision = GET_MODE_PRECISION (mode);
9347 /* Work around the rs6000 KFmode having precision 113 not
9348 128. */
9349 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9350 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9351 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9352 if (!extended)
9353 gcc_assert (min_precision == n);
9354 if (precision < min_precision)
9355 precision = min_precision;
9356 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9357 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9358 layout_type (FLOATN_NX_TYPE_NODE (i));
9359 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9362 float_ptr_type_node = build_pointer_type (float_type_node);
9363 double_ptr_type_node = build_pointer_type (double_type_node);
9364 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9365 integer_ptr_type_node = build_pointer_type (integer_type_node);
9367 /* Fixed size integer types. */
9368 uint16_type_node = make_or_reuse_type (16, 1);
9369 uint32_type_node = make_or_reuse_type (32, 1);
9370 uint64_type_node = make_or_reuse_type (64, 1);
9371 if (targetm.scalar_mode_supported_p (TImode))
9372 uint128_type_node = make_or_reuse_type (128, 1);
9374 /* Decimal float types. */
9375 if (targetm.decimal_float_supported_p ())
9377 dfloat32_type_node = make_node (REAL_TYPE);
9378 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9379 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9380 layout_type (dfloat32_type_node);
9382 dfloat64_type_node = make_node (REAL_TYPE);
9383 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9384 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9385 layout_type (dfloat64_type_node);
9387 dfloat128_type_node = make_node (REAL_TYPE);
9388 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9389 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9390 layout_type (dfloat128_type_node);
9393 complex_integer_type_node = build_complex_type (integer_type_node, true);
9394 complex_float_type_node = build_complex_type (float_type_node, true);
9395 complex_double_type_node = build_complex_type (double_type_node, true);
9396 complex_long_double_type_node = build_complex_type (long_double_type_node,
9397 true);
9399 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9401 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9402 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9403 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9406 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9407 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9408 sat_ ## KIND ## _type_node = \
9409 make_sat_signed_ ## KIND ## _type (SIZE); \
9410 sat_unsigned_ ## KIND ## _type_node = \
9411 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9412 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9413 unsigned_ ## KIND ## _type_node = \
9414 make_unsigned_ ## KIND ## _type (SIZE);
9416 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9417 sat_ ## WIDTH ## KIND ## _type_node = \
9418 make_sat_signed_ ## KIND ## _type (SIZE); \
9419 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9420 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9421 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9422 unsigned_ ## WIDTH ## KIND ## _type_node = \
9423 make_unsigned_ ## KIND ## _type (SIZE);
9425 /* Make fixed-point type nodes based on four different widths. */
9426 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9427 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9428 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9429 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9430 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9432 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9433 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9434 NAME ## _type_node = \
9435 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9436 u ## NAME ## _type_node = \
9437 make_or_reuse_unsigned_ ## KIND ## _type \
9438 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9439 sat_ ## NAME ## _type_node = \
9440 make_or_reuse_sat_signed_ ## KIND ## _type \
9441 (GET_MODE_BITSIZE (MODE ## mode)); \
9442 sat_u ## NAME ## _type_node = \
9443 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9444 (GET_MODE_BITSIZE (U ## MODE ## mode));
9446 /* Fixed-point type and mode nodes. */
9447 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9448 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9449 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9450 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9451 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9452 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9453 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9454 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9455 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9456 MAKE_FIXED_MODE_NODE (accum, da, DA)
9457 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9460 tree t = targetm.build_builtin_va_list ();
9462 /* Many back-ends define record types without setting TYPE_NAME.
9463 If we copied the record type here, we'd keep the original
9464 record type without a name. This breaks name mangling. So,
9465 don't copy record types and let c_common_nodes_and_builtins()
9466 declare the type to be __builtin_va_list. */
9467 if (TREE_CODE (t) != RECORD_TYPE)
9468 t = build_variant_type_copy (t);
9470 va_list_type_node = t;
9473 /* SCEV analyzer global shared trees. */
9474 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9475 TREE_TYPE (chrec_dont_know) = void_type_node;
9476 chrec_known = make_node (SCEV_KNOWN);
9477 TREE_TYPE (chrec_known) = void_type_node;
9480 /* Modify DECL for given flags.
9481 TM_PURE attribute is set only on types, so the function will modify
9482 DECL's type when ECF_TM_PURE is used. */
9484 void
9485 set_call_expr_flags (tree decl, int flags)
9487 if (flags & ECF_NOTHROW)
9488 TREE_NOTHROW (decl) = 1;
9489 if (flags & ECF_CONST)
9490 TREE_READONLY (decl) = 1;
9491 if (flags & ECF_PURE)
9492 DECL_PURE_P (decl) = 1;
9493 if (flags & ECF_LOOPING_CONST_OR_PURE)
9494 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9495 if (flags & ECF_NOVOPS)
9496 DECL_IS_NOVOPS (decl) = 1;
9497 if (flags & ECF_NORETURN)
9498 TREE_THIS_VOLATILE (decl) = 1;
9499 if (flags & ECF_MALLOC)
9500 DECL_IS_MALLOC (decl) = 1;
9501 if (flags & ECF_RETURNS_TWICE)
9502 DECL_IS_RETURNS_TWICE (decl) = 1;
9503 if (flags & ECF_LEAF)
9504 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9505 NULL, DECL_ATTRIBUTES (decl));
9506 if (flags & ECF_COLD)
9507 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9508 NULL, DECL_ATTRIBUTES (decl));
9509 if (flags & ECF_RET1)
9510 DECL_ATTRIBUTES (decl)
9511 = tree_cons (get_identifier ("fn spec"),
9512 build_tree_list (NULL_TREE, build_string (2, "1 ")),
9513 DECL_ATTRIBUTES (decl));
9514 if ((flags & ECF_TM_PURE) && flag_tm)
9515 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9516 /* Looping const or pure is implied by noreturn.
9517 There is currently no way to declare looping const or looping pure alone. */
9518 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9519 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9523 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9525 static void
9526 local_define_builtin (const char *name, tree type, enum built_in_function code,
9527 const char *library_name, int ecf_flags)
9529 tree decl;
9531 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9532 library_name, NULL_TREE);
9533 set_call_expr_flags (decl, ecf_flags);
9535 set_builtin_decl (code, decl, true);
9538 /* Call this function after instantiating all builtins that the language
9539 front end cares about. This will build the rest of the builtins
9540 and internal functions that are relied upon by the tree optimizers and
9541 the middle-end. */
9543 void
9544 build_common_builtin_nodes (void)
9546 tree tmp, ftype;
9547 int ecf_flags;
9549 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
9551 ftype = build_function_type_list (void_type_node,
9552 ptr_type_node,
9553 ptr_type_node,
9554 integer_type_node,
9555 NULL_TREE);
9556 local_define_builtin ("__builtin_clear_padding", ftype,
9557 BUILT_IN_CLEAR_PADDING,
9558 "__builtin_clear_padding",
9559 ECF_LEAF | ECF_NOTHROW);
9562 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9563 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9565 ftype = build_function_type (void_type_node, void_list_node);
9566 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9567 local_define_builtin ("__builtin_unreachable", ftype,
9568 BUILT_IN_UNREACHABLE,
9569 "__builtin_unreachable",
9570 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9571 | ECF_CONST | ECF_COLD);
9572 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9573 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9574 "abort",
9575 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9578 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9579 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9581 ftype = build_function_type_list (ptr_type_node,
9582 ptr_type_node, const_ptr_type_node,
9583 size_type_node, NULL_TREE);
9585 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9586 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9587 "memcpy", ECF_NOTHROW | ECF_LEAF);
9588 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9589 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9590 "memmove", ECF_NOTHROW | ECF_LEAF);
9593 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9595 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9596 const_ptr_type_node, size_type_node,
9597 NULL_TREE);
9598 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9599 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9602 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9604 ftype = build_function_type_list (ptr_type_node,
9605 ptr_type_node, integer_type_node,
9606 size_type_node, NULL_TREE);
9607 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9608 "memset", ECF_NOTHROW | ECF_LEAF);
9611 /* If we're checking the stack, `alloca' can throw. */
9612 const int alloca_flags
9613 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9615 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9617 ftype = build_function_type_list (ptr_type_node,
9618 size_type_node, NULL_TREE);
9619 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9620 "alloca", alloca_flags);
9623 ftype = build_function_type_list (ptr_type_node, size_type_node,
9624 size_type_node, NULL_TREE);
9625 local_define_builtin ("__builtin_alloca_with_align", ftype,
9626 BUILT_IN_ALLOCA_WITH_ALIGN,
9627 "__builtin_alloca_with_align",
9628 alloca_flags);
9630 ftype = build_function_type_list (ptr_type_node, size_type_node,
9631 size_type_node, size_type_node, NULL_TREE);
9632 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9633 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9634 "__builtin_alloca_with_align_and_max",
9635 alloca_flags);
9637 ftype = build_function_type_list (void_type_node,
9638 ptr_type_node, ptr_type_node,
9639 ptr_type_node, NULL_TREE);
9640 local_define_builtin ("__builtin_init_trampoline", ftype,
9641 BUILT_IN_INIT_TRAMPOLINE,
9642 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9643 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9644 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9645 "__builtin_init_heap_trampoline",
9646 ECF_NOTHROW | ECF_LEAF);
9647 local_define_builtin ("__builtin_init_descriptor", ftype,
9648 BUILT_IN_INIT_DESCRIPTOR,
9649 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9651 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9652 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9653 BUILT_IN_ADJUST_TRAMPOLINE,
9654 "__builtin_adjust_trampoline",
9655 ECF_CONST | ECF_NOTHROW);
9656 local_define_builtin ("__builtin_adjust_descriptor", ftype,
9657 BUILT_IN_ADJUST_DESCRIPTOR,
9658 "__builtin_adjust_descriptor",
9659 ECF_CONST | ECF_NOTHROW);
9661 ftype = build_function_type_list (void_type_node,
9662 ptr_type_node, ptr_type_node, NULL_TREE);
9663 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
9664 local_define_builtin ("__builtin___clear_cache", ftype,
9665 BUILT_IN_CLEAR_CACHE,
9666 "__clear_cache",
9667 ECF_NOTHROW);
9669 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9670 BUILT_IN_NONLOCAL_GOTO,
9671 "__builtin_nonlocal_goto",
9672 ECF_NORETURN | ECF_NOTHROW);
9674 ftype = build_function_type_list (void_type_node,
9675 ptr_type_node, ptr_type_node, NULL_TREE);
9676 local_define_builtin ("__builtin_setjmp_setup", ftype,
9677 BUILT_IN_SETJMP_SETUP,
9678 "__builtin_setjmp_setup", ECF_NOTHROW);
9680 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9681 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9682 BUILT_IN_SETJMP_RECEIVER,
9683 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9685 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9686 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9687 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9689 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9690 local_define_builtin ("__builtin_stack_restore", ftype,
9691 BUILT_IN_STACK_RESTORE,
9692 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9694 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9695 const_ptr_type_node, size_type_node,
9696 NULL_TREE);
9697 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
9698 "__builtin_memcmp_eq",
9699 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9701 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
9702 "__builtin_strncmp_eq",
9703 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9705 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
9706 "__builtin_strcmp_eq",
9707 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9709 /* If there's a possibility that we might use the ARM EABI, build the
9710 alternate __cxa_end_cleanup node used to resume from C++. */
9711 if (targetm.arm_eabi_unwinder)
9713 ftype = build_function_type_list (void_type_node, NULL_TREE);
9714 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9715 BUILT_IN_CXA_END_CLEANUP,
9716 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9719 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9720 local_define_builtin ("__builtin_unwind_resume", ftype,
9721 BUILT_IN_UNWIND_RESUME,
9722 ((targetm_common.except_unwind_info (&global_options)
9723 == UI_SJLJ)
9724 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9725 ECF_NORETURN);
9727 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9729 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9730 NULL_TREE);
9731 local_define_builtin ("__builtin_return_address", ftype,
9732 BUILT_IN_RETURN_ADDRESS,
9733 "__builtin_return_address",
9734 ECF_NOTHROW);
9737 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9738 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9740 ftype = build_function_type_list (void_type_node, ptr_type_node,
9741 ptr_type_node, NULL_TREE);
9742 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
9743 local_define_builtin ("__cyg_profile_func_enter", ftype,
9744 BUILT_IN_PROFILE_FUNC_ENTER,
9745 "__cyg_profile_func_enter", 0);
9746 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9747 local_define_builtin ("__cyg_profile_func_exit", ftype,
9748 BUILT_IN_PROFILE_FUNC_EXIT,
9749 "__cyg_profile_func_exit", 0);
9752 /* The exception object and filter values from the runtime. The argument
9753 must be zero before exception lowering, i.e. from the front end. After
9754 exception lowering, it will be the region number for the exception
9755 landing pad. These functions are PURE instead of CONST to prevent
9756 them from being hoisted past the exception edge that will initialize
9757 its value in the landing pad. */
9758 ftype = build_function_type_list (ptr_type_node,
9759 integer_type_node, NULL_TREE);
9760 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
9761 /* Only use TM_PURE if we have TM language support. */
9762 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
9763 ecf_flags |= ECF_TM_PURE;
9764 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
9765 "__builtin_eh_pointer", ecf_flags);
9767 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
9768 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
9769 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
9770 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9772 ftype = build_function_type_list (void_type_node,
9773 integer_type_node, integer_type_node,
9774 NULL_TREE);
9775 local_define_builtin ("__builtin_eh_copy_values", ftype,
9776 BUILT_IN_EH_COPY_VALUES,
9777 "__builtin_eh_copy_values", ECF_NOTHROW);
9779 /* Complex multiplication and division. These are handled as builtins
9780 rather than optabs because emit_library_call_value doesn't support
9781 complex. Further, we can do slightly better with folding these
9782 beasties if the real and complex parts of the arguments are separate. */
9784 int mode;
9786 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
9788 char mode_name_buf[4], *q;
9789 const char *p;
9790 enum built_in_function mcode, dcode;
9791 tree type, inner_type;
9792 const char *prefix = "__";
9794 if (targetm.libfunc_gnu_prefix)
9795 prefix = "__gnu_";
9797 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
9798 if (type == NULL)
9799 continue;
9800 inner_type = TREE_TYPE (type);
9802 ftype = build_function_type_list (type, inner_type, inner_type,
9803 inner_type, inner_type, NULL_TREE);
9805 mcode = ((enum built_in_function)
9806 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9807 dcode = ((enum built_in_function)
9808 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
9810 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
9811 *q = TOLOWER (*p);
9812 *q = '\0';
9814 /* For -ftrapping-math these should throw from a former
9815 -fnon-call-exception stmt. */
9816 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
9817 NULL);
9818 local_define_builtin (built_in_names[mcode], ftype, mcode,
9819 built_in_names[mcode],
9820 ECF_CONST | ECF_LEAF);
9822 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
9823 NULL);
9824 local_define_builtin (built_in_names[dcode], ftype, dcode,
9825 built_in_names[dcode],
9826 ECF_CONST | ECF_LEAF);
9830 init_internal_fns ();
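/* Worked example for the loop above (illustrative, assuming the default "__"
   prefix): for SCmode the lower-cased mode name is "sc", so the loop
   registers the libgcc routines __mulsc3 and __divsc3; with
   targetm.libfunc_gnu_prefix set it would register __gnu_mulsc3 and
   __gnu_divsc3 instead.  Each routine takes the real and imaginary parts
   of both operands separately, matching the four-argument FTYPE built
   above.  */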
9833 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
9834 better way.
9836 If we requested a pointer to a vector, build up the pointers that
9837 we stripped off while looking for the inner type. Similarly for
9838 return values from functions.
9840 The argument TYPE is the top of the chain, and BOTTOM is the
9841 new type which we will point to. */
9843 tree
9844 reconstruct_complex_type (tree type, tree bottom)
9846 tree inner, outer;
9848 if (TREE_CODE (type) == POINTER_TYPE)
9850 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9851 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
9852 TYPE_REF_CAN_ALIAS_ALL (type));
9854 else if (TREE_CODE (type) == REFERENCE_TYPE)
9856 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9857 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
9858 TYPE_REF_CAN_ALIAS_ALL (type));
9860 else if (TREE_CODE (type) == ARRAY_TYPE)
9862 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9863 outer = build_array_type (inner, TYPE_DOMAIN (type));
9865 else if (TREE_CODE (type) == FUNCTION_TYPE)
9867 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9868 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
9870 else if (TREE_CODE (type) == METHOD_TYPE)
9872 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9873 /* The build_method_type_directly() routine prepends 'this' to the argument
9874 list, so we must compensate by getting rid of it. */
9875 outer
9876 = build_method_type_directly
9877 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
9878 inner,
9879 TREE_CHAIN (TYPE_ARG_TYPES (type)));
9881 else if (TREE_CODE (type) == OFFSET_TYPE)
9883 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
9884 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
9886 else
9887 return bottom;
9889 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
9890 TYPE_QUALS (type));
9893 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
9894 the inner type. */
9895 tree
9896 build_vector_type_for_mode (tree innertype, machine_mode mode)
9898 poly_int64 nunits;
9899 unsigned int bitsize;
9901 switch (GET_MODE_CLASS (mode))
9903 case MODE_VECTOR_BOOL:
9904 case MODE_VECTOR_INT:
9905 case MODE_VECTOR_FLOAT:
9906 case MODE_VECTOR_FRACT:
9907 case MODE_VECTOR_UFRACT:
9908 case MODE_VECTOR_ACCUM:
9909 case MODE_VECTOR_UACCUM:
9910 nunits = GET_MODE_NUNITS (mode);
9911 break;
9913 case MODE_INT:
9914 /* Check that there are no leftover bits. */
9915 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
9916 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
9917 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
9918 break;
9920 default:
9921 gcc_unreachable ();
9924 return make_vector_type (innertype, nunits, mode);
9927 /* Similarly, but takes the inner type and number of units, which must be
9928 a power of two. */
9930 tree
9931 build_vector_type (tree innertype, poly_int64 nunits)
9933 return make_vector_type (innertype, nunits, VOIDmode);
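/* Usage sketch (illustrative; assumes a target that provides V4SImode):
   a four-element vector of 32-bit integers can be obtained either from
   the mode or from an explicit element count:

     tree v4si = build_vector_type_for_mode (intSI_type_node, V4SImode);
     tree v4si_alt = build_vector_type (intSI_type_node, 4);

   Both calls end up in make_vector_type; the first records V4SImode
   directly, the second lets the mode be derived from the element type
   and the number of units.  */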
9936 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
9938 tree
9939 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
9941 gcc_assert (mask_mode != BLKmode);
9943 unsigned HOST_WIDE_INT esize;
9944 if (VECTOR_MODE_P (mask_mode))
9946 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
9947 esize = vector_element_size (vsize, nunits);
9949 else
9950 esize = 1;
9952 tree bool_type = build_nonstandard_boolean_type (esize);
9954 return make_vector_type (bool_type, nunits, mask_mode);
9957 /* Build a vector type that holds one boolean result for each element of
9958 vector type VECTYPE. The public interface for this operation is
9959 truth_type_for. */
9961 static tree
9962 build_truth_vector_type_for (tree vectype)
9964 machine_mode vector_mode = TYPE_MODE (vectype);
9965 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
9967 machine_mode mask_mode;
9968 if (VECTOR_MODE_P (vector_mode)
9969 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
9970 return build_truth_vector_type_for_mode (nunits, mask_mode);
9972 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
9973 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
9974 tree bool_type = build_nonstandard_boolean_type (esize);
9976 return make_vector_type (bool_type, nunits, VOIDmode);
9979 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
9980 set. */
9982 tree
9983 build_opaque_vector_type (tree innertype, poly_int64 nunits)
9985 tree t = make_vector_type (innertype, nunits, VOIDmode);
9986 tree cand;
9987 /* We always build the non-opaque variant before the opaque one,
9988 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
9989 cand = TYPE_NEXT_VARIANT (t);
9990 if (cand
9991 && TYPE_VECTOR_OPAQUE (cand)
9992 && check_qualified_type (cand, t, TYPE_QUALS (t)))
9993 return cand;
9994 /* Otherwise build a variant type and make sure to queue it after
9995 the non-opaque type. */
9996 cand = build_distinct_type_copy (t);
9997 TYPE_VECTOR_OPAQUE (cand) = true;
9998 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
9999 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10000 TYPE_NEXT_VARIANT (t) = cand;
10001 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10002 return cand;
10005 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10007 static poly_wide_int
10008 vector_cst_int_elt (const_tree t, unsigned int i)
10010 /* First handle elements that are directly encoded. */
10011 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10012 if (i < encoded_nelts)
10013 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
10015 /* Identify the pattern that contains element I and work out the index of
10016 the last encoded element for that pattern. */
10017 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10018 unsigned int pattern = i % npatterns;
10019 unsigned int count = i / npatterns;
10020 unsigned int final_i = encoded_nelts - npatterns + pattern;
10022 /* If there are no steps, the final encoded value is the right one. */
10023 if (!VECTOR_CST_STEPPED_P (t))
10024 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10026 /* Otherwise work out the value from the last two encoded elements. */
10027 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10028 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10029 poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
10030 return wi::to_poly_wide (v2) + (count - 2) * diff;
10033 /* Return the value of element I of VECTOR_CST T. */
10035 tree
10036 vector_cst_elt (const_tree t, unsigned int i)
10038 /* First handle elements that are directly encoded. */
10039 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10040 if (i < encoded_nelts)
10041 return VECTOR_CST_ENCODED_ELT (t, i);
10043 /* If there are no steps, the final encoded value is the right one. */
10044 if (!VECTOR_CST_STEPPED_P (t))
10046 /* Identify the pattern that contains element I and work out the index of
10047 the last encoded element for that pattern. */
10048 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10049 unsigned int pattern = i % npatterns;
10050 unsigned int final_i = encoded_nelts - npatterns + pattern;
10051 return VECTOR_CST_ENCODED_ELT (t, final_i);
10054 /* Otherwise work out the value from the last two encoded elements. */
10055 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10056 vector_cst_int_elt (t, i));
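/* Worked example of the encoding used above (illustrative): the stepped
   constant { 0, 1, 2, 3, ... } is encoded with one pattern and three
   encoded elements { 0, 1, 2 }.  For i >= 3, vector_cst_int_elt
   extrapolates from the last two encoded elements of the pattern:
   2 + (i - 2) * (2 - 1) == i.  With two interleaved patterns, e.g.
   { 1, 11, 2, 12, 3, 13, ... }, element 6 belongs to pattern 0 and is
   computed as 3 + (6/2 - 2) * (3 - 2) == 4.  */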
10059 /* Given an initializer INIT, return TRUE if INIT is zero or some
10060 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10061 null, set *NONZERO if and only if INIT is known not to be all
10062 zeros. A return value of false combined with *NONZERO set to false
10063 implies that INIT may but need not be all zeros. Other
10064 combinations indicate definitive answers. */
10066 bool
10067 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10069 bool dummy;
10070 if (!nonzero)
10071 nonzero = &dummy;
10073 /* Conservatively clear NONZERO and set it only if INIT is definitely
10074 not all zero. */
10075 *nonzero = false;
10077 STRIP_NOPS (init);
10079 unsigned HOST_WIDE_INT off = 0;
10081 switch (TREE_CODE (init))
10083 case INTEGER_CST:
10084 if (integer_zerop (init))
10085 return true;
10087 *nonzero = true;
10088 return false;
10090 case REAL_CST:
10091 /* ??? Note that this is not correct for C4X float formats. There,
10092 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10093 negative exponent. */
10094 if (real_zerop (init)
10095 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10096 return true;
10098 *nonzero = true;
10099 return false;
10101 case FIXED_CST:
10102 if (fixed_zerop (init))
10103 return true;
10105 *nonzero = true;
10106 return false;
10108 case COMPLEX_CST:
10109 if (integer_zerop (init)
10110 || (real_zerop (init)
10111 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10112 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
10113 return true;
10115 *nonzero = true;
10116 return false;
10118 case VECTOR_CST:
10119 if (VECTOR_CST_NPATTERNS (init) == 1
10120 && VECTOR_CST_DUPLICATE_P (init)
10121 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
10122 return true;
10124 *nonzero = true;
10125 return false;
10127 case CONSTRUCTOR:
10129 if (TREE_CLOBBER_P (init))
10130 return false;
10132 unsigned HOST_WIDE_INT idx;
10133 tree elt;
10135 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10136 if (!initializer_zerop (elt, nonzero))
10137 return false;
10139 return true;
10142 case MEM_REF:
10144 tree arg = TREE_OPERAND (init, 0);
10145 if (TREE_CODE (arg) != ADDR_EXPR)
10146 return false;
10147 tree offset = TREE_OPERAND (init, 1);
10148 if (TREE_CODE (offset) != INTEGER_CST
10149 || !tree_fits_uhwi_p (offset))
10150 return false;
10151 off = tree_to_uhwi (offset);
10152 if (INT_MAX < off)
10153 return false;
10154 arg = TREE_OPERAND (arg, 0);
10155 if (TREE_CODE (arg) != STRING_CST)
10156 return false;
10157 init = arg;
10159 /* Fall through. */
10161 case STRING_CST:
10163 gcc_assert (off <= INT_MAX);
10165 int i = off;
10166 int n = TREE_STRING_LENGTH (init);
10167 if (n <= i)
10168 return false;
10170 /* We need to loop through all elements to handle cases like
10171 "\0" and "\0foobar". */
10172 for (i = 0; i < n; ++i)
10173 if (TREE_STRING_POINTER (init)[i] != '\0')
10175 *nonzero = true;
10176 return false;
10179 return true;
10182 default:
10183 return false;
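/* Minimal usage sketch (the helper below is hypothetical and only
   illustrates the return-value / *NONZERO protocol documented above).  */

static int
example_classify_initializer (const_tree init)
{
  bool nonzero = false;
  if (initializer_zerop (init, &nonzero))
    return 0;		/* Definitely all zeros.  */
  /* Otherwise NONZERO distinguishes "definitely not all zeros" from
     "may or may not be all zeros".  */
  return nonzero ? 1 : -1;
}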
10187 /* Return true if EXPR is an initializer expression in which every element
10188 is a constant that is numerically equal to 0 or 1. The elements do not
10189 need to be equal to each other. */
10191 bool
10192 initializer_each_zero_or_onep (const_tree expr)
10194 STRIP_ANY_LOCATION_WRAPPER (expr);
10196 switch (TREE_CODE (expr))
10198 case INTEGER_CST:
10199 return integer_zerop (expr) || integer_onep (expr);
10201 case REAL_CST:
10202 return real_zerop (expr) || real_onep (expr);
10204 case VECTOR_CST:
10206 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10207 if (VECTOR_CST_STEPPED_P (expr)
10208 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10209 return false;
10211 for (unsigned int i = 0; i < nelts; ++i)
10213 tree elt = vector_cst_elt (expr, i);
10214 if (!initializer_each_zero_or_onep (elt))
10215 return false;
10218 return true;
10221 default:
10222 return false;
10226 /* Check whether vector VEC consists of all equal elements and whether
10227 the number of elements corresponds to the type of VEC.
10228 The function returns the first element of the vector
10229 or NULL_TREE if the vector is not uniform. */
10230 tree
10231 uniform_vector_p (const_tree vec)
10233 tree first, t;
10234 unsigned HOST_WIDE_INT i, nelts;
10236 if (vec == NULL_TREE)
10237 return NULL_TREE;
10239 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10241 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10242 return TREE_OPERAND (vec, 0);
10244 else if (TREE_CODE (vec) == VECTOR_CST)
10246 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10247 return VECTOR_CST_ENCODED_ELT (vec, 0);
10248 return NULL_TREE;
10251 else if (TREE_CODE (vec) == CONSTRUCTOR
10252 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10254 first = error_mark_node;
10256 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10258 if (i == 0)
10260 first = t;
10261 continue;
10263 if (!operand_equal_p (first, t, 0))
10264 return NULL_TREE;
10266 if (i != nelts)
10267 return NULL_TREE;
10269 return first;
10272 return NULL_TREE;
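/* Worked example (illustrative): for a four-element vector, the
   VECTOR_CST { 7, 7, 7, 7 } is a one-pattern duplicate, so the function
   returns the INTEGER_CST 7; a CONSTRUCTOR { 7, 7, 7, 7 } likewise
   returns 7, but a CONSTRUCTOR { 7, 7 } yields NULL_TREE because the
   missing trailing elements are implicitly zero, which the i != nelts
   check above guards against.  */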
10275 /* If the argument is INTEGER_CST, return it. If the argument is vector
10276 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10277 return NULL_TREE.
10278 Look through location wrappers. */
10280 tree
10281 uniform_integer_cst_p (tree t)
10283 STRIP_ANY_LOCATION_WRAPPER (t);
10285 if (TREE_CODE (t) == INTEGER_CST)
10286 return t;
10288 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10290 t = uniform_vector_p (t);
10291 if (t && TREE_CODE (t) == INTEGER_CST)
10292 return t;
10295 return NULL_TREE;
10298 /* Checks to see if T is a constant or a constant vector and if each element E
10299 adheres to ~E + 1 == pow2 then return ~E otherwise NULL_TREE. */
10301 tree
10302 bitmask_inv_cst_vector_p (tree t)
10305 tree_code code = TREE_CODE (t);
10306 tree type = TREE_TYPE (t);
10308 if (!INTEGRAL_TYPE_P (type)
10309 && !VECTOR_INTEGER_TYPE_P (type))
10310 return NULL_TREE;
10312 unsigned HOST_WIDE_INT nelts = 1;
10313 tree cst;
10314 unsigned int idx = 0;
10315 bool uniform = uniform_integer_cst_p (t);
10316 tree newtype = unsigned_type_for (type);
10317 tree_vector_builder builder;
10318 if (code == INTEGER_CST)
10319 cst = t;
10320 else
10322 if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
10323 return NULL_TREE;
10325 cst = vector_cst_elt (t, 0);
10326 builder.new_vector (newtype, nelts, 1);
10329 tree ty = unsigned_type_for (TREE_TYPE (cst));
10333 if (idx > 0)
10334 cst = vector_cst_elt (t, idx);
10335 wide_int icst = wi::to_wide (cst);
10336 wide_int inv = wi::bit_not (icst);
10337 icst = wi::add (1, inv);
10338 if (wi::popcount (icst) != 1)
10339 return NULL_TREE;
10341 tree newcst = wide_int_to_tree (ty, inv);
10343 if (uniform)
10344 return build_uniform_cst (newtype, newcst);
10346 builder.quick_push (newcst);
10348 while (++idx < nelts);
10350 return builder.build ();
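/* Worked example (illustrative, 16-bit elements): for E = 0xfff0 we have
   ~E = 0x000f and ~E + 1 = 0x0010, a power of two, so the function
   returns 0x000f in the corresponding unsigned type (a vector of such
   values for a VECTOR_CST input).  For E = 0xff0f, ~E + 1 = 0x00f1 is
   not a power of two and NULL_TREE is returned.  */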
10353 /* If VECTOR_CST T has a single nonzero element, return the index of that
10354 element, otherwise return -1. */
10357 single_nonzero_element (const_tree t)
10359 unsigned HOST_WIDE_INT nelts;
10360 unsigned int repeat_nelts;
10361 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
10362 repeat_nelts = nelts;
10363 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
10365 nelts = vector_cst_encoded_nelts (t);
10366 repeat_nelts = VECTOR_CST_NPATTERNS (t);
10368 else
10369 return -1;
10371 int res = -1;
10372 for (unsigned int i = 0; i < nelts; ++i)
10374 tree elt = vector_cst_elt (t, i);
10375 if (!integer_zerop (elt) && !real_zerop (elt))
10377 if (res >= 0 || i >= repeat_nelts)
10378 return -1;
10379 res = i;
10382 return res;
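/* Worked example (illustrative): for the VECTOR_CST { 0, 0, 5, 0 } the
   function returns 2; for { 0, 1, 1, 0 } it returns -1 because two
   elements are nonzero.  Floating-point vectors are handled the same
   way via real_zerop.  */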
10385 /* Build an empty statement at location LOC. */
10387 tree
10388 build_empty_stmt (location_t loc)
10390 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10391 SET_EXPR_LOCATION (t, loc);
10392 return t;
10396 /* Build an OMP clause with code CODE. LOC is the location of the
10397 clause. */
10399 tree
10400 build_omp_clause (location_t loc, enum omp_clause_code code)
10402 tree t;
10403 int size, length;
10405 length = omp_clause_num_ops[code];
10406 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10408 record_node_allocation_statistics (OMP_CLAUSE, size);
10410 t = (tree) ggc_internal_alloc (size);
10411 memset (t, 0, size);
10412 TREE_SET_CODE (t, OMP_CLAUSE);
10413 OMP_CLAUSE_SET_CODE (t, code);
10414 OMP_CLAUSE_LOCATION (t) = loc;
10416 return t;
10419 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10420 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10421 Except for the CODE and operand count field, other storage for the
10422 object is initialized to zeros. */
10424 tree
10425 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10427 tree t;
10428 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10430 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10431 gcc_assert (len >= 1);
10433 record_node_allocation_statistics (code, length);
10435 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10437 TREE_SET_CODE (t, code);
10439 /* Can't use TREE_OPERAND to store the length because if checking is
10440 enabled, it will try to check the length before we store it. :-P */
10441 t->exp.operands[0] = build_int_cst (sizetype, len);
10443 return t;
10446 /* Helper function for build_call_* functions; build a CALL_EXPR with
10447 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10448 the argument slots. */
10450 static tree
10451 build_call_1 (tree return_type, tree fn, int nargs)
10453 tree t;
10455 t = build_vl_exp (CALL_EXPR, nargs + 3);
10456 TREE_TYPE (t) = return_type;
10457 CALL_EXPR_FN (t) = fn;
10458 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10460 return t;
10463 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10464 FN and a null static chain slot. NARGS is the number of call arguments
10465 which are specified as "..." arguments. */
10467 tree
10468 build_call_nary (tree return_type, tree fn, int nargs, ...)
10470 tree ret;
10471 va_list args;
10472 va_start (args, nargs);
10473 ret = build_call_valist (return_type, fn, nargs, args);
10474 va_end (args);
10475 return ret;
10478 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10479 FN and a null static chain slot. NARGS is the number of call arguments
10480 which are specified as a va_list ARGS. */
10482 tree
10483 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10485 tree t;
10486 int i;
10488 t = build_call_1 (return_type, fn, nargs);
10489 for (i = 0; i < nargs; i++)
10490 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10491 process_call_operands (t);
10492 return t;
10495 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10496 FN and a null static chain slot. NARGS is the number of call arguments
10497 which are specified as a tree array ARGS. */
10499 tree
10500 build_call_array_loc (location_t loc, tree return_type, tree fn,
10501 int nargs, const tree *args)
10503 tree t;
10504 int i;
10506 t = build_call_1 (return_type, fn, nargs);
10507 for (i = 0; i < nargs; i++)
10508 CALL_EXPR_ARG (t, i) = args[i];
10509 process_call_operands (t);
10510 SET_EXPR_LOCATION (t, loc);
10511 return t;
10514 /* Like build_call_array, but takes a vec. */
10516 tree
10517 build_call_vec (tree return_type, tree fn, const vec<tree, va_gc> *args)
10519 tree ret, t;
10520 unsigned int ix;
10522 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10523 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10524 CALL_EXPR_ARG (ret, ix) = t;
10525 process_call_operands (ret);
10526 return ret;
10529 /* Conveniently construct a function call expression. FNDECL names the
10530 function to be called and N arguments are passed in the array
10531 ARGARRAY. */
10533 tree
10534 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10536 tree fntype = TREE_TYPE (fndecl);
10537 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10539 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10542 /* Conveniently construct a function call expression. FNDECL names the
10543 function to be called and the arguments are passed in the vector
10544 VEC. */
10546 tree
10547 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10549 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10550 vec_safe_address (vec));
10554 /* Conveniently construct a function call expression. FNDECL names the
10555 function to be called, N is the number of arguments, and the "..."
10556 parameters are the argument expressions. */
10558 tree
10559 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10561 va_list ap;
10562 tree *argarray = XALLOCAVEC (tree, n);
10563 int i;
10565 va_start (ap, n);
10566 for (i = 0; i < n; i++)
10567 argarray[i] = va_arg (ap, tree);
10568 va_end (ap);
10569 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10572 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10573 varargs macros aren't supported by all bootstrap compilers. */
10575 tree
10576 build_call_expr (tree fndecl, int n, ...)
10578 va_list ap;
10579 tree *argarray = XALLOCAVEC (tree, n);
10580 int i;
10582 va_start (ap, n);
10583 for (i = 0; i < n; i++)
10584 argarray[i] = va_arg (ap, tree);
10585 va_end (ap);
10586 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
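/* Minimal usage sketch (the helper below is hypothetical, for
   illustration only): build a call to the memcpy built-in, passing the
   three operands as "..." arguments.  */

static tree
example_build_memcpy_call (tree dst, tree src, tree len)
{
  tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
  /* The resulting CALL_EXPR carries UNKNOWN_LOCATION; use
     build_call_expr_loc to attach a source location instead.  */
  return build_call_expr (fndecl, 3, dst, src, len);
}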
10589 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10590 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10591 It will get gimplified later into an ordinary internal function. */
10593 tree
10594 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10595 tree type, int n, const tree *args)
10597 tree t = build_call_1 (type, NULL_TREE, n);
10598 for (int i = 0; i < n; ++i)
10599 CALL_EXPR_ARG (t, i) = args[i];
10600 SET_EXPR_LOCATION (t, loc);
10601 CALL_EXPR_IFN (t) = ifn;
10602 process_call_operands (t);
10603 return t;
10606 /* Build internal call expression. This is just like CALL_EXPR, except
10607 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10608 internal function. */
10610 tree
10611 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10612 tree type, int n, ...)
10614 va_list ap;
10615 tree *argarray = XALLOCAVEC (tree, n);
10616 int i;
10618 va_start (ap, n);
10619 for (i = 0; i < n; i++)
10620 argarray[i] = va_arg (ap, tree);
10621 va_end (ap);
10622 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10625 /* Return a function call to FN, if the target is guaranteed to support it,
10626 or null otherwise.
10628 N is the number of arguments, passed in the "...", and TYPE is the
10629 type of the return value. */
10631 tree
10632 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10633 int n, ...)
10635 va_list ap;
10636 tree *argarray = XALLOCAVEC (tree, n);
10637 int i;
10639 va_start (ap, n);
10640 for (i = 0; i < n; i++)
10641 argarray[i] = va_arg (ap, tree);
10642 va_end (ap);
10643 if (internal_fn_p (fn))
10645 internal_fn ifn = as_internal_fn (fn);
10646 if (direct_internal_fn_p (ifn))
10648 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10649 if (!direct_internal_fn_supported_p (ifn, types,
10650 OPTIMIZE_FOR_BOTH))
10651 return NULL_TREE;
10653 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10655 else
10657 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10658 if (!fndecl)
10659 return NULL_TREE;
10660 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10664 /* Return a function call to the appropriate builtin alloca variant.
10666 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10667 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10668 bound for SIZE in case it is not a fixed value. */
10670 tree
10671 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10673 if (max_size >= 0)
10675 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10676 return
10677 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10679 else if (align > 0)
10681 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10682 return build_call_expr (t, 2, size, size_int (align));
10684 else
10686 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10687 return build_call_expr (t, 1, size);
10691 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10692 if SIZE == -1) and return a tree node representing a char* pointer to
10693 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10694 the STRING_CST value is the LEN bytes at STR (the representation
10695 of the string, which may be wide). Otherwise it's all zeros. */
10697 tree
10698 build_string_literal (unsigned len, const char *str /* = NULL */,
10699 tree eltype /* = char_type_node */,
10700 unsigned HOST_WIDE_INT size /* = -1 */)
10702 tree t = build_string (len, str);
10703 /* Set the maximum valid index based on the string length or SIZE. */
10704 unsigned HOST_WIDE_INT maxidx
10705 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
10707 tree index = build_index_type (size_int (maxidx));
10708 eltype = build_type_variant (eltype, 1, 0);
10709 tree type = build_array_type (eltype, index);
10710 TREE_TYPE (t) = type;
10711 TREE_CONSTANT (t) = 1;
10712 TREE_READONLY (t) = 1;
10713 TREE_STATIC (t) = 1;
10715 type = build_pointer_type (eltype);
10716 t = build1 (ADDR_EXPR, type,
10717 build4 (ARRAY_REF, eltype,
10718 t, integer_zero_node, NULL_TREE, NULL_TREE));
10719 return t;
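/* Usage sketch (illustrative): the address of a NUL-terminated string
   constant, e.g. for expanding a built-in, can be obtained with

     tree hello = build_string_literal (6, "hello");

   which yields an ADDR_EXPR of type pointer-to-const-char whose operand
   is an ARRAY_REF of element 0 of the underlying STRING_CST.  */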
10724 /* Return true if T (assumed to be a DECL) must be assigned a memory
10725 location. */
10727 bool
10728 needs_to_live_in_memory (const_tree t)
10730 return (TREE_ADDRESSABLE (t)
10731 || is_global_var (t)
10732 || (TREE_CODE (t) == RESULT_DECL
10733 && !DECL_BY_REFERENCE (t)
10734 && aggregate_value_p (t, current_function_decl)));
10737 /* Return the value of the constant X, sign-extended. */
10739 HOST_WIDE_INT
10740 int_cst_value (const_tree x)
10742 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10743 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10745 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10746 gcc_assert (cst_and_fits_in_hwi (x));
10748 if (bits < HOST_BITS_PER_WIDE_INT)
10750 bool negative = ((val >> (bits - 1)) & 1) != 0;
10751 if (negative)
10752 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
10753 else
10754 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
10757 return val;
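/* Worked example (illustrative): for a constant of 8-bit precision the
   value is sign-extended from bit 7 regardless of the type's signedness,
   so a low byte of 0xff yields -1 while 0x7f yields 127.  */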
10760 /* If TYPE is an integral or pointer type, return an integer type with
10761 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10762 if TYPE is already an integer type of signedness UNSIGNEDP.
10763 If TYPE is a floating-point type, return an integer type with the same
10764 bitsize and with the signedness given by UNSIGNEDP; this is useful
10765 when doing bit-level operations on a floating-point value. */
10767 tree
10768 signed_or_unsigned_type_for (int unsignedp, tree type)
10770 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
10771 return type;
10773 if (TREE_CODE (type) == VECTOR_TYPE)
10775 tree inner = TREE_TYPE (type);
10776 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10777 if (!inner2)
10778 return NULL_TREE;
10779 if (inner == inner2)
10780 return type;
10781 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10784 if (TREE_CODE (type) == COMPLEX_TYPE)
10786 tree inner = TREE_TYPE (type);
10787 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10788 if (!inner2)
10789 return NULL_TREE;
10790 if (inner == inner2)
10791 return type;
10792 return build_complex_type (inner2);
10795 unsigned int bits;
10796 if (INTEGRAL_TYPE_P (type)
10797 || POINTER_TYPE_P (type)
10798 || TREE_CODE (type) == OFFSET_TYPE)
10799 bits = TYPE_PRECISION (type);
10800 else if (TREE_CODE (type) == REAL_TYPE)
10801 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
10802 else
10803 return NULL_TREE;
10805 return build_nonstandard_integer_type (bits, unsignedp);
10808 /* If TYPE is an integral or pointer type, return an integer type with
10809 the same precision which is unsigned, or itself if TYPE is already an
10810 unsigned integer type. If TYPE is a floating-point type, return an
10811 unsigned integer type with the same bitsize as TYPE. */
10813 tree
10814 unsigned_type_for (tree type)
10816 return signed_or_unsigned_type_for (1, type);
10819 /* If TYPE is an integral or pointer type, return an integer type with
10820 the same precision which is signed, or itself if TYPE is already a
10821 signed integer type. If TYPE is a floating-point type, return a
10822 signed integer type with the same bitsize as TYPE. */
10824 tree
10825 signed_type_for (tree type)
10827 return signed_or_unsigned_type_for (0, type);
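/* Worked examples (illustrative): unsigned_type_for on a 32-bit signed
   integer type yields a 32-bit unsigned integer type; on a 64-bit
   pointer type it yields a 64-bit unsigned integer type; and
   signed_type_for on a 32-bit float type yields a 32-bit signed integer
   type, which is convenient for bit-level manipulation of the
   representation.  */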
10830 /* - For VECTOR_TYPEs:
10831 - The truth type must be a VECTOR_BOOLEAN_TYPE.
10832 - The number of elements must match (known_eq).
10833 - targetm.vectorize.get_mask_mode exists, and returns exactly
10834 the same mode as the truth type.
10835 - Otherwise, the truth type must be a BOOLEAN_TYPE
10836 or useless_type_conversion_p to BOOLEAN_TYPE. */
10837 bool
10838 is_truth_type_for (tree type, tree truth_type)
10840 machine_mode mask_mode = TYPE_MODE (truth_type);
10841 machine_mode vmode = TYPE_MODE (type);
10842 machine_mode tmask_mode;
10844 if (TREE_CODE (type) == VECTOR_TYPE)
10846 if (VECTOR_BOOLEAN_TYPE_P (truth_type)
10847 && known_eq (TYPE_VECTOR_SUBPARTS (type),
10848 TYPE_VECTOR_SUBPARTS (truth_type))
10849 && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
10850 && tmask_mode == mask_mode)
10851 return true;
10853 return false;
10856 return useless_type_conversion_p (boolean_type_node, truth_type);
10859 /* If TYPE is a vector type, return a signed integer vector type with the
10860 same width and number of subparts. Otherwise return boolean_type_node. */
10862 tree
10863 truth_type_for (tree type)
10865 if (TREE_CODE (type) == VECTOR_TYPE)
10867 if (VECTOR_BOOLEAN_TYPE_P (type))
10868 return type;
10869 return build_truth_vector_type_for (type);
10871 else
10872 return boolean_type_node;
10875 /* Returns the largest value obtainable by casting something in INNER type to
10876 OUTER type. */
10878 tree
10879 upper_bound_in_type (tree outer, tree inner)
10881 unsigned int det = 0;
10882 unsigned oprec = TYPE_PRECISION (outer);
10883 unsigned iprec = TYPE_PRECISION (inner);
10884 unsigned prec;
10886 /* Compute a unique number for every combination. */
10887 det |= (oprec > iprec) ? 4 : 0;
10888 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10889 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10891 /* Determine the exponent to use. */
10892 switch (det)
10894 case 0:
10895 case 1:
10896 /* oprec <= iprec, outer: signed, inner: don't care. */
10897 prec = oprec - 1;
10898 break;
10899 case 2:
10900 case 3:
10901 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10902 prec = oprec;
10903 break;
10904 case 4:
10905 /* oprec > iprec, outer: signed, inner: signed. */
10906 prec = iprec - 1;
10907 break;
10908 case 5:
10909 /* oprec > iprec, outer: signed, inner: unsigned. */
10910 prec = iprec;
10911 break;
10912 case 6:
10913 /* oprec > iprec, outer: unsigned, inner: signed. */
10914 prec = oprec;
10915 break;
10916 case 7:
10917 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10918 prec = iprec;
10919 break;
10920 default:
10921 gcc_unreachable ();
10924 return wide_int_to_tree (outer,
10925 wi::mask (prec, false, TYPE_PRECISION (outer)));
10928 /* Returns the smallest value obtainable by casting something in INNER type to
10929 OUTER type. */
10931 tree
10932 lower_bound_in_type (tree outer, tree inner)
10934 unsigned oprec = TYPE_PRECISION (outer);
10935 unsigned iprec = TYPE_PRECISION (inner);
10937 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10938 and obtain 0. */
10939 if (TYPE_UNSIGNED (outer)
10940 /* If we are widening something of an unsigned type, OUTER type
10941 contains all values of INNER type. In particular, both INNER
10942 and OUTER types have zero in common. */
10943 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10944 return build_int_cst (outer, 0);
10945 else
10947 /* If we are widening a signed type to another signed type, we
10948 want to obtain -2^^(iprec-1). If we are keeping the
10949 precision or narrowing to a signed type, we want to obtain
10950 -2^(oprec-1). */
10951 unsigned prec = oprec > iprec ? iprec : oprec;
10952 return wide_int_to_tree (outer,
10953 wi::mask (prec - 1, true,
10954 TYPE_PRECISION (outer)));
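/* Worked examples (illustrative): casting a 16-bit unsigned value to a
   32-bit signed type gives det == 5 above, so upper_bound_in_type
   returns 0xffff; lower_bound_in_type for a 16-bit signed OUTER and a
   32-bit signed INNER uses prec == 16 and returns -32768.  */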
10958 /* Return nonzero if two operands that are suitable for PHI nodes are
10959 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10960 SSA_NAME or invariant. Note that this is strictly an optimization.
10961 That is, callers of this function can directly call operand_equal_p
10962 and get the same result, only slower. */
10965 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10967 if (arg0 == arg1)
10968 return 1;
10969 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10970 return 0;
10971 return operand_equal_p (arg0, arg1, 0);
10974 /* Returns the number of zeros at the end of the binary representation of X. */
10976 tree
10977 num_ending_zeros (const_tree x)
10979 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
10983 #define WALK_SUBTREE(NODE) \
10984 do \
10986 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10987 if (result) \
10988 return result; \
10990 while (0)
10992 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10993 to be walked whenever a type is seen in the tree. The rest of the operands
10994 and the return value are as for walk_tree. */
10996 static tree
10997 walk_type_fields (tree type, walk_tree_fn func, void *data,
10998 hash_set<tree> *pset, walk_tree_lh lh)
11000 tree result = NULL_TREE;
11002 switch (TREE_CODE (type))
11004 case POINTER_TYPE:
11005 case REFERENCE_TYPE:
11006 case VECTOR_TYPE:
11007 /* We have to worry about mutually recursive pointers. These can't
11008 be written in C. They can in Ada. It's pathological, but
11009 there's an ACATS test (c38102a) that checks it. Deal with this
11010 by checking if we're pointing to another pointer, that one
11011 points to another pointer, that one does too, and we have no htab.
11012 If so, get a hash table. We check three levels deep to avoid
11013 the cost of the hash table if we don't need one. */
11014 if (POINTER_TYPE_P (TREE_TYPE (type))
11015 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11016 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11017 && !pset)
11019 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11020 func, data);
11021 if (result)
11022 return result;
11024 break;
11027 /* fall through */
11029 case COMPLEX_TYPE:
11030 WALK_SUBTREE (TREE_TYPE (type));
11031 break;
11033 case METHOD_TYPE:
11034 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11036 /* Fall through. */
11038 case FUNCTION_TYPE:
11039 WALK_SUBTREE (TREE_TYPE (type));
11041 tree arg;
11043 /* We never want to walk into default arguments. */
11044 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11045 WALK_SUBTREE (TREE_VALUE (arg));
11047 break;
11049 case ARRAY_TYPE:
11050 /* Don't follow this node's type if it is a pointer, for fear that
11051 we'll have infinite recursion. If we have a PSET, then we
11052 need not fear. */
11053 if (pset
11054 || (!POINTER_TYPE_P (TREE_TYPE (type))
11055 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11056 WALK_SUBTREE (TREE_TYPE (type));
11057 WALK_SUBTREE (TYPE_DOMAIN (type));
11058 break;
11060 case OFFSET_TYPE:
11061 WALK_SUBTREE (TREE_TYPE (type));
11062 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11063 break;
11065 default:
11066 break;
11069 return NULL_TREE;
11072 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11073 called with the DATA and the address of each sub-tree. If FUNC returns a
11074 non-NULL value, the traversal is stopped, and the value returned by FUNC
11075 is returned. If PSET is non-NULL it is used to record the nodes visited,
11076 and to avoid visiting a node more than once. */
11078 tree
11079 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11080 hash_set<tree> *pset, walk_tree_lh lh)
11082 enum tree_code code;
11083 int walk_subtrees;
11084 tree result;
11086 #define WALK_SUBTREE_TAIL(NODE) \
11087 do \
11089 tp = & (NODE); \
11090 goto tail_recurse; \
11092 while (0)
11094 tail_recurse:
11095 /* Skip empty subtrees. */
11096 if (!*tp)
11097 return NULL_TREE;
11099 /* Don't walk the same tree twice, if the user has requested
11100 that we avoid doing so. */
11101 if (pset && pset->add (*tp))
11102 return NULL_TREE;
11104 /* Call the function. */
11105 walk_subtrees = 1;
11106 result = (*func) (tp, &walk_subtrees, data);
11108 /* If we found something, return it. */
11109 if (result)
11110 return result;
11112 code = TREE_CODE (*tp);
11114 /* Even if we didn't, FUNC may have decided that there was nothing
11115 interesting below this point in the tree. */
11116 if (!walk_subtrees)
11118 /* But we still need to check our siblings. */
11119 if (code == TREE_LIST)
11120 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11121 else if (code == OMP_CLAUSE)
11122 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11123 else
11124 return NULL_TREE;
11127 if (lh)
11129 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11130 if (result || !walk_subtrees)
11131 return result;
11134 switch (code)
11136 case ERROR_MARK:
11137 case IDENTIFIER_NODE:
11138 case INTEGER_CST:
11139 case REAL_CST:
11140 case FIXED_CST:
11141 case STRING_CST:
11142 case BLOCK:
11143 case PLACEHOLDER_EXPR:
11144 case SSA_NAME:
11145 case FIELD_DECL:
11146 case RESULT_DECL:
11147 /* None of these have subtrees other than those already walked
11148 above. */
11149 break;
11151 case TREE_LIST:
11152 WALK_SUBTREE (TREE_VALUE (*tp));
11153 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11155 case TREE_VEC:
11157 int len = TREE_VEC_LENGTH (*tp);
11159 if (len == 0)
11160 break;
11162 /* Walk all elements but the first. */
11163 while (--len)
11164 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11166 /* Now walk the first one as a tail call. */
11167 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11170 case VECTOR_CST:
11172 unsigned len = vector_cst_encoded_nelts (*tp);
11173 if (len == 0)
11174 break;
11175 /* Walk all elements but the first. */
11176 while (--len)
11177 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
11178 /* Now walk the first one as a tail call. */
11179 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
11182 case COMPLEX_CST:
11183 WALK_SUBTREE (TREE_REALPART (*tp));
11184 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11186 case CONSTRUCTOR:
11188 unsigned HOST_WIDE_INT idx;
11189 constructor_elt *ce;
11191 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11192 idx++)
11193 WALK_SUBTREE (ce->value);
11195 break;
11197 case SAVE_EXPR:
11198 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11200 case BIND_EXPR:
11202 tree decl;
11203 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11205 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11206 into declarations that are just mentioned, rather than
11207 declared; they don't really belong to this part of the tree.
11208 And, we can see cycles: the initializer for a declaration
11209 can refer to the declaration itself. */
11210 WALK_SUBTREE (DECL_INITIAL (decl));
11211 WALK_SUBTREE (DECL_SIZE (decl));
11212 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11214 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11217 case STATEMENT_LIST:
11219 tree_stmt_iterator i;
11220 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11221 WALK_SUBTREE (*tsi_stmt_ptr (i));
11223 break;
11225 case OMP_CLAUSE:
11227 int len = omp_clause_num_ops[OMP_CLAUSE_CODE (*tp)];
11228 for (int i = 0; i < len; i++)
11229 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11230 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11233 case TARGET_EXPR:
11235 int i, len;
11237 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11238 But, we only want to walk once. */
11239 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11240 for (i = 0; i < len; ++i)
11241 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11242 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11245 case DECL_EXPR:
11246 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11247 defining. We only want to walk into these fields of a type in this
11248 case and not in the general case of a mere reference to the type.
11250 The criterion is as follows: if the field can be an expression, it
11251 must be walked only here. This should be in keeping with the fields
11252 that are directly gimplified in gimplify_type_sizes in order for the
11253 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11254 variable-sized types.
11256 Note that DECLs get walked as part of processing the BIND_EXPR. */
11257 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11259 /* Call the function for the decl so e.g. copy_tree_body_r can
11260 replace it with the remapped one. */
11261 result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
11262 if (result || !walk_subtrees)
11263 return result;
11265 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11266 if (TREE_CODE (*type_p) == ERROR_MARK)
11267 return NULL_TREE;
11269 /* Call the function for the type. See if it returns anything or
11270 doesn't want us to continue. If we are to continue, walk both
11271 the normal fields and those for the declaration case. */
11272 result = (*func) (type_p, &walk_subtrees, data);
11273 if (result || !walk_subtrees)
11274 return result;
11276 /* But do not walk a pointed-to type since it may itself need to
11277 be walked in the declaration case if it isn't anonymous. */
11278 if (!POINTER_TYPE_P (*type_p))
11280 result = walk_type_fields (*type_p, func, data, pset, lh);
11281 if (result)
11282 return result;
11285 /* If this is a record type, also walk the fields. */
11286 if (RECORD_OR_UNION_TYPE_P (*type_p))
11288 tree field;
11290 for (field = TYPE_FIELDS (*type_p); field;
11291 field = DECL_CHAIN (field))
11293 /* We'd like to look at the type of the field, but we can
11294 easily get infinite recursion. So assume it's pointed
11295 to elsewhere in the tree. Also, ignore things that
11296 aren't fields. */
11297 if (TREE_CODE (field) != FIELD_DECL)
11298 continue;
11300 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11301 WALK_SUBTREE (DECL_SIZE (field));
11302 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11303 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11304 WALK_SUBTREE (DECL_QUALIFIER (field));
11308 /* Same for scalar types. */
11309 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11310 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11311 || TREE_CODE (*type_p) == INTEGER_TYPE
11312 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11313 || TREE_CODE (*type_p) == REAL_TYPE)
11315 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11316 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11319 WALK_SUBTREE (TYPE_SIZE (*type_p));
11320 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11322 /* FALLTHRU */
11324 default:
11325 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11327 int i, len;
11329 /* Walk over all the sub-trees of this operand. */
11330 len = TREE_OPERAND_LENGTH (*tp);
11332 /* Go through the subtrees. We need to do this in forward order so
11333 that the scope of a FOR_EXPR is handled properly. */
11334 if (len)
11336 for (i = 0; i < len - 1; ++i)
11337 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11338 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11341 /* If this is a type, walk the needed fields in the type. */
11342 else if (TYPE_P (*tp))
11343 return walk_type_fields (*tp, func, data, pset, lh);
11344 break;
11347 /* We didn't find what we were looking for. */
11348 return NULL_TREE;
11350 #undef WALK_SUBTREE_TAIL
11352 #undef WALK_SUBTREE
11354 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11356 tree
11357 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11358 walk_tree_lh lh)
11360 tree result;
11362 hash_set<tree> pset;
11363 result = walk_tree_1 (tp, func, data, &pset, lh);
11364 return result;
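/* Minimal usage sketch (the callback below is hypothetical, for
   illustration only): count the INTEGER_CST nodes reachable from an
   expression.  DATA points at the running counter; returning NULL_TREE
   keeps the walk going.  */

static tree
example_count_int_csts_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
			  void *data)
{
  if (TREE_CODE (*tp) == INTEGER_CST)
    ++*(unsigned int *) data;
  return NULL_TREE;
}

/* Invoked as
     walk_tree_without_duplicates (&expr, example_count_int_csts_r, &count);
   so that shared subtrees are visited only once.  */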
11368 tree
11369 tree_block (tree t)
11371 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11373 if (IS_EXPR_CODE_CLASS (c))
11374 return LOCATION_BLOCK (t->exp.locus);
11375 gcc_unreachable ();
11376 return NULL;
11379 void
11380 tree_set_block (tree t, tree b)
11382 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11384 if (IS_EXPR_CODE_CLASS (c))
11386 t->exp.locus = set_block (t->exp.locus, b);
11388 else
11389 gcc_unreachable ();
11392 /* Create a nameless artificial label and put it in the current
11393 function context. The label has a location of LOC. Returns the
11394 newly created label. */
11396 tree
11397 create_artificial_label (location_t loc)
11399 tree lab = build_decl (loc,
11400 LABEL_DECL, NULL_TREE, void_type_node);
11402 DECL_ARTIFICIAL (lab) = 1;
11403 DECL_IGNORED_P (lab) = 1;
11404 DECL_CONTEXT (lab) = current_function_decl;
11405 return lab;
11408 /* Given a tree, try to return a useful variable name that we can use
11409 to prefix a temporary that is being assigned the value of the tree.
11410 I.E. given <temp> = &A, return A. */
11412 const char *
11413 get_name (tree t)
11415 tree stripped_decl;
11417 stripped_decl = t;
11418 STRIP_NOPS (stripped_decl);
11419 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11420 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11421 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11423 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11424 if (!name)
11425 return NULL;
11426 return IDENTIFIER_POINTER (name);
11428 else
11430 switch (TREE_CODE (stripped_decl))
11432 case ADDR_EXPR:
11433 return get_name (TREE_OPERAND (stripped_decl, 0));
11434 default:
11435 return NULL;
11440 /* Return true if TYPE has a variable argument list. */
11442 bool
11443 stdarg_p (const_tree fntype)
11445 function_args_iterator args_iter;
11446 tree n = NULL_TREE, t;
11448 if (!fntype)
11449 return false;
11451 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11453 n = t;
11456 return n != NULL_TREE && n != void_type_node;
11459 /* Return true if TYPE has a prototype. */
11461 bool
11462 prototype_p (const_tree fntype)
11464 tree t;
11466 gcc_assert (fntype != NULL_TREE);
11468 t = TYPE_ARG_TYPES (fntype);
11469 return (t != NULL_TREE);
11472 /* If BLOCK is inlined from an __attribute__((__artificial__))
11473 routine, return a pointer to the location from which it has
11474 been called. */
11475 location_t *
11476 block_nonartificial_location (tree block)
11478 location_t *ret = NULL;
11480 while (block && TREE_CODE (block) == BLOCK
11481 && BLOCK_ABSTRACT_ORIGIN (block))
11483 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11484 if (TREE_CODE (ao) == FUNCTION_DECL)
11486 /* If AO is an artificial inline, point RET to the
11487 call site locus at which it has been inlined and continue
11488 the loop, in case AO's caller is also an artificial
11489 inline. */
11490 if (DECL_DECLARED_INLINE_P (ao)
11491 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11492 ret = &BLOCK_SOURCE_LOCATION (block);
11493 else
11494 break;
11496 else if (TREE_CODE (ao) != BLOCK)
11497 break;
11499 block = BLOCK_SUPERCONTEXT (block);
11501 return ret;
11505 /* If EXP is inlined from an __attribute__((__artificial__))
11506 function, return the location of the original call expression. */
11508 location_t
11509 tree_nonartificial_location (tree exp)
11511 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11513 if (loc)
11514 return *loc;
11515 else
11516 return EXPR_LOCATION (exp);
11519 /* Return the location into which EXP has been inlined. Analogous
11520 to tree_nonartificial_location() above but not limited to artificial
11521 functions declared inline. If SYSTEM_HEADER is true, return
11522 the macro expansion point of the location if it's in a system header. */
11524 location_t
11525 tree_inlined_location (tree exp, bool system_header /* = true */)
11527 location_t loc = UNKNOWN_LOCATION;
11529 tree block = TREE_BLOCK (exp);
11531 while (block && TREE_CODE (block) == BLOCK
11532 && BLOCK_ABSTRACT_ORIGIN (block))
11534 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11535 if (TREE_CODE (ao) == FUNCTION_DECL)
11536 loc = BLOCK_SOURCE_LOCATION (block);
11537 else if (TREE_CODE (ao) != BLOCK)
11538 break;
11540 block = BLOCK_SUPERCONTEXT (block);
11543 if (loc == UNKNOWN_LOCATION)
11545 loc = EXPR_LOCATION (exp);
11546 if (system_header)
11547 /* Only consider macro expansion when the block traversal failed
11548 to find a location. Otherwise it's not relevant. */
11549 return expansion_point_location_if_in_system_header (loc);
11552 return loc;
11555 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11556 nodes. */
11558 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11560 hashval_t
11561 cl_option_hasher::hash (tree x)
11563 const_tree const t = x;
11565 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11566 return cl_optimization_hash (TREE_OPTIMIZATION (t));
11567 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11568 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11569 else
11570 gcc_unreachable ();
11573 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11574 TARGET_OPTION tree node) is the same as that given by *Y, which is a node
11575 of the same kind. */
11577 bool
11578 cl_option_hasher::equal (tree x, tree y)
11580 const_tree const xt = x;
11581 const_tree const yt = y;
11583 if (TREE_CODE (xt) != TREE_CODE (yt))
11584 return 0;
11586 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11587 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11588 TREE_OPTIMIZATION (yt));
11589 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11590 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11591 TREE_TARGET_OPTION (yt));
11592 else
11593 gcc_unreachable ();
11596 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11598 tree
11599 build_optimization_node (struct gcc_options *opts,
11600 struct gcc_options *opts_set)
11602 tree t;
11604 /* Use the cache of optimization nodes. */
11606 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11607 opts, opts_set);
11609 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11610 t = *slot;
11611 if (!t)
11613 /* Insert this one into the hash table. */
11614 t = cl_optimization_node;
11615 *slot = t;
11617 /* Make a new node for next time round. */
11618 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11621 return t;
11624 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11626 tree
11627 build_target_option_node (struct gcc_options *opts,
11628 struct gcc_options *opts_set)
11630 tree t;
11632 /* Use the cache of optimization nodes. */
11634 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11635 opts, opts_set);
11637 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11638 t = *slot;
11639 if (!t)
11641 /* Insert this one into the hash table. */
11642 t = cl_target_option_node;
11643 *slot = t;
11645 /* Make a new node for next time round. */
11646 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11649 return t;
11652 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11653 so that they aren't saved during PCH writing. */
11655 void
11656 prepare_target_option_nodes_for_pch (void)
11658 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11659 for (; iter != cl_option_hash_table->end (); ++iter)
11660 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11661 TREE_TARGET_GLOBALS (*iter) = NULL;
11664 /* Determine the "ultimate origin" of a block. */
11666 tree
11667 block_ultimate_origin (const_tree block)
11669 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11671 if (origin == NULL_TREE)
11672 return NULL_TREE;
11673 else
11675 gcc_checking_assert ((DECL_P (origin)
11676 && DECL_ORIGIN (origin) == origin)
11677 || BLOCK_ORIGIN (origin) == origin);
11678 return origin;
11682 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11683 no instruction. */
11685 bool
11686 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11688 /* Do not strip casts into or out of differing address spaces. */
11689 if (POINTER_TYPE_P (outer_type)
11690 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11692 if (!POINTER_TYPE_P (inner_type)
11693 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11694 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11695 return false;
11697 else if (POINTER_TYPE_P (inner_type)
11698 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11700 /* We already know that outer_type is not a pointer with
11701 a non-generic address space. */
11702 return false;
11705 /* Use precision rather than machine mode when we can, which gives
11706 the correct answer even for submode (bit-field) types. */
11707 if ((INTEGRAL_TYPE_P (outer_type)
11708 || POINTER_TYPE_P (outer_type)
11709 || TREE_CODE (outer_type) == OFFSET_TYPE)
11710 && (INTEGRAL_TYPE_P (inner_type)
11711 || POINTER_TYPE_P (inner_type)
11712 || TREE_CODE (inner_type) == OFFSET_TYPE))
11713 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11715 /* Otherwise fall back on comparing machine modes (e.g. for
11716 aggregate types, floats). */
11717 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11720 /* Return true iff conversion in EXP generates no instruction. Mark
11721 it inline so that we fully inline into the stripping functions even
11722 though we have two uses of this function. */
11724 static inline bool
11725 tree_nop_conversion (const_tree exp)
11727 tree outer_type, inner_type;
11729 if (location_wrapper_p (exp))
11730 return true;
11731 if (!CONVERT_EXPR_P (exp)
11732 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11733 return false;
11735 outer_type = TREE_TYPE (exp);
11736 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11737 if (!inner_type || inner_type == error_mark_node)
11738 return false;
11740 return tree_nop_conversion_p (outer_type, inner_type);
11743 /* Return true iff conversion in EXP generates no instruction. Don't
11744 consider conversions changing the signedness. */
11746 static bool
11747 tree_sign_nop_conversion (const_tree exp)
11749 tree outer_type, inner_type;
11751 if (!tree_nop_conversion (exp))
11752 return false;
11754 outer_type = TREE_TYPE (exp);
11755 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11757 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11758 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11761 /* Strip conversions from EXP according to tree_nop_conversion and
11762 return the resulting expression. */
11764 tree
11765 tree_strip_nop_conversions (tree exp)
11767 while (tree_nop_conversion (exp))
11768 exp = TREE_OPERAND (exp, 0);
11769 return exp;
11772 /* Strip conversions from EXP according to tree_sign_nop_conversion
11773 and return the resulting expression. */
11775 tree
11776 tree_strip_sign_nop_conversions (tree exp)
11778 while (tree_sign_nop_conversion (exp))
11779 exp = TREE_OPERAND (exp, 0);
11780 return exp;
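/* Illustrative sketch, not part of the sources: a same-precision cast
   wrapped around an INTEGER_CST is stripped by tree_strip_nop_conversions
   but kept by the sign-preserving variant, because the cast only changes
   signedness.  */
#if 0
  tree one = build_int_cst (integer_type_node, 1);
  tree cast = build1 (NOP_EXPR, unsigned_type_node, one);
  gcc_checking_assert (tree_strip_nop_conversions (cast) == one);
  gcc_checking_assert (tree_strip_sign_nop_conversions (cast) == cast);
#endif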
11783 /* Avoid any floating point extensions from EXP. */
11784 tree
11785 strip_float_extensions (tree exp)
11787 tree sub, expt, subt;
11789 /* For a floating point constant, look up the narrowest type that can hold
11790 it properly and handle it like (type)(narrowest_type)constant.
11791 This way we can optimize for instance a=a*2.0 where "a" is float
11792 but 2.0 is a double constant. */
11793 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11795 REAL_VALUE_TYPE orig;
11796 tree type = NULL;
11798 orig = TREE_REAL_CST (exp);
11799 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11800 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11801 type = float_type_node;
11802 else if (TYPE_PRECISION (TREE_TYPE (exp))
11803 > TYPE_PRECISION (double_type_node)
11804 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11805 type = double_type_node;
11806 if (type)
11807 return build_real_truncate (type, orig);
11810 if (!CONVERT_EXPR_P (exp))
11811 return exp;
11813 sub = TREE_OPERAND (exp, 0);
11814 subt = TREE_TYPE (sub);
11815 expt = TREE_TYPE (exp);
11817 if (!FLOAT_TYPE_P (subt))
11818 return exp;
11820 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11821 return exp;
11823 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11824 return exp;
11826 return strip_float_extensions (sub);
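/* Illustrative sketch, not part of the sources: on a target where double
   is wider than float, the double constant 2.0 is exactly representable
   as a float, so stripping extensions narrows it and a float * 2.0
   multiply can then be done entirely in float.  */
#if 0
  tree two = build_real (double_type_node, dconst2);
  tree narrowed = strip_float_extensions (two);
  gcc_checking_assert (TREE_TYPE (narrowed) == float_type_node);
#endif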
11829 /* Strip out all handled components that produce invariant
11830 offsets. */
11832 const_tree
11833 strip_invariant_refs (const_tree op)
11835 while (handled_component_p (op))
11837 switch (TREE_CODE (op))
11839 case ARRAY_REF:
11840 case ARRAY_RANGE_REF:
11841 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11842 || TREE_OPERAND (op, 2) != NULL_TREE
11843 || TREE_OPERAND (op, 3) != NULL_TREE)
11844 return NULL;
11845 break;
11847 case COMPONENT_REF:
11848 if (TREE_OPERAND (op, 2) != NULL_TREE)
11849 return NULL;
11850 break;
11852 default:;
11854 op = TREE_OPERAND (op, 0);
11857 return op;
11860 static GTY(()) tree gcc_eh_personality_decl;
11862 /* Return the GCC personality function decl. */
11864 tree
11865 lhd_gcc_personality (void)
11867 if (!gcc_eh_personality_decl)
11868 gcc_eh_personality_decl = build_personality_function ("gcc");
11869 return gcc_eh_personality_decl;
11872 /* TARGET is a call target of a GIMPLE call statement
11873 (obtained by gimple_call_fn). Return true if it is
11874 an OBJ_TYPE_REF representing a virtual call of a C++ method.
11875 (As opposed to an OBJ_TYPE_REF representing ObjC calls
11876 through a cast, where the middle-end devirtualization machinery
11877 can't apply.) FOR_DUMP_P is true when being called from
11878 the dump routines. */
11880 bool
11881 virtual_method_call_p (const_tree target, bool for_dump_p)
11883 if (TREE_CODE (target) != OBJ_TYPE_REF)
11884 return false;
11885 tree t = TREE_TYPE (target);
11886 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
11887 t = TREE_TYPE (t);
11888 if (TREE_CODE (t) == FUNCTION_TYPE)
11889 return false;
11890 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
11891 /* If we do not have BINFO associated, it means that type was built
11892 without devirtualization enabled. Do not consider this a virtual
11893 call. */
11894 if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
11895 return false;
11896 return true;
11899 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
11901 static tree
11902 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
11904 unsigned int i;
11905 tree base_binfo, b;
11907 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
11908 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
11909 && types_same_for_odr (TREE_TYPE (base_binfo), type))
11910 return base_binfo;
11911 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
11912 return b;
11913 return NULL;
11916 /* Try to find a base info of BINFO that would have its field decl at offset
11917 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11918 found, return it, otherwise return NULL_TREE. */
11920 tree
11921 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
11923 tree type = BINFO_TYPE (binfo);
11925 while (true)
11927 HOST_WIDE_INT pos, size;
11928 tree fld;
11929 int i;
11931 if (types_same_for_odr (type, expected_type))
11932 return binfo;
11933 if (maybe_lt (offset, 0))
11934 return NULL_TREE;
11936 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11938 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
11939 continue;
11941 pos = int_bit_position (fld);
11942 size = tree_to_uhwi (DECL_SIZE (fld));
11943 if (known_in_range_p (offset, pos, size))
11944 break;
11946 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11947 return NULL_TREE;
11949 /* Offset 0 indicates the primary base, whose vtable contents are
11950 represented in the binfo for the derived class. */
11951 else if (maybe_ne (offset, 0))
11953 tree found_binfo = NULL, base_binfo;
11954 /* Offsets in BINFO are in bytes relative to the whole structure
11955 while POS is in bits relative to the containing field. */
11956 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
11957 / BITS_PER_UNIT);
11959 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
11960 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
11961 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11963 found_binfo = base_binfo;
11964 break;
11966 if (found_binfo)
11967 binfo = found_binfo;
11968 else
11969 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
11970 binfo_offset);
11973 type = TREE_TYPE (fld);
11974 offset -= pos;
11978 /* Returns true if X is a typedef decl. */
11980 bool
11981 is_typedef_decl (const_tree x)
11983 return (x && TREE_CODE (x) == TYPE_DECL
11984 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11987 /* Returns true iff TYPE is a type variant created for a typedef. */
11989 bool
11990 typedef_variant_p (const_tree type)
11992 return is_typedef_decl (TYPE_NAME (type));
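/* Illustrative sketch, not part of the sources: for C source such as
     typedef int myint;
   the front end records int as the DECL_ORIGINAL_TYPE of the "myint"
   TYPE_DECL, so (with TDECL standing for that hypothetical TYPE_DECL)
   both checks below hold.  */
#if 0
  gcc_checking_assert (is_typedef_decl (tdecl));
  gcc_checking_assert (typedef_variant_p (TREE_TYPE (tdecl)));
#endif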
11995 /* PR 84195: Replace control characters in "unescaped" with their
11996 escaped equivalents. Allow newlines if -fmessage-length has
11997 been set to a non-zero value. This is done here, rather than
11998 where the attribute is recorded as the message length can
11999 change between these two locations. */
12001 void
12002 escaped_string::escape (const char *unescaped)
12004 char *escaped;
12005 size_t i, new_i, len;
12007 if (m_owned)
12008 free (m_str);
12010 m_str = const_cast<char *> (unescaped);
12011 m_owned = false;
12013 if (unescaped == NULL || *unescaped == 0)
12014 return;
12016 len = strlen (unescaped);
12017 escaped = NULL;
12018 new_i = 0;
12020 for (i = 0; i < len; i++)
12022 char c = unescaped[i];
12024 if (!ISCNTRL (c))
12026 if (escaped)
12027 escaped[new_i++] = c;
12028 continue;
12031 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
12033 if (escaped == NULL)
12035 /* We only allocate space for a new string if we
12036 actually encounter a control character that
12037 needs replacing. */
12038 escaped = (char *) xmalloc (len * 2 + 1);
12039 strncpy (escaped, unescaped, i);
12040 new_i = i;
12043 escaped[new_i++] = '\\';
12045 switch (c)
12047 case '\a': escaped[new_i++] = 'a'; break;
12048 case '\b': escaped[new_i++] = 'b'; break;
12049 case '\f': escaped[new_i++] = 'f'; break;
12050 case '\n': escaped[new_i++] = 'n'; break;
12051 case '\r': escaped[new_i++] = 'r'; break;
12052 case '\t': escaped[new_i++] = 't'; break;
12053 case '\v': escaped[new_i++] = 'v'; break;
12054 default: escaped[new_i++] = '?'; break;
12057 else if (escaped)
12058 escaped[new_i++] = c;
12061 if (escaped)
12063 escaped[new_i] = 0;
12064 m_str = escaped;
12065 m_owned = true;
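/* Illustrative sketch, not part of the sources: a message containing a
   control character is rewritten into a printable form.  */
#if 0
  escaped_string msg;
  msg.escape ("two\tfields");
  /* (const char *) msg now contains the two characters '\' and 't' in
     place of the literal tab character.  */
#endif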
12069 /* Warn about a use of an identifier which was marked deprecated. Returns
12070 whether a warning was given. */
12072 bool
12073 warn_deprecated_use (tree node, tree attr)
12075 escaped_string msg;
12077 if (node == 0 || !warn_deprecated_decl)
12078 return false;
12080 if (!attr)
12082 if (DECL_P (node))
12083 attr = DECL_ATTRIBUTES (node);
12084 else if (TYPE_P (node))
12086 tree decl = TYPE_STUB_DECL (node);
12087 if (decl)
12088 attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12089 else if ((decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node)))
12090 != NULL_TREE)
12092 node = TREE_TYPE (decl);
12093 attr = TYPE_ATTRIBUTES (node);
12098 if (attr)
12099 attr = lookup_attribute ("deprecated", attr);
12101 if (attr)
12102 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12104 bool w = false;
12105 if (DECL_P (node))
12107 auto_diagnostic_group d;
12108 if (msg)
12109 w = warning (OPT_Wdeprecated_declarations,
12110 "%qD is deprecated: %s", node, (const char *) msg);
12111 else
12112 w = warning (OPT_Wdeprecated_declarations,
12113 "%qD is deprecated", node);
12114 if (w)
12115 inform (DECL_SOURCE_LOCATION (node), "declared here");
12117 else if (TYPE_P (node))
12119 tree what = NULL_TREE;
12120 tree decl = TYPE_STUB_DECL (node);
12122 if (TYPE_NAME (node))
12124 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12125 what = TYPE_NAME (node);
12126 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12127 && DECL_NAME (TYPE_NAME (node)))
12128 what = DECL_NAME (TYPE_NAME (node));
12131 auto_diagnostic_group d;
12132 if (what)
12134 if (msg)
12135 w = warning (OPT_Wdeprecated_declarations,
12136 "%qE is deprecated: %s", what, (const char *) msg);
12137 else
12138 w = warning (OPT_Wdeprecated_declarations,
12139 "%qE is deprecated", what);
12141 else
12143 if (msg)
12144 w = warning (OPT_Wdeprecated_declarations,
12145 "type is deprecated: %s", (const char *) msg);
12146 else
12147 w = warning (OPT_Wdeprecated_declarations,
12148 "type is deprecated");
12151 if (w && decl)
12152 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12155 return w;
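/* Illustrative sketch, not part of the sources: for C source such as
     int old_api (void) __attribute__ ((deprecated ("use new_api")));
     int f (void) { return old_api (); }
   the routine above produces
     warning: 'old_api' is deprecated: use new_api
   together with a "declared here" note at the declaration.  */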
12158 /* Error out with an identifier which was marked 'unavailable'. */
12159 void
12160 error_unavailable_use (tree node, tree attr)
12162 escaped_string msg;
12164 if (node == 0)
12165 return;
12167 if (!attr)
12169 if (DECL_P (node))
12170 attr = DECL_ATTRIBUTES (node);
12171 else if (TYPE_P (node))
12173 tree decl = TYPE_STUB_DECL (node);
12174 if (decl)
12175 attr = lookup_attribute ("unavailable",
12176 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12180 if (attr)
12181 attr = lookup_attribute ("unavailable", attr);
12183 if (attr)
12184 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12186 if (DECL_P (node))
12188 auto_diagnostic_group d;
12189 if (msg)
12190 error ("%qD is unavailable: %s", node, (const char *) msg);
12191 else
12192 error ("%qD is unavailable", node);
12193 inform (DECL_SOURCE_LOCATION (node), "declared here");
12195 else if (TYPE_P (node))
12197 tree what = NULL_TREE;
12198 tree decl = TYPE_STUB_DECL (node);
12200 if (TYPE_NAME (node))
12202 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12203 what = TYPE_NAME (node);
12204 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12205 && DECL_NAME (TYPE_NAME (node)))
12206 what = DECL_NAME (TYPE_NAME (node));
12209 auto_diagnostic_group d;
12210 if (what)
12212 if (msg)
12213 error ("%qE is unavailable: %s", what, (const char *) msg);
12214 else
12215 error ("%qE is unavailable", what);
12217 else
12219 if (msg)
12220 error ("type is unavailable: %s", (const char *) msg);
12221 else
12222 error ("type is unavailable");
12225 if (decl)
12226 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12230 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12231 somewhere in it. */
12233 bool
12234 contains_bitfld_component_ref_p (const_tree ref)
12236 while (handled_component_p (ref))
12238 if (TREE_CODE (ref) == COMPONENT_REF
12239 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12240 return true;
12241 ref = TREE_OPERAND (ref, 0);
12244 return false;
12247 /* Try to determine whether a TRY_CATCH expression can fall through.
12248 This is a subroutine of block_may_fallthru. */
12250 static bool
12251 try_catch_may_fallthru (const_tree stmt)
12253 tree_stmt_iterator i;
12255 /* If the TRY block can fall through, the whole TRY_CATCH can
12256 fall through. */
12257 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12258 return true;
12260 i = tsi_start (TREE_OPERAND (stmt, 1));
12261 switch (TREE_CODE (tsi_stmt (i)))
12263 case CATCH_EXPR:
12264 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12265 catch expression and a body. The whole TRY_CATCH may fall
12266 through iff any of the catch bodies falls through. */
12267 for (; !tsi_end_p (i); tsi_next (&i))
12269 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12270 return true;
12272 return false;
12274 case EH_FILTER_EXPR:
12275 /* The exception filter expression only matters if there is an
12276 exception. If the exception does not match EH_FILTER_TYPES,
12277 we will execute EH_FILTER_FAILURE, and we will fall through
12278 if that falls through. If the exception does match
12279 EH_FILTER_TYPES, the stack unwinder will continue up the
12280 stack, so we will not fall through. We don't know whether we
12281 will throw an exception which matches EH_FILTER_TYPES or not,
12282 so we just ignore EH_FILTER_TYPES and assume that we might
12283 throw an exception which doesn't match. */
12284 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12286 default:
12287 /* This case represents statements to be executed when an
12288 exception occurs. Those statements are implicitly followed
12289 by a RESX statement to resume execution after the exception.
12290 So in this case the TRY_CATCH never falls through. */
12291 return false;
12295 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12296 need not be 100% accurate; simply be conservative and return true if we
12297 don't know. This is used only to avoid stupidly generating extra code.
12298 If we're wrong, we'll just delete the extra code later. */
12300 bool
12301 block_may_fallthru (const_tree block)
12303 /* This CONST_CAST is okay because expr_last returns its argument
12304 unmodified and we assign it to a const_tree. */
12305 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12307 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12309 case GOTO_EXPR:
12310 case RETURN_EXPR:
12311 /* Easy cases. If the last statement of the block implies
12312 control transfer, then we can't fall through. */
12313 return false;
12315 case SWITCH_EXPR:
12316 /* If there is a default: label or case labels cover all possible
12317 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12318 to some case label in all cases and all we care is whether the
12319 SWITCH_BODY falls through. */
12320 if (SWITCH_ALL_CASES_P (stmt))
12321 return block_may_fallthru (SWITCH_BODY (stmt));
12322 return true;
12324 case COND_EXPR:
12325 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12326 return true;
12327 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12329 case BIND_EXPR:
12330 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12332 case TRY_CATCH_EXPR:
12333 return try_catch_may_fallthru (stmt);
12335 case TRY_FINALLY_EXPR:
12336 /* The finally clause is always executed after the try clause,
12337 so if it does not fall through, then the try-finally will not
12338 fall through. Otherwise, if the try clause does not fall
12339 through, then when the finally clause falls through it will
12340 resume execution wherever the try clause was going. So the
12341 whole try-finally will only fall through if both the try
12342 clause and the finally clause fall through. */
12343 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12344 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12346 case EH_ELSE_EXPR:
12347 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12349 case MODIFY_EXPR:
12350 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12351 stmt = TREE_OPERAND (stmt, 1);
12352 else
12353 return true;
12354 /* FALLTHRU */
12356 case CALL_EXPR:
12357 /* Functions that do not return do not fall through. */
12358 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12360 case CLEANUP_POINT_EXPR:
12361 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12363 case TARGET_EXPR:
12364 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12366 case ERROR_MARK:
12367 return true;
12369 default:
12370 return lang_hooks.block_may_fallthru (stmt);
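/* Illustrative sketch, not part of the sources: of the two C blocks
     { x = f (); }
     { x = g (); return x; }
   the first may fall through, while the second ends in a RETURN_EXPR and
   therefore may not; a block ending in a call to a noreturn function
   such as abort () may not fall through either.  */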
12374 /* True if we are using EH to handle cleanups. */
12375 static bool using_eh_for_cleanups_flag = false;
12377 /* This routine is called from front ends to indicate that EH should be used
12378 for cleanups. */
12379 void
12380 using_eh_for_cleanups (void)
12382 using_eh_for_cleanups_flag = true;
12385 /* Query whether EH is used for cleanups. */
12386 bool
12387 using_eh_for_cleanups_p (void)
12389 return using_eh_for_cleanups_flag;
12392 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12393 const char *
12394 get_tree_code_name (enum tree_code code)
12396 const char *invalid = "<invalid tree code>";
12398 /* The tree_code enum promotes to signed, but we could be getting
12399 invalid values, so force an unsigned comparison. */
12400 if (unsigned (code) >= MAX_TREE_CODES)
12402 if ((unsigned)code == 0xa5a5)
12403 return "ggc_freed";
12404 return invalid;
12407 return tree_code_name[code];
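/* Illustrative sketch, not part of the sources: valid codes map to their
   tree.def names, while the 0xa5a5 pattern left behind by the garbage
   collector is reported specially instead of indexing out of bounds.  */
#if 0
  gcc_checking_assert (!strcmp (get_tree_code_name (INTEGER_CST),
				"integer_cst"));
  gcc_checking_assert (!strcmp (get_tree_code_name ((enum tree_code) 0xa5a5),
				"ggc_freed"));
#endif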
12410 /* Drops the TREE_OVERFLOW flag from T. */
12412 tree
12413 drop_tree_overflow (tree t)
12415 gcc_checking_assert (TREE_OVERFLOW (t));
12417 /* For tree codes with a sharing machinery re-build the result. */
12418 if (poly_int_tree_p (t))
12419 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12421 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12422 and canonicalize the result. */
12423 if (TREE_CODE (t) == VECTOR_CST)
12425 tree_vector_builder builder;
12426 builder.new_unary_operation (TREE_TYPE (t), t, true);
12427 unsigned int count = builder.encoded_nelts ();
12428 for (unsigned int i = 0; i < count; ++i)
12430 tree elt = VECTOR_CST_ELT (t, i);
12431 if (TREE_OVERFLOW (elt))
12432 elt = drop_tree_overflow (elt);
12433 builder.quick_push (elt);
12435 return builder.build ();
12438 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12439 and drop the flag. */
12440 t = copy_node (t);
12441 TREE_OVERFLOW (t) = 0;
12443 /* For constants that contain nested constants, drop the flag
12444 from those as well. */
12445 if (TREE_CODE (t) == COMPLEX_CST)
12447 if (TREE_OVERFLOW (TREE_REALPART (t)))
12448 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12449 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12450 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12453 return t;
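/* Illustrative sketch, not part of the sources: T below stands for a
   hypothetical INTEGER_CST with TREE_OVERFLOW set, e.g. the result of
   folding INT_MAX + 1; dropping the flag yields a constant with the same
   value but without the overflow marker.  */
#if 0
  tree clean = drop_tree_overflow (t);
  gcc_checking_assert (!TREE_OVERFLOW (clean)
		       && tree_int_cst_equal (t, clean));
#endif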
12456 /* Given a memory reference expression T, return its base address.
12457 The base address of a memory reference expression is the main
12458 object being referenced. For instance, the base address for
12459 'array[i].fld[j]' is 'array'. You can think of this as stripping
12460 away the offset part from a memory address.
12462 This function calls handled_component_p to strip away all the inner
12463 parts of the memory reference until it reaches the base object. */
12465 tree
12466 get_base_address (tree t)
12468 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12469 t = TREE_OPERAND (t, 0);
12470 while (handled_component_p (t))
12471 t = TREE_OPERAND (t, 0);
12473 if ((TREE_CODE (t) == MEM_REF
12474 || TREE_CODE (t) == TARGET_MEM_REF)
12475 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12476 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12478 return t;
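/* Illustrative sketch, not part of the sources: REF below stands for the
   hypothetical GENERIC tree built for array[i].fld[j], a chain of
   ARRAY_REF and COMPONENT_REF nodes around a VAR_DECL; stripping the
   access path yields the VAR_DECL for "array".  */
#if 0
  tree base = get_base_address (ref);
  gcc_checking_assert (VAR_P (base)
		       && id_equal (DECL_NAME (base), "array"));
#endif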
12481 /* Return a tree of sizetype representing the size, in bytes, of the element
12482 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12484 tree
12485 array_ref_element_size (tree exp)
12487 tree aligned_size = TREE_OPERAND (exp, 3);
12488 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12489 location_t loc = EXPR_LOCATION (exp);
12491 /* If a size was specified in the ARRAY_REF, it's the size measured
12492 in alignment units of the element type. So multiply by that value. */
12493 if (aligned_size)
12495 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12496 sizetype from another type of the same width and signedness. */
12497 if (TREE_TYPE (aligned_size) != sizetype)
12498 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12499 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12500 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12503 /* Otherwise, take the size from that of the element type. Substitute
12504 any PLACEHOLDER_EXPR that we have. */
12505 else
12506 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12509 /* Return a tree representing the lower bound of the array mentioned in
12510 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12512 tree
12513 array_ref_low_bound (tree exp)
12515 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12517 /* If a lower bound is specified in EXP, use it. */
12518 if (TREE_OPERAND (exp, 2))
12519 return TREE_OPERAND (exp, 2);
12521 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12522 substituting for a PLACEHOLDER_EXPR as needed. */
12523 if (domain_type && TYPE_MIN_VALUE (domain_type))
12524 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12526 /* Otherwise, return a zero of the appropriate type. */
12527 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12528 return (idxtype == error_mark_node
12529 ? integer_zero_node : build_int_cst (idxtype, 0));
12532 /* Return a tree representing the upper bound of the array mentioned in
12533 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12535 tree
12536 array_ref_up_bound (tree exp)
12538 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12540 /* If there is a domain type and it has an upper bound, use it, substituting
12541 for a PLACEHOLDER_EXPR as needed. */
12542 if (domain_type && TYPE_MAX_VALUE (domain_type))
12543 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12545 /* Otherwise fail. */
12546 return NULL_TREE;
12549 /* Returns true if REF is an array reference, component reference,
12550 or memory reference to an array at the end of a structure.
12551 If this is the case, the array may be allocated larger
12552 than its upper bound implies. */
12554 bool
12555 array_at_struct_end_p (tree ref)
12557 tree atype;
12559 if (TREE_CODE (ref) == ARRAY_REF
12560 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12562 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12563 ref = TREE_OPERAND (ref, 0);
12565 else if (TREE_CODE (ref) == COMPONENT_REF
12566 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12567 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12568 else if (TREE_CODE (ref) == MEM_REF)
12570 tree arg = TREE_OPERAND (ref, 0);
12571 if (TREE_CODE (arg) == ADDR_EXPR)
12572 arg = TREE_OPERAND (arg, 0);
12573 tree argtype = TREE_TYPE (arg);
12574 if (TREE_CODE (argtype) == RECORD_TYPE)
12576 if (tree fld = last_field (argtype))
12578 atype = TREE_TYPE (fld);
12579 if (TREE_CODE (atype) != ARRAY_TYPE)
12580 return false;
12581 if (VAR_P (arg) && DECL_SIZE (fld))
12582 return false;
12584 else
12585 return false;
12587 else
12588 return false;
12590 else
12591 return false;
12593 if (TREE_CODE (ref) == STRING_CST)
12594 return false;
12596 tree ref_to_array = ref;
12597 while (handled_component_p (ref))
12599 /* If the reference chain contains a component reference to a
12600 non-union type and another field follows, the reference
12601 is not at the end of a structure. */
12602 if (TREE_CODE (ref) == COMPONENT_REF)
12604 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12606 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12607 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12608 nextf = DECL_CHAIN (nextf);
12609 if (nextf)
12610 return false;
12613 /* If we have a multi-dimensional array we do not consider
12614 a non-innermost dimension as a flex array if the whole
12615 multi-dimensional array is at struct end.
12616 Same for an array of aggregates with a trailing array
12617 member. */
12618 else if (TREE_CODE (ref) == ARRAY_REF)
12619 return false;
12620 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12622 /* If we view an underlying object as something else, then what we
12623 have gathered up to now is what we have to rely on. */
12624 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12625 break;
12626 else
12627 gcc_unreachable ();
12629 ref = TREE_OPERAND (ref, 0);
12632 /* The array is now at the end of a struct. Treat flexible arrays as
12633 always subject to extension, even into just padding constrained by
12634 an underlying decl. */
12635 if (! TYPE_SIZE (atype)
12636 || ! TYPE_DOMAIN (atype)
12637 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12638 return true;
12640 /* If the reference is based on a declared entity, the size of the array
12641 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
12642 ref = get_base_address (ref);
12643 if (ref
12644 && DECL_P (ref)
12645 && !(flag_unconstrained_commons
12646 && VAR_P (ref) && DECL_COMMON (ref))
12647 && DECL_SIZE_UNIT (ref)
12648 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12650 /* Check whether the array domain covers all of the available
12651 padding. */
12652 poly_int64 offset;
12653 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12654 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12655 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12656 return true;
12657 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12658 return true;
12660 /* If at least one extra element fits it is a flexarray. */
12661 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12662 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
12663 + 2)
12664 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
12665 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
12666 return true;
12668 return false;
12671 return true;
12674 /* Return a tree representing the offset, in bytes, of the field referenced
12675 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12677 tree
12678 component_ref_field_offset (tree exp)
12680 tree aligned_offset = TREE_OPERAND (exp, 2);
12681 tree field = TREE_OPERAND (exp, 1);
12682 location_t loc = EXPR_LOCATION (exp);
12684 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12685 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12686 value. */
12687 if (aligned_offset)
12689 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12690 sizetype from another type of the same width and signedness. */
12691 if (TREE_TYPE (aligned_offset) != sizetype)
12692 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12693 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12694 size_int (DECL_OFFSET_ALIGN (field)
12695 / BITS_PER_UNIT));
12698 /* Otherwise, take the offset from that of the field. Substitute
12699 any PLACEHOLDER_EXPR that we have. */
12700 else
12701 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
12704 /* Given the initializer INIT, return the initializer for the field
12705 DECL if it exists, otherwise null. Used to obtain the initializer
12706 for a flexible array member and determine its size. */
12708 static tree
12709 get_initializer_for (tree init, tree decl)
12711 STRIP_NOPS (init);
12713 tree fld, fld_init;
12714 unsigned HOST_WIDE_INT i;
12715 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
12717 if (decl == fld)
12718 return fld_init;
12720 if (TREE_CODE (fld) == CONSTRUCTOR)
12722 fld_init = get_initializer_for (fld_init, decl);
12723 if (fld_init)
12724 return fld_init;
12728 return NULL_TREE;
12731 /* Determines the size of the member referenced by the COMPONENT_REF
12732 REF, using its initializer expression if necessary in order to
12733 determine the size of an initialized flexible array member.
12734 If non-null, set *SAM when REF refers to an interior zero-length
12735 array or a trailing one-element array.
12736 Returns the size as sizetype (which might be zero for an object
12737 with an uninitialized flexible array member) or null if the size
12738 cannot be determined. */
12740 tree
12741 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
12743 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
12745 special_array_member sambuf;
12746 if (!sam)
12747 sam = &sambuf;
12748 *sam = special_array_member::none;
12750 /* The object/argument referenced by the COMPONENT_REF and its type. */
12751 tree arg = TREE_OPERAND (ref, 0);
12752 tree argtype = TREE_TYPE (arg);
12753 /* The referenced member. */
12754 tree member = TREE_OPERAND (ref, 1);
12756 tree memsize = DECL_SIZE_UNIT (member);
12757 if (memsize)
12759 tree memtype = TREE_TYPE (member);
12760 if (TREE_CODE (memtype) != ARRAY_TYPE)
12761 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
12762 to the type of a class with a virtual base which doesn't
12763 reflect the size of the virtual's members (see PR 97595).
12764 If that's the case fail for now and implement something
12765 more robust in the future. */
12766 return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
12767 ? memsize : NULL_TREE);
12769 bool trailing = array_at_struct_end_p (ref);
12770 bool zero_length = integer_zerop (memsize);
12771 if (!trailing && !zero_length)
12772 /* MEMBER is either an interior array or is an array with
12773 more than one element. */
12774 return memsize;
12776 if (zero_length)
12778 if (trailing)
12779 *sam = special_array_member::trail_0;
12780 else
12782 *sam = special_array_member::int_0;
12783 memsize = NULL_TREE;
12787 if (!zero_length)
12788 if (tree dom = TYPE_DOMAIN (memtype))
12789 if (tree min = TYPE_MIN_VALUE (dom))
12790 if (tree max = TYPE_MAX_VALUE (dom))
12791 if (TREE_CODE (min) == INTEGER_CST
12792 && TREE_CODE (max) == INTEGER_CST)
12794 offset_int minidx = wi::to_offset (min);
12795 offset_int maxidx = wi::to_offset (max);
12796 offset_int neltsm1 = maxidx - minidx;
12797 if (neltsm1 > 0)
12798 /* MEMBER is an array with more than one element. */
12799 return memsize;
12801 if (neltsm1 == 0)
12802 *sam = special_array_member::trail_1;
12805 /* For a reference to a zero- or one-element array member of a union
12806 use the size of the union instead of the size of the member. */
12807 if (TREE_CODE (argtype) == UNION_TYPE)
12808 memsize = TYPE_SIZE_UNIT (argtype);
12811 /* MEMBER is either a bona fide flexible array member, or a zero-length
12812 array member, or an array of length one treated as such. */
12814 /* If the reference is to a declared object and the member a true
12815 flexible array, try to determine its size from its initializer. */
12816 poly_int64 baseoff = 0;
12817 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
12818 if (!base || !VAR_P (base))
12820 if (*sam != special_array_member::int_0)
12821 return NULL_TREE;
12823 if (TREE_CODE (arg) != COMPONENT_REF)
12824 return NULL_TREE;
12826 base = arg;
12827 while (TREE_CODE (base) == COMPONENT_REF)
12828 base = TREE_OPERAND (base, 0);
12829 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
12832 /* BASE is the declared object of which MEMBER is either a member
12833 or that is cast to ARGTYPE (e.g., a char buffer used to store
12834 an ARGTYPE object). */
12835 tree basetype = TREE_TYPE (base);
12837 /* Determine the base type of the referenced object. If it's
12838 the same as ARGTYPE and MEMBER has a known size, return it. */
12839 tree bt = basetype;
12840 if (*sam != special_array_member::int_0)
12841 while (TREE_CODE (bt) == ARRAY_TYPE)
12842 bt = TREE_TYPE (bt);
12843 bool typematch = useless_type_conversion_p (argtype, bt);
12844 if (memsize && typematch)
12845 return memsize;
12847 memsize = NULL_TREE;
12849 if (typematch)
12850 /* MEMBER is a true flexible array member. Compute its size from
12851 the initializer of the BASE object if it has one. */
12852 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
12853 if (init != error_mark_node)
12855 init = get_initializer_for (init, member);
12856 if (init)
12858 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
12859 if (tree refsize = TYPE_SIZE_UNIT (argtype))
12861 /* Use the larger of the initializer size and the tail
12862 padding in the enclosing struct. */
12863 poly_int64 rsz = tree_to_poly_int64 (refsize);
12864 rsz -= baseoff;
12865 if (known_lt (tree_to_poly_int64 (memsize), rsz))
12866 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
12869 baseoff = 0;
12873 if (!memsize)
12875 if (typematch)
12877 if (DECL_P (base)
12878 && DECL_EXTERNAL (base)
12879 && bt == basetype
12880 && *sam != special_array_member::int_0)
12881 /* The size of a flexible array member of an extern struct
12882 with no initializer cannot be determined (it's defined
12883 in another translation unit and can have an initializer
12884 with an arbitrary number of elements). */
12885 return NULL_TREE;
12887 /* Use the size of the base struct or, for interior zero-length
12888 arrays, the size of the enclosing type. */
12889 memsize = TYPE_SIZE_UNIT (bt);
12891 else if (DECL_P (base))
12892 /* Use the size of the BASE object (possibly an array of some
12893 other type such as char used to store the struct). */
12894 memsize = DECL_SIZE_UNIT (base);
12895 else
12896 return NULL_TREE;
12899 /* If the flexible array member has a known size use the greater
12900 of it and the tail padding in the enclosing struct.
12901 Otherwise, when the size of the flexible array member is unknown
12902 and the referenced object is not a struct, use the size of its
12903 type when known. This detects sizes of array buffers when cast
12904 to struct types with flexible array members. */
12905 if (memsize)
12907 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
12908 if (known_lt (baseoff, memsz64))
12910 memsz64 -= baseoff;
12911 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
12913 return size_zero_node;
12916 /* Return "don't know" for an external non-array object since its
12917 flexible array member can be initialized to have any number of
12918 elements. Otherwise, return zero because the flexible array
12919 member has no elements. */
12920 return (DECL_P (base)
12921 && DECL_EXTERNAL (base)
12922 && (!typematch
12923 || TREE_CODE (basetype) != ARRAY_TYPE)
12924 ? NULL_TREE : size_zero_node);
12927 /* Return the machine mode of T. For vectors, returns the mode of the
12928 inner type. The main use case is to feed the result to HONOR_NANS,
12929 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12931 machine_mode
12932 element_mode (const_tree t)
12934 if (!TYPE_P (t))
12935 t = TREE_TYPE (t);
12936 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12937 t = TREE_TYPE (t);
12938 return TYPE_MODE (t);
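/* Illustrative sketch, not part of the sources: for a scalar float the
   result is simply its own mode, and for complex or vector types it is
   the mode of the element, which is what HONOR_NANS wants.  */
#if 0
  gcc_checking_assert (element_mode (float_type_node)
		       == TYPE_MODE (float_type_node));
  gcc_checking_assert (element_mode (complex_double_type_node)
		       == TYPE_MODE (double_type_node));
#endif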
12941 /* Vector types need to re-check the target flags each time we report
12942 the machine mode. We need to do this because attribute target can
12943 change the result of vector_mode_supported_p and have_regs_of_mode
12944 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12945 change on a per-function basis. */
12946 /* ??? Possibly a better solution is to run through all the types
12947 referenced by a function and re-compute the TYPE_MODE once, rather
12948 than make the TYPE_MODE macro call a function. */
12950 machine_mode
12951 vector_type_mode (const_tree t)
12953 machine_mode mode;
12955 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
12957 mode = t->type_common.mode;
12958 if (VECTOR_MODE_P (mode)
12959 && (!targetm.vector_mode_supported_p (mode)
12960 || !have_regs_of_mode[mode]))
12962 scalar_int_mode innermode;
12964 /* For integers, try mapping it to a same-sized scalar mode. */
12965 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
12967 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
12968 * GET_MODE_BITSIZE (innermode));
12969 scalar_int_mode mode;
12970 if (int_mode_for_size (size, 0).exists (&mode)
12971 && have_regs_of_mode[mode])
12972 return mode;
12975 return BLKmode;
12978 return mode;
12981 /* Return the size in bits of each element of vector type TYPE. */
12983 unsigned int
12984 vector_element_bits (const_tree type)
12986 gcc_checking_assert (VECTOR_TYPE_P (type));
12987 if (VECTOR_BOOLEAN_TYPE_P (type))
12988 return TYPE_PRECISION (TREE_TYPE (type));
12989 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
12992 /* Calculate the size in bits of each element of vector type TYPE
12993 and return the result as a tree of type bitsizetype. */
12995 tree
12996 vector_element_bits_tree (const_tree type)
12998 gcc_checking_assert (VECTOR_TYPE_P (type));
12999 if (VECTOR_BOOLEAN_TYPE_P (type))
13000 return bitsize_int (vector_element_bits (type));
13001 return TYPE_SIZE (TREE_TYPE (type));
13004 /* Verify that basic properties of T match TV and thus T can be a variant of
13005 TV. TV should be the more specified variant (i.e. the main variant). */
13007 static bool
13008 verify_type_variant (const_tree t, tree tv)
13010 /* Type variant can differ by:
13012 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13013 ENCODE_QUAL_ADDR_SPACE.
13014 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P;
13015 in this case some values may not be set in the variant types
13016 (see TYPE_COMPLETE_P checks).
13017 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13018 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13019 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13020 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13021 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13022 this is necessary to make it possible to merge types from different TUs
13023 - arrays, pointers and references may have TREE_TYPE that is a variant
13024 of TREE_TYPE of their main variants.
13025 - aggregates may have new TYPE_FIELDS list that list variants of
13026 the main variant TYPE_FIELDS.
13027 - vector types may differ by TYPE_VECTOR_OPAQUE
13030 /* Convenience macro for matching individual fields. */
13031 #define verify_variant_match(flag) \
13032 do { \
13033 if (flag (tv) != flag (t)) \
13035 error ("type variant differs by %s", #flag); \
13036 debug_tree (tv); \
13037 return false; \
13039 } while (false)
13041 /* tree_base checks. */
13043 verify_variant_match (TREE_CODE);
13044 /* FIXME: Ada builds non-artificial variants of artificial types. */
13045 #if 0
13046 if (TYPE_ARTIFICIAL (tv))
13047 verify_variant_match (TYPE_ARTIFICIAL);
13048 #endif
13049 if (POINTER_TYPE_P (tv))
13050 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13051 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13052 verify_variant_match (TYPE_UNSIGNED);
13053 verify_variant_match (TYPE_PACKED);
13054 if (TREE_CODE (t) == REFERENCE_TYPE)
13055 verify_variant_match (TYPE_REF_IS_RVALUE);
13056 if (AGGREGATE_TYPE_P (t))
13057 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13058 else
13059 verify_variant_match (TYPE_SATURATING);
13060 /* FIXME: This check triggers during libstdc++ builds. */
13061 #if 0
13062 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
13063 verify_variant_match (TYPE_FINAL_P);
13064 #endif
13066 /* tree_type_common checks. */
13068 if (COMPLETE_TYPE_P (t))
13070 verify_variant_match (TYPE_MODE);
13071 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13072 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13073 verify_variant_match (TYPE_SIZE);
13074 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13075 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13076 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13078 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13079 TYPE_SIZE_UNIT (tv), 0));
13080 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13081 debug_tree (tv);
13082 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13083 debug_tree (TYPE_SIZE_UNIT (tv));
13084 error ("type%'s %<TYPE_SIZE_UNIT%>");
13085 debug_tree (TYPE_SIZE_UNIT (t));
13086 return false;
13088 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13090 verify_variant_match (TYPE_PRECISION);
13091 if (RECORD_OR_UNION_TYPE_P (t))
13092 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13093 else if (TREE_CODE (t) == ARRAY_TYPE)
13094 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13095 /* During LTO we merge variant lists from different translation units
13096 that may differ by TYPE_CONTEXT, which in turn may point
13097 to TRANSLATION_UNIT_DECL.
13098 Ada also builds variants of types with different TYPE_CONTEXT. */
13099 #if 0
13100 if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13101 verify_variant_match (TYPE_CONTEXT);
13102 #endif
13103 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13104 verify_variant_match (TYPE_STRING_FLAG);
13105 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13106 verify_variant_match (TYPE_CXX_ODR_P);
13107 if (TYPE_ALIAS_SET_KNOWN_P (t))
13109 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13110 debug_tree (tv);
13111 return false;
13114 /* tree_type_non_common checks. */
13116 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13117 and dangles the pointer from time to time. */
13118 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13119 && (in_lto_p || !TYPE_VFIELD (tv)
13120 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13122 error ("type variant has different %<TYPE_VFIELD%>");
13123 debug_tree (tv);
13124 return false;
13126 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13127 || TREE_CODE (t) == INTEGER_TYPE
13128 || TREE_CODE (t) == BOOLEAN_TYPE
13129 || TREE_CODE (t) == REAL_TYPE
13130 || TREE_CODE (t) == FIXED_POINT_TYPE)
13132 verify_variant_match (TYPE_MAX_VALUE);
13133 verify_variant_match (TYPE_MIN_VALUE);
13135 if (TREE_CODE (t) == METHOD_TYPE)
13136 verify_variant_match (TYPE_METHOD_BASETYPE);
13137 if (TREE_CODE (t) == OFFSET_TYPE)
13138 verify_variant_match (TYPE_OFFSET_BASETYPE);
13139 if (TREE_CODE (t) == ARRAY_TYPE)
13140 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13141 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13142 or even type's main variant. This is needed to make bootstrap pass
13143 and the bug seems new in GCC 5.
13144 C++ FE should be updated to make this consistent and we should check
13145 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13146 is a match with main variant.
13148 Also disable the check for Java for now because of a parser hack that first
13149 builds a dummy BINFO and then sometimes replaces it with a real BINFO in some
13150 of the copies. */
13151 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13152 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13153 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13154 Since there is no cheap way to tell a C++ type from a Java type without
13155 LTO, do the checking at LTO time only. */
13156 && (in_lto_p && odr_type_p (t)))
13158 error ("type variant has different %<TYPE_BINFO%>");
13159 debug_tree (tv);
13160 error ("type variant%'s %<TYPE_BINFO%>");
13161 debug_tree (TYPE_BINFO (tv));
13162 error ("type%'s %<TYPE_BINFO%>");
13163 debug_tree (TYPE_BINFO (t));
13164 return false;
13167 /* Check various uses of TYPE_VALUES_RAW. */
13168 if (TREE_CODE (t) == ENUMERAL_TYPE
13169 && TYPE_VALUES (t))
13170 verify_variant_match (TYPE_VALUES);
13171 else if (TREE_CODE (t) == ARRAY_TYPE)
13172 verify_variant_match (TYPE_DOMAIN);
13173 /* Permit incomplete variants of a complete type. While FEs may complete
13174 all variants, this does not happen for C++ templates in all cases. */
13175 else if (RECORD_OR_UNION_TYPE_P (t)
13176 && COMPLETE_TYPE_P (t)
13177 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13179 tree f1, f2;
13181 /* Fortran builds qualified variants as new records with items of
13182 qualified type. Verify that they look the same. */
13183 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13184 f1 && f2;
13185 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13186 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13187 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13188 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13189 /* FIXME: gfc_nonrestricted_type builds all types as variants
13190 with the exception of pointer types. It deeply copies the type,
13191 which means that we may end up with a variant type
13192 referring to a non-variant pointer. We may change it to
13193 produce types as variants, too, like
13194 objc_get_protocol_qualified_type does. */
13195 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13196 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13197 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13198 break;
13199 if (f1 || f2)
13201 error ("type variant has different %<TYPE_FIELDS%>");
13202 debug_tree (tv);
13203 error ("first mismatch is field");
13204 debug_tree (f1);
13205 error ("and field");
13206 debug_tree (f2);
13207 return false;
13210 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13211 verify_variant_match (TYPE_ARG_TYPES);
13212 /* For C++ the qualified variant of an array type is really an array type
13213 of the qualified TREE_TYPE.
13214 ObjC builds variants of pointers where the pointed-to type is a variant,
13215 too, in objc_get_protocol_qualified_type. */
13216 if (TREE_TYPE (t) != TREE_TYPE (tv)
13217 && ((TREE_CODE (t) != ARRAY_TYPE
13218 && !POINTER_TYPE_P (t))
13219 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13220 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13222 error ("type variant has different %<TREE_TYPE%>");
13223 debug_tree (tv);
13224 error ("type variant%'s %<TREE_TYPE%>");
13225 debug_tree (TREE_TYPE (tv));
13226 error ("type%'s %<TREE_TYPE%>");
13227 debug_tree (TREE_TYPE (t));
13228 return false;
13230 if (type_with_alias_set_p (t)
13231 && !gimple_canonical_types_compatible_p (t, tv, false))
13233 error ("type is not compatible with its variant");
13234 debug_tree (tv);
13235 error ("type variant%'s %<TREE_TYPE%>");
13236 debug_tree (TREE_TYPE (tv));
13237 error ("type%'s %<TREE_TYPE%>");
13238 debug_tree (TREE_TYPE (t));
13239 return false;
13241 return true;
13242 #undef verify_variant_match
13246 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13247 the middle-end types_compatible_p function. It needs to avoid
13248 claiming types are different for types that should be treated
13249 the same with respect to TBAA. Canonical types are also used
13250 for IL consistency checks via the useless_type_conversion_p
13251 predicate which does not handle all type kinds itself but falls
13252 back to pointer-comparison of TYPE_CANONICAL for aggregates
13253 for example. */
13255 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13256 type calculation because we need to allow inter-operability between signed
13257 and unsigned variants. */
13259 bool
13260 type_with_interoperable_signedness (const_tree type)
13262 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13263 signed char and unsigned char. Similarly the Fortran FE builds
13264 C_SIZE_T as a signed type, while C defines it as unsigned. */
13266 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13267 == INTEGER_TYPE
13268 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13269 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
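/* Illustrative sketch, not part of the sources: both of the checks below
   hold, because the precision of the argument matches that of signed char
   in the first case and that of size_t in the second.  */
#if 0
  gcc_checking_assert
    (type_with_interoperable_signedness (signed_char_type_node));
  gcc_checking_assert
    (type_with_interoperable_signedness (size_type_node));
#endif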
13272 /* Return true iff T1 and T2 are structurally identical as far as
13273 TBAA is concerned.
13274 This function is used both by lto.cc canonical type merging and by the
13275 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13276 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13277 only for LTO because only in these cases TYPE_CANONICAL equivalence
13278 corresponds to the one defined by gimple_canonical_types_compatible_p. */
13280 bool
13281 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13282 bool trust_type_canonical)
13284 /* Type variants should be the same as the main variant. When not doing sanity
13285 checking to verify this fact, go to main variants and save some work. */
13286 if (trust_type_canonical)
13288 t1 = TYPE_MAIN_VARIANT (t1);
13289 t2 = TYPE_MAIN_VARIANT (t2);
13292 /* Check first for the obvious case of pointer identity. */
13293 if (t1 == t2)
13294 return true;
13296 /* Check that we have two types to compare. */
13297 if (t1 == NULL_TREE || t2 == NULL_TREE)
13298 return false;
13300 /* We consider complete types always compatible with incomplete types.
13301 This does not make sense for canonical type calculation and thus we
13302 need to ensure that we are never called on it.
13304 FIXME: For more correctness the function probably should have three modes
13305 1) a mode assuming that types are complete, matching their structure
13306 2) mode allowing incomplete types but producing equivalence classes
13307 and thus ignoring all info from complete types
13308 3) mode allowing incomplete types to match complete but checking
13309 compatibility between complete types.
13311 1 and 2 can be used for canonical type calculation. 3 is the real
13312 definition of type compatibility that can be used i.e. for warnings during
13313 declaration merging. */
13315 gcc_assert (!trust_type_canonical
13316 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13318 /* If the types have been previously registered and found equal
13319 they still are. */
13321 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13322 && trust_type_canonical)
13324 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13325 they are always NULL, but they are set to non-NULL for types
13326 constructed by build_pointer_type and variants. In this case the
13327 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
13328 all pointers are considered equal). Be sure not to return false
13329 negatives. */
13330 gcc_checking_assert (canonical_type_used_p (t1)
13331 && canonical_type_used_p (t2));
13332 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13335 /* For types where we do ODR based TBAA the canonical type is always
13336 set correctly, so we know that types are different if their
13337 canonical types do not match. */
13338 if (trust_type_canonical
13339 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13340 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13341 return false;
13343 /* Can't be the same type if the types don't have the same code. */
13344 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13345 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13346 return false;
13348 /* Qualifiers do not matter for canonical type comparison purposes. */
13350 /* Void types and nullptr types are always the same. */
13351 if (TREE_CODE (t1) == VOID_TYPE
13352 || TREE_CODE (t1) == NULLPTR_TYPE)
13353 return true;
13355 /* Can't be the same type if they have different mode. */
13356 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13357 return false;
13359 /* Non-aggregate types can be handled cheaply. */
13360 if (INTEGRAL_TYPE_P (t1)
13361 || SCALAR_FLOAT_TYPE_P (t1)
13362 || FIXED_POINT_TYPE_P (t1)
13363 || TREE_CODE (t1) == VECTOR_TYPE
13364 || TREE_CODE (t1) == COMPLEX_TYPE
13365 || TREE_CODE (t1) == OFFSET_TYPE
13366 || POINTER_TYPE_P (t1))
13368 /* Can't be the same type if they have different precision. */
13369 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13370 return false;
13372 /* In some cases the signed and unsigned types are required to be
13373 inter-operable. */
13374 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13375 && !type_with_interoperable_signedness (t1))
13376 return false;
13378 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13379 interoperable with "signed char". Unless all frontends are revisited
13380 to agree on these types, we must ignore the flag completely. */
13382 /* The Fortran standard defines the C_PTR type, which is compatible with every
13383 C pointer. For this reason we need to glob all pointers into one.
13384 Still, pointers in different address spaces are not compatible. */
13385 if (POINTER_TYPE_P (t1))
13387 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13388 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13389 return false;
13392 /* Tail-recurse to components. */
13393 if (TREE_CODE (t1) == VECTOR_TYPE
13394 || TREE_CODE (t1) == COMPLEX_TYPE)
13395 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13396 TREE_TYPE (t2),
13397 trust_type_canonical);
13399 return true;
13402 /* Do type-specific comparisons. */
13403 switch (TREE_CODE (t1))
13405 case ARRAY_TYPE:
13406 /* Array types are the same if the element types are the same and
13407 the number of elements is the same. */
13408 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13409 trust_type_canonical)
13410 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13411 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13412 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13413 return false;
13414 else
13416 tree i1 = TYPE_DOMAIN (t1);
13417 tree i2 = TYPE_DOMAIN (t2);
13419 /* For an incomplete external array, the type domain can be
13420 NULL_TREE. Check this condition also. */
13421 if (i1 == NULL_TREE && i2 == NULL_TREE)
13422 return true;
13423 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13424 return false;
13425 else
13427 tree min1 = TYPE_MIN_VALUE (i1);
13428 tree min2 = TYPE_MIN_VALUE (i2);
13429 tree max1 = TYPE_MAX_VALUE (i1);
13430 tree max2 = TYPE_MAX_VALUE (i2);
13432 /* The minimum/maximum values have to be the same. */
13433 if ((min1 == min2
13434 || (min1 && min2
13435 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13436 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13437 || operand_equal_p (min1, min2, 0))))
13438 && (max1 == max2
13439 || (max1 && max2
13440 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13441 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13442 || operand_equal_p (max1, max2, 0)))))
13443 return true;
13444 else
13445 return false;
13449 case METHOD_TYPE:
13450 case FUNCTION_TYPE:
13451 /* Function types are the same if the return type and argument types
13452 are the same. */
13453 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13454 trust_type_canonical))
13455 return false;
13457 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13458 return true;
13459 else
13461 tree parms1, parms2;
13463 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13464 parms1 && parms2;
13465 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13467 if (!gimple_canonical_types_compatible_p
13468 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13469 trust_type_canonical))
13470 return false;
13473 if (parms1 || parms2)
13474 return false;
13476 return true;
13479 case RECORD_TYPE:
13480 case UNION_TYPE:
13481 case QUAL_UNION_TYPE:
13483 tree f1, f2;
13485 /* Don't try to compare variants of an incomplete type, before
13486 TYPE_FIELDS has been copied around. */
13487 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13488 return true;
13491 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13492 return false;
13494 /* For aggregate types, all the fields must be the same. */
13495 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13496 f1 || f2;
13497 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13499 /* Skip non-fields and zero-sized fields. */
13500 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13501 || (DECL_SIZE (f1)
13502 && integer_zerop (DECL_SIZE (f1)))))
13503 f1 = TREE_CHAIN (f1);
13504 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13505 || (DECL_SIZE (f2)
13506 && integer_zerop (DECL_SIZE (f2)))))
13507 f2 = TREE_CHAIN (f2);
13508 if (!f1 || !f2)
13509 break;
13510 /* The fields must have the same name, offset and type. */
13511 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13512 || !gimple_compare_field_offset (f1, f2)
13513 || !gimple_canonical_types_compatible_p
13514 (TREE_TYPE (f1), TREE_TYPE (f2),
13515 trust_type_canonical))
13516 return false;
13519 /* If one aggregate has more fields than the other, they
13520 are not the same. */
13521 if (f1 || f2)
13522 return false;
13524 return true;
13527 default:
13528 /* Consider all types with language-specific trees in them mutually
13529 compatible. This is executed only from verify_type and false
13530 positives can be tolerated. */
13531 gcc_assert (!in_lto_p);
13532 return true;
13536 /* Verify type T. */
13538 void
13539 verify_type (const_tree t)
13541 bool error_found = false;
13542 tree mv = TYPE_MAIN_VARIANT (t);
13543 if (!mv)
13545 error ("main variant is not defined");
13546 error_found = true;
13548 else if (mv != TYPE_MAIN_VARIANT (mv))
13550 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13551 debug_tree (mv);
13552 error_found = true;
13554 else if (t != mv && !verify_type_variant (t, mv))
13555 error_found = true;
13557 tree ct = TYPE_CANONICAL (t);
13558 if (!ct)
13560 else if (TYPE_CANONICAL (ct) != ct)
13562 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13563 debug_tree (ct);
13564 error_found = true;
13566 /* Method and function types cannot be used to address memory and thus
13567 TYPE_CANONICAL really matters only for determining useless conversions.
13569 FIXME: C++ FE produces declarations of builtin functions that are not
13570 compatible with main variants. */
13571 else if (TREE_CODE (t) == FUNCTION_TYPE)
13573 else if (t != ct
13574 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13575 with variably sized arrays because their sizes may have been
13576 gimplified to different variables. */
13577 && !variably_modified_type_p (ct, NULL)
13578 && !gimple_canonical_types_compatible_p (t, ct, false)
13579 && COMPLETE_TYPE_P (t))
13581 error ("%<TYPE_CANONICAL%> is not compatible");
13582 debug_tree (ct);
13583 error_found = true;
13586 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13587 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13589 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
13590 debug_tree (ct);
13591 error_found = true;
13593 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13595 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
13596 debug_tree (ct);
13597 debug_tree (TYPE_MAIN_VARIANT (ct));
13598 error_found = true;
13602 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13603 if (RECORD_OR_UNION_TYPE_P (t))
13605 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13606 and dangles the pointer from time to time. */
13607 if (TYPE_VFIELD (t)
13608 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13609 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13611 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
13612 debug_tree (TYPE_VFIELD (t));
13613 error_found = true;
13616 else if (TREE_CODE (t) == POINTER_TYPE)
13618 if (TYPE_NEXT_PTR_TO (t)
13619 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13621 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
13622 debug_tree (TYPE_NEXT_PTR_TO (t));
13623 error_found = true;
13626 else if (TREE_CODE (t) == REFERENCE_TYPE)
13628 if (TYPE_NEXT_REF_TO (t)
13629 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13631 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
13632 debug_tree (TYPE_NEXT_REF_TO (t));
13633 error_found = true;
13636 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13637 || TREE_CODE (t) == FIXED_POINT_TYPE)
13639 /* FIXME: The following check should pass:
13640 useless_type_conversion_p (const_cast <tree> (t),
13641 TREE_TYPE (TYPE_MIN_VALUE (t)))
13642 but does not for C sizetypes in LTO. */
13645 /* Check various uses of TYPE_MAX_VALUE_RAW. */
13646 if (RECORD_OR_UNION_TYPE_P (t))
13648 if (!TYPE_BINFO (t))
13650 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13652 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
13653 debug_tree (TYPE_BINFO (t));
13654 error_found = true;
13656 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
13658 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
13659 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13660 error_found = true;
13663 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13665 if (TYPE_METHOD_BASETYPE (t)
13666 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13667 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13669 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
13670 debug_tree (TYPE_METHOD_BASETYPE (t));
13671 error_found = true;
13674 else if (TREE_CODE (t) == OFFSET_TYPE)
13676 if (TYPE_OFFSET_BASETYPE (t)
13677 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13678 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13680 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
13681 debug_tree (TYPE_OFFSET_BASETYPE (t));
13682 error_found = true;
13685 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13686 || TREE_CODE (t) == FIXED_POINT_TYPE)
13688 /* FIXME: The following check should pass:
13689 useless_type_conversion_p (const_cast <tree> (t),
13690 TREE_TYPE (TYPE_MAX_VALUE (t)))
13691 but does not for C sizetypes in LTO. */
13693 else if (TREE_CODE (t) == ARRAY_TYPE)
13695 if (TYPE_ARRAY_MAX_SIZE (t)
13696 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13698 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
13699 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13700 error_found = true;
13703 else if (TYPE_MAX_VALUE_RAW (t))
13705 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
13706 debug_tree (TYPE_MAX_VALUE_RAW (t));
13707 error_found = true;
13710 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13712 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
13713 debug_tree (TYPE_LANG_SLOT_1 (t));
13714 error_found = true;
13717 /* Check various uses of TYPE_VALUES_RAW. */
13718 if (TREE_CODE (t) == ENUMERAL_TYPE)
13719 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13721 tree value = TREE_VALUE (l);
13722 tree name = TREE_PURPOSE (l);
13724 /* C FE produces INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13725 CONST_DECL of ENUMERAL_TYPE. */
13726 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13728 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
13729 debug_tree (value);
13730 debug_tree (name);
13731 error_found = true;
13733 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13734 && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
13735 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13737 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
13738 "to the enum");
13739 debug_tree (value);
13740 debug_tree (name);
13741 error_found = true;
13743 if (TREE_CODE (name) != IDENTIFIER_NODE)
13745 error ("enum value name is not %<IDENTIFIER_NODE%>");
13746 debug_tree (value);
13747 debug_tree (name);
13748 error_found = true;
13751 else if (TREE_CODE (t) == ARRAY_TYPE)
13753 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13755 error ("array %<TYPE_DOMAIN%> is not integer type");
13756 debug_tree (TYPE_DOMAIN (t));
13757 error_found = true;
13760 else if (RECORD_OR_UNION_TYPE_P (t))
13762 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13764 error ("%<TYPE_FIELDS%> defined in incomplete type");
13765 error_found = true;
13767 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13769 /* TODO: verify properties of decls. */
13770 if (TREE_CODE (fld) == FIELD_DECL)
13772 else if (TREE_CODE (fld) == TYPE_DECL)
13774 else if (TREE_CODE (fld) == CONST_DECL)
13776 else if (VAR_P (fld))
13778 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13780 else if (TREE_CODE (fld) == USING_DECL)
13782 else if (TREE_CODE (fld) == FUNCTION_DECL)
13784 else
13786 error ("wrong tree in %<TYPE_FIELDS%> list");
13787 debug_tree (fld);
13788 error_found = true;
13792 else if (TREE_CODE (t) == INTEGER_TYPE
13793 || TREE_CODE (t) == BOOLEAN_TYPE
13794 || TREE_CODE (t) == OFFSET_TYPE
13795 || TREE_CODE (t) == REFERENCE_TYPE
13796 || TREE_CODE (t) == NULLPTR_TYPE
13797 || TREE_CODE (t) == POINTER_TYPE)
13799 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13801 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
13802 "is %p",
13803 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13804 error_found = true;
13806 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13808 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
13809 debug_tree (TYPE_CACHED_VALUES (t));
13810 error_found = true;
13812 /* Verify just enough of the cache to ensure that no one copied it to a
13813 new type. All copying should go through copy_node, which should clear it. */
13814 else if (TYPE_CACHED_VALUES_P (t))
13816 int i;
13817 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13818 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13819 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13821 error ("wrong %<TYPE_CACHED_VALUES%> entry");
13822 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13823 error_found = true;
13824 break;
13828 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13829 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13831 /* C++ FE uses TREE_PURPOSE to store initial values. */
13832 if (TREE_PURPOSE (l) && in_lto_p)
13834 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
13835 debug_tree (l);
13836 error_found = true;
13838 if (!TYPE_P (TREE_VALUE (l)))
13840 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
13841 debug_tree (l);
13842 error_found = true;
13845 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13847 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
13848 debug_tree (TYPE_VALUES_RAW (t));
13849 error_found = true;
13851 if (TREE_CODE (t) != INTEGER_TYPE
13852 && TREE_CODE (t) != BOOLEAN_TYPE
13853 && TREE_CODE (t) != OFFSET_TYPE
13854 && TREE_CODE (t) != REFERENCE_TYPE
13855 && TREE_CODE (t) != NULLPTR_TYPE
13856 && TREE_CODE (t) != POINTER_TYPE
13857 && TYPE_CACHED_VALUES_P (t))
13859 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
13860 error_found = true;
13863 /* ipa-devirt assumes that TYPE_METHOD_BASETYPE is always a
13864 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
13865 of a type. */
13866 if (TREE_CODE (t) == METHOD_TYPE
13867 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13869 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
13870 error_found = true;
13873 if (error_found)
13875 debug_tree (const_cast <tree> (t));
13876 internal_error ("%qs failed", __func__);
13881 /* Return 1 if ARG interpreted as signed in its precision is known to be
13882 always positive or 2 if ARG is known to be always negative, or 3 if
13883 ARG may be positive or negative. */
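/* Illustrative sketch (added commentary, not part of the original code):
   given GIMPLE along the lines of

     unsigned char uc_2;
     int i_1 = (int) uc_2;

   calling get_range_pos_neg on i_1 follows the defining conversion, sees a
   zero extension from a narrower unsigned type and returns 1; a signed
   argument with no usable range information falls through to 3.  */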
13885 int
13886 get_range_pos_neg (tree arg)
13888 if (arg == error_mark_node)
13889 return 3;
13891 int prec = TYPE_PRECISION (TREE_TYPE (arg));
13892 int cnt = 0;
13893 if (TREE_CODE (arg) == INTEGER_CST)
13895 wide_int w = wi::sext (wi::to_wide (arg), prec);
13896 if (wi::neg_p (w))
13897 return 2;
13898 else
13899 return 1;
13901 while (CONVERT_EXPR_P (arg)
13902 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
13903 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
13905 arg = TREE_OPERAND (arg, 0);
13906 /* A narrower value zero-extended into a wider type
13907 will always result in a positive value. */
13908 if (TYPE_UNSIGNED (TREE_TYPE (arg))
13909 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
13910 return 1;
13911 prec = TYPE_PRECISION (TREE_TYPE (arg));
13912 if (++cnt > 30)
13913 return 3;
13916 if (TREE_CODE (arg) != SSA_NAME)
13917 return 3;
13918 value_range r;
13919 while (!get_global_range_query ()->range_of_expr (r, arg) || r.kind () != VR_RANGE)
13921 gimple *g = SSA_NAME_DEF_STMT (arg);
13922 if (is_gimple_assign (g)
13923 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
13925 tree t = gimple_assign_rhs1 (g);
13926 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
13927 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
13929 if (TYPE_UNSIGNED (TREE_TYPE (t))
13930 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
13931 return 1;
13932 prec = TYPE_PRECISION (TREE_TYPE (t));
13933 arg = t;
13934 if (++cnt > 30)
13935 return 3;
13936 continue;
13939 return 3;
13941 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
13943 /* For unsigned values, the "positive" range comes
13944 below the "negative" range. */
13945 if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
13946 return 1;
13947 if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
13948 return 2;
13950 else
13952 if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
13953 return 1;
13954 if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
13955 return 2;
13957 return 3;
13963 /* Return true if ARG is marked with the nonnull attribute in the
13964 current function signature. */
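/* Illustrative example (hypothetical declaration, added commentary): when
   compiling the body of

     __attribute__ ((nonnull (1))) void f (char *p, char *q);

   nonnull_arg_p returns true for the PARM_DECL of P (argument 1 is listed
   in the attribute) and false for Q.  */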
13966 bool
13967 nonnull_arg_p (const_tree arg)
13969 tree t, attrs, fntype;
13970 unsigned HOST_WIDE_INT arg_num;
13972 gcc_assert (TREE_CODE (arg) == PARM_DECL
13973 && (POINTER_TYPE_P (TREE_TYPE (arg))
13974 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
13976 /* The static chain decl is always non-null. */
13977 if (arg == cfun->static_chain_decl)
13978 return true;
13980 /* The THIS argument of a method is always non-NULL. */
13981 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13982 && arg == DECL_ARGUMENTS (cfun->decl)
13983 && flag_delete_null_pointer_checks)
13984 return true;
13986 /* Values passed by reference are always non-NULL. */
13987 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13988 && flag_delete_null_pointer_checks)
13989 return true;
13991 fntype = TREE_TYPE (cfun->decl);
13992 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13994 attrs = lookup_attribute ("nonnull", attrs);
13996 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13997 if (attrs == NULL_TREE)
13998 return false;
14000 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14001 if (TREE_VALUE (attrs) == NULL_TREE)
14002 return true;
14004 /* Get the position number for ARG in the function signature. */
14005 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14007 t = DECL_CHAIN (t), arg_num++)
14009 if (t == arg)
14010 break;
14013 gcc_assert (t == arg);
14015 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14016 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14018 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14019 return true;
14023 return false;
14026 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14027 information. */
14029 location_t
14030 set_block (location_t loc, tree block)
14032 location_t pure_loc = get_pure_location (loc);
14033 source_range src_range = get_range_from_loc (line_table, loc);
14034 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14037 location_t
14038 set_source_range (tree expr, location_t start, location_t finish)
14040 source_range src_range;
14041 src_range.m_start = start;
14042 src_range.m_finish = finish;
14043 return set_source_range (expr, src_range);
14046 location_t
14047 set_source_range (tree expr, source_range src_range)
14049 if (!EXPR_P (expr))
14050 return UNKNOWN_LOCATION;
14052 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14053 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14054 pure_loc,
14055 src_range,
14056 NULL);
14057 SET_EXPR_LOCATION (expr, adhoc);
14058 return adhoc;
14061 /* Return EXPR, potentially wrapped with a location wrapper node at LOC,
14062 if !CAN_HAVE_LOCATION_P (expr).
14064 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14065 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14067 Wrapper nodes can be identified using location_wrapper_p. */
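/* Rough usage sketch, mirroring test_location_wrappers below:

     tree cst = build_int_cst (integer_type_node, 42);
     tree wrapped = maybe_wrap_with_location (cst, loc);

   Assuming LOC is not UNKNOWN_LOCATION, WRAPPED is a NON_LVALUE_EXPR with
   EXPR_LOCATION_WRAPPER_P set, and tree_strip_any_location_wrapper or
   STRIP_NOPS recovers CST.  */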
14069 tree
14070 maybe_wrap_with_location (tree expr, location_t loc)
14072 if (expr == NULL)
14073 return NULL;
14074 if (loc == UNKNOWN_LOCATION)
14075 return expr;
14076 if (CAN_HAVE_LOCATION_P (expr))
14077 return expr;
14078 /* We should only be adding wrappers for constants and for decls,
14079 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14080 gcc_assert (CONSTANT_CLASS_P (expr)
14081 || DECL_P (expr)
14082 || EXCEPTIONAL_CLASS_P (expr));
14084 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14085 any impact of the wrapper nodes. */
14086 if (EXCEPTIONAL_CLASS_P (expr))
14087 return expr;
14089 /* Compiler-generated temporary variables don't need a wrapper. */
14090 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14091 return expr;
14093 /* If any auto_suppress_location_wrappers are active, don't create
14094 wrappers. */
14095 if (suppress_location_wrappers > 0)
14096 return expr;
14098 tree_code code
14099 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14100 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14101 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14102 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14103 /* Mark this node as being a wrapper. */
14104 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14105 return wrapper;
14108 int suppress_location_wrappers;
14110 /* Return the name of combined function FN, for debugging purposes. */
14112 const char *
14113 combined_fn_name (combined_fn fn)
14115 if (builtin_fn_p (fn))
14117 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14118 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14120 else
14121 return internal_fn_name (as_internal_fn (fn));
14124 /* Return a bitmap with a bit set corresponding to each argument in
14125 a function call type FNTYPE declared with attribute nonnull,
14126 or null if none of the function's arguments is nonnull. The caller
14127 must free the bitmap. */
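/* Illustrative example (hypothetical declaration, added commentary): for

     __attribute__ ((nonnull (1, 3))) void f (void *a, void *b, void *c);

   get_nonnull_args on TREE_TYPE (f) returns a bitmap with bits 0 and 2 set
   (positions are converted to zero-based indices); a bare "nonnull" with no
   arguments yields an empty but non-null bitmap, and a METHOD_TYPE always
   has bit 0 set for the implicit this pointer.  */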
14129 bitmap
14130 get_nonnull_args (const_tree fntype)
14132 if (fntype == NULL_TREE)
14133 return NULL;
14135 bitmap argmap = NULL;
14136 if (TREE_CODE (fntype) == METHOD_TYPE)
14138 /* The this pointer in C++ non-static member functions is
14139 implicitly nonnull whether or not it's declared as such. */
14140 argmap = BITMAP_ALLOC (NULL);
14141 bitmap_set_bit (argmap, 0);
14144 tree attrs = TYPE_ATTRIBUTES (fntype);
14145 if (!attrs)
14146 return argmap;
14148 /* A function declaration can specify multiple attribute nonnull,
14149 each with zero or more arguments. The loop below creates a bitmap
14150 representing a union of all the arguments. An empty (but non-null)
14151 bitmap means that all arguments have been declared nonnull. */
14152 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14154 attrs = lookup_attribute ("nonnull", attrs);
14155 if (!attrs)
14156 break;
14158 if (!argmap)
14159 argmap = BITMAP_ALLOC (NULL);
14161 if (!TREE_VALUE (attrs))
14163 /* Clear the bitmap in case a previous attribute nonnull
14164 set it and this one overrides it for all arguments. */
14165 bitmap_clear (argmap);
14166 return argmap;
14169 /* Iterate over the indices of the arguments declared nonnull
14170 and set a bit for each. */
14171 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14173 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14174 bitmap_set_bit (argmap, val);
14178 return argmap;
14181 /* Returns true if TYPE is a type where it and all of its subobjects
14182 (recursively) are of structure, union, or array type. */
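/* For instance (sketch, added commentary), a C++ type such as

     struct empty { };
     struct holder { empty e[4]; };

   is considered empty here: any artificial padding fields are skipped via
   DECL_PADDING_P, and arrays of empty element types are themselves empty.
   Adding a scalar member such as "int i;" makes the predicate false.  */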
14184 bool
14185 is_empty_type (const_tree type)
14187 if (RECORD_OR_UNION_TYPE_P (type))
14189 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14190 if (TREE_CODE (field) == FIELD_DECL
14191 && !DECL_PADDING_P (field)
14192 && !is_empty_type (TREE_TYPE (field)))
14193 return false;
14194 return true;
14196 else if (TREE_CODE (type) == ARRAY_TYPE)
14197 return (integer_minus_onep (array_type_nelts (type))
14198 || TYPE_DOMAIN (type) == NULL_TREE
14199 || is_empty_type (TREE_TYPE (type)));
14200 return false;
14203 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14204 that shouldn't be passed via the stack. */
14206 bool
14207 default_is_empty_record (const_tree type)
14209 if (!abi_version_at_least (12))
14210 return false;
14212 if (type == error_mark_node)
14213 return false;
14215 if (TREE_ADDRESSABLE (type))
14216 return false;
14218 return is_empty_type (TYPE_MAIN_VARIANT (type));
14221 /* Determine whether TYPE is a structure with a flexible array member,
14222 or a union containing such a structure (possibly recursively). */
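/* The classic case being detected (added commentary) is a C99 flexible
   array member as the last field, e.g.

     struct msg { size_t len; char data[]; };

   DATA's array type has no TYPE_SIZE and a domain without TYPE_MAX_VALUE,
   so the RECORD_TYPE check below returns true; a union containing such a
   struct is handled by the recursive UNION_TYPE case.  */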
14224 bool
14225 flexible_array_type_p (const_tree type)
14227 tree x, last;
14228 switch (TREE_CODE (type))
14230 case RECORD_TYPE:
14231 last = NULL_TREE;
14232 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14233 if (TREE_CODE (x) == FIELD_DECL)
14234 last = x;
14235 if (last == NULL_TREE)
14236 return false;
14237 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14238 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14239 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14240 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14241 return true;
14242 return false;
14243 case UNION_TYPE:
14244 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14246 if (TREE_CODE (x) == FIELD_DECL
14247 && flexible_array_type_p (TREE_TYPE (x)))
14248 return true;
14250 return false;
14251 default:
14252 return false;
14256 /* Like int_size_in_bytes, but handle empty records specially. */
14258 HOST_WIDE_INT
14259 arg_int_size_in_bytes (const_tree type)
14261 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14264 /* Like size_in_bytes, but handle empty records specially. */
14266 tree
14267 arg_size_in_bytes (const_tree type)
14269 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14272 /* Return true if an expression with CODE has to have the same result type as
14273 its first operand. */
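/* For example (added commentary), PLUS_EXPR and BIT_AND_EXPR qualify: the
   result of "x + y" has the type of X.  Comparison codes such as LT_EXPR
   are deliberately absent because their result is a boolean, not the type
   of the first operand.  */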
14275 bool
14276 expr_type_first_operand_type_p (tree_code code)
14278 switch (code)
14280 case NEGATE_EXPR:
14281 case ABS_EXPR:
14282 case BIT_NOT_EXPR:
14283 case PAREN_EXPR:
14284 case CONJ_EXPR:
14286 case PLUS_EXPR:
14287 case MINUS_EXPR:
14288 case MULT_EXPR:
14289 case TRUNC_DIV_EXPR:
14290 case CEIL_DIV_EXPR:
14291 case FLOOR_DIV_EXPR:
14292 case ROUND_DIV_EXPR:
14293 case TRUNC_MOD_EXPR:
14294 case CEIL_MOD_EXPR:
14295 case FLOOR_MOD_EXPR:
14296 case ROUND_MOD_EXPR:
14297 case RDIV_EXPR:
14298 case EXACT_DIV_EXPR:
14299 case MIN_EXPR:
14300 case MAX_EXPR:
14301 case BIT_IOR_EXPR:
14302 case BIT_XOR_EXPR:
14303 case BIT_AND_EXPR:
14305 case LSHIFT_EXPR:
14306 case RSHIFT_EXPR:
14307 case LROTATE_EXPR:
14308 case RROTATE_EXPR:
14309 return true;
14311 default:
14312 return false;
14316 /* Return a typenode for the "standard" C type with a given name. */
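/* Usage sketch (added commentary): get_typenode_from_name ("unsigned int")
   returns unsigned_type_node, an empty or null NAME returns NULL_TREE, and
   any other string hits gcc_unreachable, so callers are expected to pass
   only the standard C type names listed below.  */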
14317 tree
14318 get_typenode_from_name (const char *name)
14320 if (name == NULL || *name == '\0')
14321 return NULL_TREE;
14323 if (strcmp (name, "char") == 0)
14324 return char_type_node;
14325 if (strcmp (name, "unsigned char") == 0)
14326 return unsigned_char_type_node;
14327 if (strcmp (name, "signed char") == 0)
14328 return signed_char_type_node;
14330 if (strcmp (name, "short int") == 0)
14331 return short_integer_type_node;
14332 if (strcmp (name, "short unsigned int") == 0)
14333 return short_unsigned_type_node;
14335 if (strcmp (name, "int") == 0)
14336 return integer_type_node;
14337 if (strcmp (name, "unsigned int") == 0)
14338 return unsigned_type_node;
14340 if (strcmp (name, "long int") == 0)
14341 return long_integer_type_node;
14342 if (strcmp (name, "long unsigned int") == 0)
14343 return long_unsigned_type_node;
14345 if (strcmp (name, "long long int") == 0)
14346 return long_long_integer_type_node;
14347 if (strcmp (name, "long long unsigned int") == 0)
14348 return long_long_unsigned_type_node;
14350 gcc_unreachable ();
14353 /* List of pointer types used to declare builtins before we have seen their
14354 real declaration.
14356 Keep the size up to date in tree.h! */
14357 const builtin_structptr_type builtin_structptr_types[6] =
14359 { fileptr_type_node, ptr_type_node, "FILE" },
14360 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14361 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14362 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14363 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14364 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14367 /* Return the maximum object size. */
14369 tree
14370 max_object_size (void)
14372 /* To do: Make this a configurable parameter. */
14373 return TYPE_MAX_VALUE (ptrdiff_type_node);
14376 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14377 parameter default to false and that weeds out error_mark_node. */
14379 bool
14380 verify_type_context (location_t loc, type_context_kind context,
14381 const_tree type, bool silent_p)
14383 if (type == error_mark_node)
14384 return true;
14386 gcc_assert (TYPE_P (type));
14387 return (!targetm.verify_type_context
14388 || targetm.verify_type_context (loc, context, type, silent_p));
14391 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14392 delete operators. Return false if they may or may not name such
14393 a pair and, when nonnull, set *PCERTAIN to true if they certainly
14394 do not. */
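/* Illustrative pairs (added commentary): "_Znwm" (operator new taking
   size_t) matches "_ZdlPv" and the sized form "_ZdlPvm", and the array
   form "_Znam" matches "_ZdaPv"/"_ZdaPvm".  Mixing the scalar and array
   kinds, e.g. "_Znwm" with "_ZdaPv", is rejected with *PCERTAIN set to
   true because the mangling itself proves the mismatch.  */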
14396 bool
14397 valid_new_delete_pair_p (tree new_asm, tree delete_asm,
14398 bool *pcertain /* = NULL */)
14400 bool certain;
14401 if (!pcertain)
14402 pcertain = &certain;
14404 const char *new_name = IDENTIFIER_POINTER (new_asm);
14405 const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14406 unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14407 unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14409 /* The following failures are due to invalid names so they're not
14410 considered certain mismatches. */
14411 *pcertain = false;
14413 if (new_len < 5 || delete_len < 6)
14414 return false;
14415 if (new_name[0] == '_')
14416 ++new_name, --new_len;
14417 if (new_name[0] == '_')
14418 ++new_name, --new_len;
14419 if (delete_name[0] == '_')
14420 ++delete_name, --delete_len;
14421 if (delete_name[0] == '_')
14422 ++delete_name, --delete_len;
14423 if (new_len < 4 || delete_len < 5)
14424 return false;
14426 /* The following failures are due to names of user-defined operators
14427 so they're also not considered certain mismatches. */
14429 /* *_len is now just the length after initial underscores. */
14430 if (new_name[0] != 'Z' || new_name[1] != 'n')
14431 return false;
14432 if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14433 return false;
14435 /* The following failures are certain mismatches. */
14436 *pcertain = true;
14438 /* _Znw must match _Zdl, _Zna must match _Zda. */
14439 if ((new_name[2] != 'w' || delete_name[2] != 'l')
14440 && (new_name[2] != 'a' || delete_name[2] != 'a'))
14441 return false;
14442 /* 'j', 'm' and 'y' correspond to size_t. */
14443 if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14444 return false;
14445 if (delete_name[3] != 'P' || delete_name[4] != 'v')
14446 return false;
14447 if (new_len == 4
14448 || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14450 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14451 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14452 if (delete_len == 5)
14453 return true;
14454 if (delete_len == 6 && delete_name[5] == new_name[3])
14455 return true;
14456 if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14457 return true;
14459 else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14460 || (new_len == 33
14461 && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14463 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14464 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
14465 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14466 if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
14467 return true;
14468 if (delete_len == 21
14469 && delete_name[5] == new_name[3]
14470 && !memcmp (delete_name + 6, "St11align_val_t", 15))
14471 return true;
14472 if (delete_len == 34
14473 && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
14474 return true;
14477 /* The negative result is conservative. */
14478 *pcertain = false;
14479 return false;
14482 /* Return the zero-based number corresponding to the argument being
14483 deallocated if FNDECL is a deallocation function or an out-of-bounds
14484 value if it isn't. */
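/* Summary of the cases below (added commentary): BUILT_IN_FREE,
   BUILT_IN_REALLOC and replaceable operator delete all deallocate their
   first argument, so 0 is returned; non-inlined placement delete
   ("_ZdlPvS_" / "_ZdaPvS_") and unrelated functions yield UINT_MAX; a decl
   carrying the internal "*dealloc" attribute returns the recorded one-based
   argument position converted to a zero-based index.  */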
14486 unsigned
14487 fndecl_dealloc_argno (tree fndecl)
14489 /* A call to operator delete isn't recognized as one to a built-in. */
14490 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
14492 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
14493 return 0;
14495 /* Avoid placement delete that hasn't been inlined. */
14496 tree fname = DECL_ASSEMBLER_NAME (fndecl);
14497 if (id_equal (fname, "_ZdlPvS_") // ordinary form
14498 || id_equal (fname, "_ZdaPvS_")) // array form
14499 return UINT_MAX;
14500 return 0;
14503 /* TODO: Handle user-defined functions with attribute malloc? Handle
14504 known non-built-ins like fopen? */
14505 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14507 switch (DECL_FUNCTION_CODE (fndecl))
14509 case BUILT_IN_FREE:
14510 case BUILT_IN_REALLOC:
14511 return 0;
14512 default:
14513 break;
14515 return UINT_MAX;
14518 tree attrs = DECL_ATTRIBUTES (fndecl);
14519 if (!attrs)
14520 return UINT_MAX;
14522 for (tree atfree = attrs;
14523 (atfree = lookup_attribute ("*dealloc", atfree));
14524 atfree = TREE_CHAIN (atfree))
14526 tree alloc = TREE_VALUE (atfree);
14527 if (!alloc)
14528 continue;
14530 tree pos = TREE_CHAIN (alloc);
14531 if (!pos)
14532 return 0;
14534 pos = TREE_VALUE (pos);
14535 return TREE_INT_CST_LOW (pos) - 1;
14538 return UINT_MAX;
14541 /* If EXPR refers to a character array or pointer declared attribute
14542 nonstring, return a decl for that array or pointer and set *REF
14543 to the referenced enclosing object or pointer. Otherwise return
14544 null. */
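/* Example of a declaration this recognizes (added commentary):

     __attribute__ ((nonstring)) char buf[8];

   Given an SSA name, ADDR_EXPR or component reference ultimately based on
   BUF, the function returns BUF's decl and stores the enclosing reference
   in *REF; anything not declared nonstring yields NULL_TREE.  */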
14546 tree
14547 get_attr_nonstring_decl (tree expr, tree *ref)
14549 tree decl = expr;
14550 tree var = NULL_TREE;
14551 if (TREE_CODE (decl) == SSA_NAME)
14553 gimple *def = SSA_NAME_DEF_STMT (decl);
14555 if (is_gimple_assign (def))
14557 tree_code code = gimple_assign_rhs_code (def);
14558 if (code == ADDR_EXPR
14559 || code == COMPONENT_REF
14560 || code == VAR_DECL)
14561 decl = gimple_assign_rhs1 (def);
14563 else
14564 var = SSA_NAME_VAR (decl);
14567 if (TREE_CODE (decl) == ADDR_EXPR)
14568 decl = TREE_OPERAND (decl, 0);
14570 /* To simplify calling code, store the referenced DECL regardless of
14571 the attribute determined below, but avoid storing the SSA_NAME_VAR
14572 obtained above (it's not useful for dataflow purposes). */
14573 if (ref)
14574 *ref = decl;
14576 /* Use the SSA_NAME_VAR that was determined above to see if it's
14577 declared nonstring. Otherwise drill down into the referenced
14578 DECL. */
14579 if (var)
14580 decl = var;
14581 else if (TREE_CODE (decl) == ARRAY_REF)
14582 decl = TREE_OPERAND (decl, 0);
14583 else if (TREE_CODE (decl) == COMPONENT_REF)
14584 decl = TREE_OPERAND (decl, 1);
14585 else if (TREE_CODE (decl) == MEM_REF)
14586 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
14588 if (DECL_P (decl)
14589 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
14590 return decl;
14592 return NULL_TREE;
14595 /* Return the length of the attribute-names string if the ARGLIST chain
14596 has more than one entry, or -1 otherwise. */
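/* Illustrative sketch (hypothetical attribute, added commentary): for

     __attribute__ ((target_clones ("avx2", "arch=skylake", "default")))

   ARGLIST holds three strings with no embedded commas, so ARGNUM becomes 3
   and the result is the sum of the string lengths plus one separator each,
   i.e. 5 + 13 + 8 = 26.  */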
14598 int
14599 get_target_clone_attr_len (tree arglist)
14601 tree arg;
14602 int str_len_sum = 0;
14603 int argnum = 0;
14605 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
14607 const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
14608 size_t len = strlen (str);
14609 str_len_sum += len + 1;
14610 for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
14611 argnum++;
14612 argnum++;
14614 if (argnum <= 1)
14615 return -1;
14616 return str_len_sum;
14619 #if CHECKING_P
14621 namespace selftest {
14623 /* Selftests for tree. */
14625 /* Verify that integer constants are sane. */
14627 static void
14628 test_integer_constants ()
14630 ASSERT_TRUE (integer_type_node != NULL);
14631 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14633 tree type = integer_type_node;
14635 tree zero = build_zero_cst (type);
14636 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14637 ASSERT_EQ (type, TREE_TYPE (zero));
14639 tree one = build_int_cst (type, 1);
14640 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
14641 ASSERT_EQ (type, TREE_TYPE (one));
14644 /* Verify identifiers. */
14646 static void
14647 test_identifiers ()
14649 tree identifier = get_identifier ("foo");
14650 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14651 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14654 /* Verify LABEL_DECL. */
14656 static void
14657 test_labels ()
14659 tree identifier = get_identifier ("err");
14660 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14661 identifier, void_type_node);
14662 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14663 ASSERT_FALSE (FORCED_LABEL (label_decl));
14666 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14667 are given by VALS. */
14669 static tree
14670 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
14672 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
14673 tree_vector_builder builder (type, vals.length (), 1);
14674 builder.splice (vals);
14675 return builder.build ();
14678 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14680 static void
14681 check_vector_cst (const vec<tree> &expected, tree actual)
14683 ASSERT_KNOWN_EQ (expected.length (),
14684 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
14685 for (unsigned int i = 0; i < expected.length (); ++i)
14686 ASSERT_EQ (wi::to_wide (expected[i]),
14687 wi::to_wide (vector_cst_elt (actual, i)));
14690 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14691 and that its elements match EXPECTED. */
14693 static void
14694 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
14695 unsigned int npatterns)
14697 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14698 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
14699 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
14700 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
14701 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14702 check_vector_cst (expected, actual);
14705 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14706 and NPATTERNS background elements, and that its elements match
14707 EXPECTED. */
14709 static void
14710 check_vector_cst_fill (const vec<tree> &expected, tree actual,
14711 unsigned int npatterns)
14713 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14714 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
14715 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
14716 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14717 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
14718 check_vector_cst (expected, actual);
14721 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14722 and that its elements match EXPECTED. */
14724 static void
14725 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
14726 unsigned int npatterns)
14728 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
14729 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
14730 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
14731 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
14732 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
14733 check_vector_cst (expected, actual);
14736 /* Test the creation of VECTOR_CSTs. */
14738 static void
14739 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
14741 auto_vec<tree, 8> elements (8);
14742 elements.quick_grow (8);
14743 tree element_type = build_nonstandard_integer_type (16, true);
14744 tree vector_type = build_vector_type (element_type, 8);
14746 /* Test a simple linear series with a base of 0 and a step of 1:
14747 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
14748 for (unsigned int i = 0; i < 8; ++i)
14749 elements[i] = build_int_cst (element_type, i);
14750 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
14751 check_vector_cst_stepped (elements, vector, 1);
14753 /* Try the same with the first element replaced by 100:
14754 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
14755 elements[0] = build_int_cst (element_type, 100);
14756 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14757 check_vector_cst_stepped (elements, vector, 1);
14759 /* Try a series that wraps around.
14760 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
14761 for (unsigned int i = 1; i < 8; ++i)
14762 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
14763 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14764 check_vector_cst_stepped (elements, vector, 1);
14766 /* Try a downward series:
14767 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
14768 for (unsigned int i = 1; i < 8; ++i)
14769 elements[i] = build_int_cst (element_type, 80 - i);
14770 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14771 check_vector_cst_stepped (elements, vector, 1);
14773 /* Try two interleaved series with different bases and steps:
14774 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
14775 elements[1] = build_int_cst (element_type, 53);
14776 for (unsigned int i = 2; i < 8; i += 2)
14778 elements[i] = build_int_cst (element_type, 70 - i * 2);
14779 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
14781 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14782 check_vector_cst_stepped (elements, vector, 2);
14784 /* Try a duplicated value:
14785 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
14786 for (unsigned int i = 1; i < 8; ++i)
14787 elements[i] = elements[0];
14788 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14789 check_vector_cst_duplicate (elements, vector, 1);
14791 /* Try an interleaved duplicated value:
14792 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
14793 elements[1] = build_int_cst (element_type, 55);
14794 for (unsigned int i = 2; i < 8; ++i)
14795 elements[i] = elements[i - 2];
14796 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14797 check_vector_cst_duplicate (elements, vector, 2);
14799 /* Try a duplicated value with 2 exceptions
14800 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
14801 elements[0] = build_int_cst (element_type, 41);
14802 elements[1] = build_int_cst (element_type, 97);
14803 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14804 check_vector_cst_fill (elements, vector, 2);
14806 /* Try with and without a step
14807 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
14808 for (unsigned int i = 3; i < 8; i += 2)
14809 elements[i] = build_int_cst (element_type, i * 7);
14810 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14811 check_vector_cst_stepped (elements, vector, 2);
14813 /* Try a fully-general constant:
14814 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
14815 elements[5] = build_int_cst (element_type, 9990);
14816 vector = build_vector (vector_type, elements PASS_MEM_STAT);
14817 check_vector_cst_fill (elements, vector, 4);
14820 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
14821 Helper function for test_location_wrappers, to deal with STRIP_NOPS
14822 modifying its argument in-place. */
14824 static void
14825 check_strip_nops (tree node, tree expected)
14827 STRIP_NOPS (node);
14828 ASSERT_EQ (expected, node);
14831 /* Verify location wrappers. */
14833 static void
14834 test_location_wrappers ()
14836 location_t loc = BUILTINS_LOCATION;
14838 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
14840 /* Wrapping a constant. */
14841 tree int_cst = build_int_cst (integer_type_node, 42);
14842 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
14843 ASSERT_FALSE (location_wrapper_p (int_cst));
14845 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
14846 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
14847 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
14848 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
14850 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
14851 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
14853 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
14854 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
14855 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
14856 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
14858 /* Wrapping a STRING_CST. */
14859 tree string_cst = build_string (4, "foo");
14860 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
14861 ASSERT_FALSE (location_wrapper_p (string_cst));
14863 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
14864 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
14865 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
14866 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
14867 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
14870 /* Wrapping a variable. */
14871 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
14872 get_identifier ("some_int_var"),
14873 integer_type_node);
14874 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
14875 ASSERT_FALSE (location_wrapper_p (int_var));
14877 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
14878 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
14879 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
14880 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
14882 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
14883 wrapper. */
14884 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
14885 ASSERT_FALSE (location_wrapper_p (r_cast));
14886 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
14888 /* Verify that STRIP_NOPS removes wrappers. */
14889 check_strip_nops (wrapped_int_cst, int_cst);
14890 check_strip_nops (wrapped_string_cst, string_cst);
14891 check_strip_nops (wrapped_int_var, int_var);
14894 /* Test various tree predicates. Verify that location wrappers don't
14895 affect the results. */
14897 static void
14898 test_predicates ()
14900 /* Build various constants and wrappers around them. */
14902 location_t loc = BUILTINS_LOCATION;
14904 tree i_0 = build_int_cst (integer_type_node, 0);
14905 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
14907 tree i_1 = build_int_cst (integer_type_node, 1);
14908 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
14910 tree i_m1 = build_int_cst (integer_type_node, -1);
14911 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
14913 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
14914 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
14915 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
14916 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
14917 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
14918 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
14920 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
14921 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
14922 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
14924 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
14925 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
14926 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
14928 /* TODO: vector constants. */
14930 /* Test integer_onep. */
14931 ASSERT_FALSE (integer_onep (i_0));
14932 ASSERT_FALSE (integer_onep (wr_i_0));
14933 ASSERT_TRUE (integer_onep (i_1));
14934 ASSERT_TRUE (integer_onep (wr_i_1));
14935 ASSERT_FALSE (integer_onep (i_m1));
14936 ASSERT_FALSE (integer_onep (wr_i_m1));
14937 ASSERT_FALSE (integer_onep (f_0));
14938 ASSERT_FALSE (integer_onep (wr_f_0));
14939 ASSERT_FALSE (integer_onep (f_1));
14940 ASSERT_FALSE (integer_onep (wr_f_1));
14941 ASSERT_FALSE (integer_onep (f_m1));
14942 ASSERT_FALSE (integer_onep (wr_f_m1));
14943 ASSERT_FALSE (integer_onep (c_i_0));
14944 ASSERT_TRUE (integer_onep (c_i_1));
14945 ASSERT_FALSE (integer_onep (c_i_m1));
14946 ASSERT_FALSE (integer_onep (c_f_0));
14947 ASSERT_FALSE (integer_onep (c_f_1));
14948 ASSERT_FALSE (integer_onep (c_f_m1));
14950 /* Test integer_zerop. */
14951 ASSERT_TRUE (integer_zerop (i_0));
14952 ASSERT_TRUE (integer_zerop (wr_i_0));
14953 ASSERT_FALSE (integer_zerop (i_1));
14954 ASSERT_FALSE (integer_zerop (wr_i_1));
14955 ASSERT_FALSE (integer_zerop (i_m1));
14956 ASSERT_FALSE (integer_zerop (wr_i_m1));
14957 ASSERT_FALSE (integer_zerop (f_0));
14958 ASSERT_FALSE (integer_zerop (wr_f_0));
14959 ASSERT_FALSE (integer_zerop (f_1));
14960 ASSERT_FALSE (integer_zerop (wr_f_1));
14961 ASSERT_FALSE (integer_zerop (f_m1));
14962 ASSERT_FALSE (integer_zerop (wr_f_m1));
14963 ASSERT_TRUE (integer_zerop (c_i_0));
14964 ASSERT_FALSE (integer_zerop (c_i_1));
14965 ASSERT_FALSE (integer_zerop (c_i_m1));
14966 ASSERT_FALSE (integer_zerop (c_f_0));
14967 ASSERT_FALSE (integer_zerop (c_f_1));
14968 ASSERT_FALSE (integer_zerop (c_f_m1));
14970 /* Test integer_all_onesp. */
14971 ASSERT_FALSE (integer_all_onesp (i_0));
14972 ASSERT_FALSE (integer_all_onesp (wr_i_0));
14973 ASSERT_FALSE (integer_all_onesp (i_1));
14974 ASSERT_FALSE (integer_all_onesp (wr_i_1));
14975 ASSERT_TRUE (integer_all_onesp (i_m1));
14976 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
14977 ASSERT_FALSE (integer_all_onesp (f_0));
14978 ASSERT_FALSE (integer_all_onesp (wr_f_0));
14979 ASSERT_FALSE (integer_all_onesp (f_1));
14980 ASSERT_FALSE (integer_all_onesp (wr_f_1));
14981 ASSERT_FALSE (integer_all_onesp (f_m1));
14982 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
14983 ASSERT_FALSE (integer_all_onesp (c_i_0));
14984 ASSERT_FALSE (integer_all_onesp (c_i_1));
14985 ASSERT_FALSE (integer_all_onesp (c_i_m1));
14986 ASSERT_FALSE (integer_all_onesp (c_f_0));
14987 ASSERT_FALSE (integer_all_onesp (c_f_1));
14988 ASSERT_FALSE (integer_all_onesp (c_f_m1));
14990 /* Test integer_minus_onep. */
14991 ASSERT_FALSE (integer_minus_onep (i_0));
14992 ASSERT_FALSE (integer_minus_onep (wr_i_0));
14993 ASSERT_FALSE (integer_minus_onep (i_1));
14994 ASSERT_FALSE (integer_minus_onep (wr_i_1));
14995 ASSERT_TRUE (integer_minus_onep (i_m1));
14996 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
14997 ASSERT_FALSE (integer_minus_onep (f_0));
14998 ASSERT_FALSE (integer_minus_onep (wr_f_0));
14999 ASSERT_FALSE (integer_minus_onep (f_1));
15000 ASSERT_FALSE (integer_minus_onep (wr_f_1));
15001 ASSERT_FALSE (integer_minus_onep (f_m1));
15002 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15003 ASSERT_FALSE (integer_minus_onep (c_i_0));
15004 ASSERT_FALSE (integer_minus_onep (c_i_1));
15005 ASSERT_TRUE (integer_minus_onep (c_i_m1));
15006 ASSERT_FALSE (integer_minus_onep (c_f_0));
15007 ASSERT_FALSE (integer_minus_onep (c_f_1));
15008 ASSERT_FALSE (integer_minus_onep (c_f_m1));
15010 /* Test integer_each_onep. */
15011 ASSERT_FALSE (integer_each_onep (i_0));
15012 ASSERT_FALSE (integer_each_onep (wr_i_0));
15013 ASSERT_TRUE (integer_each_onep (i_1));
15014 ASSERT_TRUE (integer_each_onep (wr_i_1));
15015 ASSERT_FALSE (integer_each_onep (i_m1));
15016 ASSERT_FALSE (integer_each_onep (wr_i_m1));
15017 ASSERT_FALSE (integer_each_onep (f_0));
15018 ASSERT_FALSE (integer_each_onep (wr_f_0));
15019 ASSERT_FALSE (integer_each_onep (f_1));
15020 ASSERT_FALSE (integer_each_onep (wr_f_1));
15021 ASSERT_FALSE (integer_each_onep (f_m1));
15022 ASSERT_FALSE (integer_each_onep (wr_f_m1));
15023 ASSERT_FALSE (integer_each_onep (c_i_0));
15024 ASSERT_FALSE (integer_each_onep (c_i_1));
15025 ASSERT_FALSE (integer_each_onep (c_i_m1));
15026 ASSERT_FALSE (integer_each_onep (c_f_0));
15027 ASSERT_FALSE (integer_each_onep (c_f_1));
15028 ASSERT_FALSE (integer_each_onep (c_f_m1));
15030 /* Test integer_truep. */
15031 ASSERT_FALSE (integer_truep (i_0));
15032 ASSERT_FALSE (integer_truep (wr_i_0));
15033 ASSERT_TRUE (integer_truep (i_1));
15034 ASSERT_TRUE (integer_truep (wr_i_1));
15035 ASSERT_FALSE (integer_truep (i_m1));
15036 ASSERT_FALSE (integer_truep (wr_i_m1));
15037 ASSERT_FALSE (integer_truep (f_0));
15038 ASSERT_FALSE (integer_truep (wr_f_0));
15039 ASSERT_FALSE (integer_truep (f_1));
15040 ASSERT_FALSE (integer_truep (wr_f_1));
15041 ASSERT_FALSE (integer_truep (f_m1));
15042 ASSERT_FALSE (integer_truep (wr_f_m1));
15043 ASSERT_FALSE (integer_truep (c_i_0));
15044 ASSERT_TRUE (integer_truep (c_i_1));
15045 ASSERT_FALSE (integer_truep (c_i_m1));
15046 ASSERT_FALSE (integer_truep (c_f_0));
15047 ASSERT_FALSE (integer_truep (c_f_1));
15048 ASSERT_FALSE (integer_truep (c_f_m1));
15050 /* Test integer_nonzerop. */
15051 ASSERT_FALSE (integer_nonzerop (i_0));
15052 ASSERT_FALSE (integer_nonzerop (wr_i_0));
15053 ASSERT_TRUE (integer_nonzerop (i_1));
15054 ASSERT_TRUE (integer_nonzerop (wr_i_1));
15055 ASSERT_TRUE (integer_nonzerop (i_m1));
15056 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15057 ASSERT_FALSE (integer_nonzerop (f_0));
15058 ASSERT_FALSE (integer_nonzerop (wr_f_0));
15059 ASSERT_FALSE (integer_nonzerop (f_1));
15060 ASSERT_FALSE (integer_nonzerop (wr_f_1));
15061 ASSERT_FALSE (integer_nonzerop (f_m1));
15062 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15063 ASSERT_FALSE (integer_nonzerop (c_i_0));
15064 ASSERT_TRUE (integer_nonzerop (c_i_1));
15065 ASSERT_TRUE (integer_nonzerop (c_i_m1));
15066 ASSERT_FALSE (integer_nonzerop (c_f_0));
15067 ASSERT_FALSE (integer_nonzerop (c_f_1));
15068 ASSERT_FALSE (integer_nonzerop (c_f_m1));
15070 /* Test real_zerop. */
15071 ASSERT_FALSE (real_zerop (i_0));
15072 ASSERT_FALSE (real_zerop (wr_i_0));
15073 ASSERT_FALSE (real_zerop (i_1));
15074 ASSERT_FALSE (real_zerop (wr_i_1));
15075 ASSERT_FALSE (real_zerop (i_m1));
15076 ASSERT_FALSE (real_zerop (wr_i_m1));
15077 ASSERT_TRUE (real_zerop (f_0));
15078 ASSERT_TRUE (real_zerop (wr_f_0));
15079 ASSERT_FALSE (real_zerop (f_1));
15080 ASSERT_FALSE (real_zerop (wr_f_1));
15081 ASSERT_FALSE (real_zerop (f_m1));
15082 ASSERT_FALSE (real_zerop (wr_f_m1));
15083 ASSERT_FALSE (real_zerop (c_i_0));
15084 ASSERT_FALSE (real_zerop (c_i_1));
15085 ASSERT_FALSE (real_zerop (c_i_m1));
15086 ASSERT_TRUE (real_zerop (c_f_0));
15087 ASSERT_FALSE (real_zerop (c_f_1));
15088 ASSERT_FALSE (real_zerop (c_f_m1));
15090 /* Test real_onep. */
15091 ASSERT_FALSE (real_onep (i_0));
15092 ASSERT_FALSE (real_onep (wr_i_0));
15093 ASSERT_FALSE (real_onep (i_1));
15094 ASSERT_FALSE (real_onep (wr_i_1));
15095 ASSERT_FALSE (real_onep (i_m1));
15096 ASSERT_FALSE (real_onep (wr_i_m1));
15097 ASSERT_FALSE (real_onep (f_0));
15098 ASSERT_FALSE (real_onep (wr_f_0));
15099 ASSERT_TRUE (real_onep (f_1));
15100 ASSERT_TRUE (real_onep (wr_f_1));
15101 ASSERT_FALSE (real_onep (f_m1));
15102 ASSERT_FALSE (real_onep (wr_f_m1));
15103 ASSERT_FALSE (real_onep (c_i_0));
15104 ASSERT_FALSE (real_onep (c_i_1));
15105 ASSERT_FALSE (real_onep (c_i_m1));
15106 ASSERT_FALSE (real_onep (c_f_0));
15107 ASSERT_TRUE (real_onep (c_f_1));
15108 ASSERT_FALSE (real_onep (c_f_m1));
15110 /* Test real_minus_onep. */
15111 ASSERT_FALSE (real_minus_onep (i_0));
15112 ASSERT_FALSE (real_minus_onep (wr_i_0));
15113 ASSERT_FALSE (real_minus_onep (i_1));
15114 ASSERT_FALSE (real_minus_onep (wr_i_1));
15115 ASSERT_FALSE (real_minus_onep (i_m1));
15116 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15117 ASSERT_FALSE (real_minus_onep (f_0));
15118 ASSERT_FALSE (real_minus_onep (wr_f_0));
15119 ASSERT_FALSE (real_minus_onep (f_1));
15120 ASSERT_FALSE (real_minus_onep (wr_f_1));
15121 ASSERT_TRUE (real_minus_onep (f_m1));
15122 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15123 ASSERT_FALSE (real_minus_onep (c_i_0));
15124 ASSERT_FALSE (real_minus_onep (c_i_1));
15125 ASSERT_FALSE (real_minus_onep (c_i_m1));
15126 ASSERT_FALSE (real_minus_onep (c_f_0));
15127 ASSERT_FALSE (real_minus_onep (c_f_1));
15128 ASSERT_TRUE (real_minus_onep (c_f_m1));
15130 /* Test zerop. */
15131 ASSERT_TRUE (zerop (i_0));
15132 ASSERT_TRUE (zerop (wr_i_0));
15133 ASSERT_FALSE (zerop (i_1));
15134 ASSERT_FALSE (zerop (wr_i_1));
15135 ASSERT_FALSE (zerop (i_m1));
15136 ASSERT_FALSE (zerop (wr_i_m1));
15137 ASSERT_TRUE (zerop (f_0));
15138 ASSERT_TRUE (zerop (wr_f_0));
15139 ASSERT_FALSE (zerop (f_1));
15140 ASSERT_FALSE (zerop (wr_f_1));
15141 ASSERT_FALSE (zerop (f_m1));
15142 ASSERT_FALSE (zerop (wr_f_m1));
15143 ASSERT_TRUE (zerop (c_i_0));
15144 ASSERT_FALSE (zerop (c_i_1));
15145 ASSERT_FALSE (zerop (c_i_m1));
15146 ASSERT_TRUE (zerop (c_f_0));
15147 ASSERT_FALSE (zerop (c_f_1));
15148 ASSERT_FALSE (zerop (c_f_m1));
15150 /* Test tree_expr_nonnegative_p. */
15151 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15152 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15153 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15154 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15155 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15156 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15157 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15158 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15159 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15160 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15161 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15162 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15163 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15164 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15165 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15166 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15167 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15168 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15170 /* Test tree_expr_nonzero_p. */
15171 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15172 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15173 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15174 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15175 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15176 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15178 /* Test integer_valued_real_p. */
15179 ASSERT_FALSE (integer_valued_real_p (i_0));
15180 ASSERT_TRUE (integer_valued_real_p (f_0));
15181 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15182 ASSERT_TRUE (integer_valued_real_p (f_1));
15183 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
15185 /* Test integer_pow2p. */
15186 ASSERT_FALSE (integer_pow2p (i_0));
15187 ASSERT_TRUE (integer_pow2p (i_1));
15188 ASSERT_TRUE (integer_pow2p (wr_i_1));
15190 /* Test uniform_integer_cst_p. */
15191 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15192 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15193 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15194 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15195 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15196 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15197 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15198 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15199 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15200 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15201 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15202 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15203 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15204 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15205 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15206 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15207 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15208 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15211 /* Check that string escaping works correctly. */
15213 static void
15214 test_escaped_strings (void)
15216 int saved_cutoff;
15217 escaped_string msg;
15219 msg.escape (NULL);
15220 /* ASSERT_STREQ does not accept NULL as a valid test
15221 result, so we have to use ASSERT_EQ instead. */
15222 ASSERT_EQ (NULL, (const char *) msg);
15224 msg.escape ("");
15225 ASSERT_STREQ ("", (const char *) msg);
15227 msg.escape ("foobar");
15228 ASSERT_STREQ ("foobar", (const char *) msg);
15230 /* Ensure that we have -fmessage-length set to 0. */
15231 saved_cutoff = pp_line_cutoff (global_dc->printer);
15232 pp_line_cutoff (global_dc->printer) = 0;
15234 msg.escape ("foo\nbar");
15235 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15237 msg.escape ("\a\b\f\n\r\t\v");
15238 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15240 /* Now repeat the tests with -fmessage-length set to 5. */
15241 pp_line_cutoff (global_dc->printer) = 5;
15243 /* Note that the newline is not translated into an escape. */
15244 msg.escape ("foo\nbar");
15245 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15247 msg.escape ("\a\b\f\n\r\t\v");
15248 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15250 /* Restore the original message length setting. */
15251 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15254 /* Run all of the selftests within this file. */
15256 void
15257 tree_cc_tests ()
15259 test_integer_constants ();
15260 test_identifiers ();
15261 test_labels ();
15262 test_vector_cst_patterns ();
15263 test_location_wrappers ();
15264 test_predicates ();
15265 test_escaped_strings ();
15268 } // namespace selftest
15270 #endif /* CHECKING_P */
15272 #include "gt-tree.h"