1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
27 It is intended to be language-independent but may occasionally
28 call language-dependent routines. */
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "backend.h"
34 #include "target.h"
35 #include "tree.h"
36 #include "gimple.h"
37 #include "tree-pass.h"
38 #include "ssa.h"
39 #include "cgraph.h"
40 #include "diagnostic.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "attribs.h"
47 #include "toplev.h" /* get_random_seed */
48 #include "output.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "tree-dfa.h"
57 #include "langhooks-def.h"
58 #include "tree-diagnostic.h"
59 #include "except.h"
60 #include "builtins.h"
61 #include "print-tree.h"
62 #include "ipa-utils.h"
63 #include "selftest.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "rtl.h"
67 #include "regs.h"
68 #include "tree-vector-builder.h"
69 #include "gimple-fold.h"
70 #include "escaped_string.h"
71 #include "gimple-range.h"
72 #include "gomp-constants.h"
73 #include "dfp.h"
74 #include "asan.h"
75 #include "ubsan.h"
77 #if __cpp_inline_variables < 201606L
78 /* Tree code classes. */
80 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
81 #define END_OF_BASE_TREE_CODES tcc_exceptional,
83 const enum tree_code_class tree_code_type[] = {
84 #include "all-tree.def"
87 #undef DEFTREECODE
88 #undef END_OF_BASE_TREE_CODES
90 /* Table indexed by tree code giving number of expression
91 operands beyond the fixed part of the node structure.
92 Not used for types or decls. */
94 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
95 #define END_OF_BASE_TREE_CODES 0,
97 const unsigned char tree_code_length[] = {
98 #include "all-tree.def"
101 #undef DEFTREECODE
102 #undef END_OF_BASE_TREE_CODES
103 #endif
105 /* Names of tree components.
106 Used for printing out the tree and error messages. */
107 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
108 #define END_OF_BASE_TREE_CODES "@dummy",
110 static const char *const tree_code_name[] = {
111 #include "all-tree.def"
114 #undef DEFTREECODE
115 #undef END_OF_BASE_TREE_CODES
117 /* Each tree code class has an associated string representation.
118 These must correspond to the tree_code_class entries. */
120 const char *const tree_code_class_strings[] =
122 "exceptional",
123 "constant",
124 "type",
125 "declaration",
126 "reference",
127 "comparison",
128 "unary",
129 "binary",
130 "statement",
131 "vl_exp",
132 "expression"
135 /* obstack.[ch] explicitly declined to prototype this. */
136 extern int _obstack_allocated_p (struct obstack *h, void *obj);
138 /* Statistics-gathering stuff. */
140 static uint64_t tree_code_counts[MAX_TREE_CODES];
141 uint64_t tree_node_counts[(int) all_kinds];
142 uint64_t tree_node_sizes[(int) all_kinds];
144 /* Keep in sync with tree.h:enum tree_node_kind. */
145 static const char * const tree_node_kind_names[] = {
146 "decls",
147 "types",
148 "blocks",
149 "stmts",
150 "refs",
151 "exprs",
152 "constants",
153 "identifiers",
154 "vecs",
155 "binfos",
156 "ssa names",
157 "constructors",
158 "random kinds",
159 "lang_decl kinds",
160 "lang_type kinds",
161 "omp clauses",
164 /* Unique id for next decl created. */
165 static GTY(()) int next_decl_uid;
166 /* Unique id for next type created. */
167 static GTY(()) unsigned next_type_uid = 1;
168 /* Unique id for next debug decl created. Use negative numbers,
169 to catch erroneous uses. */
170 static GTY(()) int next_debug_decl_uid;
172 /* Since we cannot rehash a type after it is in the table, we have to
173 keep the hash code. */
175 struct GTY((for_user)) type_hash {
176 unsigned long hash;
177 tree type;
180 /* Initial size of the hash table (rounded to next prime). */
181 #define TYPE_HASH_INITIAL_SIZE 1000
183 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
185 static hashval_t hash (type_hash *t) { return t->hash; }
186 static bool equal (type_hash *a, type_hash *b);
188 static int
189 keep_cache_entry (type_hash *&t)
191 return ggc_marked_p (t->type);
195 /* Now here is the hash table. When recording a type, it is added to
196 the slot whose index is the hash code. Note that the hash table is
197 used for several kinds of types (function types, array types and
198 array index range types, for now). While all these live in the
199 same table, they are completely independent, and the hash code is
200 computed differently for each of these. */
202 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
204 /* Hash table and temporary node for larger integer const values. */
205 static GTY (()) tree int_cst_node;
207 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
209 static hashval_t hash (tree t);
210 static bool equal (tree x, tree y);
213 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
215 /* Class and variable for making sure that there is a single POLY_INT_CST
216 for a given value. */
217 struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
219 typedef std::pair<tree, const poly_wide_int *> compare_type;
220 static hashval_t hash (tree t);
221 static bool equal (tree x, const compare_type &y);
224 static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
226 /* Hash table for optimization flags and target option flags. Use the same
227 hash table for both sets of options. Nodes for building the current
228 optimization and target option nodes. The assumption is most of the time
229 the options created will already be in the hash table, so we avoid
230 allocating and freeing up a node repeatedly. */
231 static GTY (()) tree cl_optimization_node;
232 static GTY (()) tree cl_target_option_node;
234 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
236 static hashval_t hash (tree t);
237 static bool equal (tree x, tree y);
240 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
242 /* General tree->tree mapping structure for use in hash tables. */
245 static GTY ((cache))
246 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
248 static GTY ((cache))
249 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
251 static GTY ((cache))
252 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
254 static void set_type_quals (tree, int);
255 static void print_type_hash_statistics (void);
256 static void print_debug_expr_statistics (void);
257 static void print_value_expr_statistics (void);
259 tree global_trees[TI_MAX];
260 tree integer_types[itk_none];
262 bool int_n_enabled_p[NUM_INT_N_ENTS];
263 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
265 bool tree_contains_struct[MAX_TREE_CODES][64];
267 /* Number of operands for each OMP clause. */
268 unsigned const char omp_clause_num_ops[] =
270 0, /* OMP_CLAUSE_ERROR */
271 1, /* OMP_CLAUSE_PRIVATE */
272 1, /* OMP_CLAUSE_SHARED */
273 1, /* OMP_CLAUSE_FIRSTPRIVATE */
274 2, /* OMP_CLAUSE_LASTPRIVATE */
275 5, /* OMP_CLAUSE_REDUCTION */
276 5, /* OMP_CLAUSE_TASK_REDUCTION */
277 5, /* OMP_CLAUSE_IN_REDUCTION */
278 1, /* OMP_CLAUSE_COPYIN */
279 1, /* OMP_CLAUSE_COPYPRIVATE */
280 3, /* OMP_CLAUSE_LINEAR */
281 1, /* OMP_CLAUSE_AFFINITY */
282 2, /* OMP_CLAUSE_ALIGNED */
283 3, /* OMP_CLAUSE_ALLOCATE */
284 1, /* OMP_CLAUSE_DEPEND */
285 1, /* OMP_CLAUSE_NONTEMPORAL */
286 1, /* OMP_CLAUSE_UNIFORM */
287 1, /* OMP_CLAUSE_ENTER */
288 1, /* OMP_CLAUSE_LINK */
289 1, /* OMP_CLAUSE_DETACH */
290 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
291 1, /* OMP_CLAUSE_USE_DEVICE_ADDR */
292 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
293 1, /* OMP_CLAUSE_INCLUSIVE */
294 1, /* OMP_CLAUSE_EXCLUSIVE */
295 2, /* OMP_CLAUSE_FROM */
296 2, /* OMP_CLAUSE_TO */
297 2, /* OMP_CLAUSE_MAP */
298 1, /* OMP_CLAUSE_HAS_DEVICE_ADDR */
299 1, /* OMP_CLAUSE_DOACROSS */
300 2, /* OMP_CLAUSE__CACHE_ */
301 2, /* OMP_CLAUSE_GANG */
302 1, /* OMP_CLAUSE_ASYNC */
303 1, /* OMP_CLAUSE_WAIT */
304 0, /* OMP_CLAUSE_AUTO */
305 0, /* OMP_CLAUSE_SEQ */
306 1, /* OMP_CLAUSE__LOOPTEMP_ */
307 1, /* OMP_CLAUSE__REDUCTEMP_ */
308 1, /* OMP_CLAUSE__CONDTEMP_ */
309 1, /* OMP_CLAUSE__SCANTEMP_ */
310 1, /* OMP_CLAUSE_IF */
311 1, /* OMP_CLAUSE_NUM_THREADS */
312 1, /* OMP_CLAUSE_SCHEDULE */
313 0, /* OMP_CLAUSE_NOWAIT */
314 1, /* OMP_CLAUSE_ORDERED */
315 0, /* OMP_CLAUSE_DEFAULT */
316 3, /* OMP_CLAUSE_COLLAPSE */
317 0, /* OMP_CLAUSE_UNTIED */
318 1, /* OMP_CLAUSE_FINAL */
319 0, /* OMP_CLAUSE_MERGEABLE */
320 1, /* OMP_CLAUSE_DEVICE */
321 1, /* OMP_CLAUSE_DIST_SCHEDULE */
322 0, /* OMP_CLAUSE_INBRANCH */
323 0, /* OMP_CLAUSE_NOTINBRANCH */
324 2, /* OMP_CLAUSE_NUM_TEAMS */
325 1, /* OMP_CLAUSE_THREAD_LIMIT */
326 0, /* OMP_CLAUSE_PROC_BIND */
327 1, /* OMP_CLAUSE_SAFELEN */
328 1, /* OMP_CLAUSE_SIMDLEN */
329 0, /* OMP_CLAUSE_DEVICE_TYPE */
330 0, /* OMP_CLAUSE_FOR */
331 0, /* OMP_CLAUSE_PARALLEL */
332 0, /* OMP_CLAUSE_SECTIONS */
333 0, /* OMP_CLAUSE_TASKGROUP */
334 1, /* OMP_CLAUSE_PRIORITY */
335 1, /* OMP_CLAUSE_GRAINSIZE */
336 1, /* OMP_CLAUSE_NUM_TASKS */
337 0, /* OMP_CLAUSE_NOGROUP */
338 0, /* OMP_CLAUSE_THREADS */
339 0, /* OMP_CLAUSE_SIMD */
340 1, /* OMP_CLAUSE_HINT */
341 0, /* OMP_CLAUSE_DEFAULTMAP */
342 0, /* OMP_CLAUSE_ORDER */
343 0, /* OMP_CLAUSE_BIND */
344 1, /* OMP_CLAUSE_FILTER */
345 1, /* OMP_CLAUSE__SIMDUID_ */
346 0, /* OMP_CLAUSE__SIMT_ */
347 0, /* OMP_CLAUSE_INDEPENDENT */
348 1, /* OMP_CLAUSE_WORKER */
349 1, /* OMP_CLAUSE_VECTOR */
350 1, /* OMP_CLAUSE_NUM_GANGS */
351 1, /* OMP_CLAUSE_NUM_WORKERS */
352 1, /* OMP_CLAUSE_VECTOR_LENGTH */
353 3, /* OMP_CLAUSE_TILE */
354 0, /* OMP_CLAUSE_IF_PRESENT */
355 0, /* OMP_CLAUSE_FINALIZE */
356 0, /* OMP_CLAUSE_NOHOST */
359 const char * const omp_clause_code_name[] =
361 "error_clause",
362 "private",
363 "shared",
364 "firstprivate",
365 "lastprivate",
366 "reduction",
367 "task_reduction",
368 "in_reduction",
369 "copyin",
370 "copyprivate",
371 "linear",
372 "affinity",
373 "aligned",
374 "allocate",
375 "depend",
376 "nontemporal",
377 "uniform",
378 "enter",
379 "link",
380 "detach",
381 "use_device_ptr",
382 "use_device_addr",
383 "is_device_ptr",
384 "inclusive",
385 "exclusive",
386 "from",
387 "to",
388 "map",
389 "has_device_addr",
390 "doacross",
391 "_cache_",
392 "gang",
393 "async",
394 "wait",
395 "auto",
396 "seq",
397 "_looptemp_",
398 "_reductemp_",
399 "_condtemp_",
400 "_scantemp_",
401 "if",
402 "num_threads",
403 "schedule",
404 "nowait",
405 "ordered",
406 "default",
407 "collapse",
408 "untied",
409 "final",
410 "mergeable",
411 "device",
412 "dist_schedule",
413 "inbranch",
414 "notinbranch",
415 "num_teams",
416 "thread_limit",
417 "proc_bind",
418 "safelen",
419 "simdlen",
420 "device_type",
421 "for",
422 "parallel",
423 "sections",
424 "taskgroup",
425 "priority",
426 "grainsize",
427 "num_tasks",
428 "nogroup",
429 "threads",
430 "simd",
431 "hint",
432 "defaultmap",
433 "order",
434 "bind",
435 "filter",
436 "_simduid_",
437 "_simt_",
438 "independent",
439 "worker",
440 "vector",
441 "num_gangs",
442 "num_workers",
443 "vector_length",
444 "tile",
445 "if_present",
446 "finalize",
447 "nohost",
450 /* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
451 clause names, but for use in diagnostics etc. we would like to use the "user"
452 clause names. */
454 const char *
455 user_omp_clause_code_name (tree clause, bool oacc)
457 /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
458 distinguish clauses as seen by the user. See also where front ends do
459 'build_omp_clause' with 'OMP_CLAUSE_MAP'. */
460 if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
461 switch (OMP_CLAUSE_MAP_KIND (clause))
463 case GOMP_MAP_FORCE_ALLOC:
464 case GOMP_MAP_ALLOC: return "create";
465 case GOMP_MAP_FORCE_TO:
466 case GOMP_MAP_TO: return "copyin";
467 case GOMP_MAP_FORCE_FROM:
468 case GOMP_MAP_FROM: return "copyout";
469 case GOMP_MAP_FORCE_TOFROM:
470 case GOMP_MAP_TOFROM: return "copy";
471 case GOMP_MAP_RELEASE: return "delete";
472 case GOMP_MAP_FORCE_PRESENT: return "present";
473 case GOMP_MAP_ATTACH: return "attach";
474 case GOMP_MAP_FORCE_DETACH:
475 case GOMP_MAP_DETACH: return "detach";
476 case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
477 case GOMP_MAP_LINK: return "link";
478 case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
479 default: break;
482 return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
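/* For example, under OpenACC an OMP_CLAUSE_MAP whose map kind is
   GOMP_MAP_TOFROM or GOMP_MAP_FORCE_TOFROM is reported as "copy", whereas
   the same clause in an OpenMP context falls through to the generic "map"
   entry of omp_clause_code_name above.  */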
486 /* Return the tree node structure used by tree code CODE. */
488 static inline enum tree_node_structure_enum
489 tree_node_structure_for_code (enum tree_code code)
491 switch (TREE_CODE_CLASS (code))
493 case tcc_declaration:
494 switch (code)
496 case CONST_DECL: return TS_CONST_DECL;
497 case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
498 case FIELD_DECL: return TS_FIELD_DECL;
499 case FUNCTION_DECL: return TS_FUNCTION_DECL;
500 case LABEL_DECL: return TS_LABEL_DECL;
501 case PARM_DECL: return TS_PARM_DECL;
502 case RESULT_DECL: return TS_RESULT_DECL;
503 case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
504 case TYPE_DECL: return TS_TYPE_DECL;
505 case VAR_DECL: return TS_VAR_DECL;
506 default: return TS_DECL_NON_COMMON;
509 case tcc_type: return TS_TYPE_NON_COMMON;
511 case tcc_binary:
512 case tcc_comparison:
513 case tcc_expression:
514 case tcc_reference:
515 case tcc_statement:
516 case tcc_unary:
517 case tcc_vl_exp: return TS_EXP;
519 default: /* tcc_constant and tcc_exceptional */
520 break;
523 switch (code)
525 /* tcc_constant cases. */
526 case COMPLEX_CST: return TS_COMPLEX;
527 case FIXED_CST: return TS_FIXED_CST;
528 case INTEGER_CST: return TS_INT_CST;
529 case POLY_INT_CST: return TS_POLY_INT_CST;
530 case REAL_CST: return TS_REAL_CST;
531 case STRING_CST: return TS_STRING;
532 case VECTOR_CST: return TS_VECTOR;
533 case VOID_CST: return TS_TYPED;
535 /* tcc_exceptional cases. */
536 case BLOCK: return TS_BLOCK;
537 case CONSTRUCTOR: return TS_CONSTRUCTOR;
538 case ERROR_MARK: return TS_COMMON;
539 case IDENTIFIER_NODE: return TS_IDENTIFIER;
540 case OMP_CLAUSE: return TS_OMP_CLAUSE;
541 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
542 case PLACEHOLDER_EXPR: return TS_COMMON;
543 case SSA_NAME: return TS_SSA_NAME;
544 case STATEMENT_LIST: return TS_STATEMENT_LIST;
545 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
546 case TREE_BINFO: return TS_BINFO;
547 case TREE_LIST: return TS_LIST;
548 case TREE_VEC: return TS_VEC;
550 default:
551 gcc_unreachable ();
556 /* Initialize tree_contains_struct to describe the hierarchy of tree
557 nodes. */
559 static void
560 initialize_tree_contains_struct (void)
562 unsigned i;
564 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
566 enum tree_code code;
567 enum tree_node_structure_enum ts_code;
569 code = (enum tree_code) i;
570 ts_code = tree_node_structure_for_code (code);
572 /* Mark the TS structure itself. */
573 tree_contains_struct[code][ts_code] = 1;
575 /* Mark all the structures that TS is derived from. */
576 switch (ts_code)
578 case TS_TYPED:
579 case TS_BLOCK:
580 case TS_OPTIMIZATION:
581 case TS_TARGET_OPTION:
582 MARK_TS_BASE (code);
583 break;
585 case TS_COMMON:
586 case TS_INT_CST:
587 case TS_POLY_INT_CST:
588 case TS_REAL_CST:
589 case TS_FIXED_CST:
590 case TS_VECTOR:
591 case TS_STRING:
592 case TS_COMPLEX:
593 case TS_SSA_NAME:
594 case TS_CONSTRUCTOR:
595 case TS_EXP:
596 case TS_STATEMENT_LIST:
597 MARK_TS_TYPED (code);
598 break;
600 case TS_IDENTIFIER:
601 case TS_DECL_MINIMAL:
602 case TS_TYPE_COMMON:
603 case TS_LIST:
604 case TS_VEC:
605 case TS_BINFO:
606 case TS_OMP_CLAUSE:
607 MARK_TS_COMMON (code);
608 break;
610 case TS_TYPE_WITH_LANG_SPECIFIC:
611 MARK_TS_TYPE_COMMON (code);
612 break;
614 case TS_TYPE_NON_COMMON:
615 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
616 break;
618 case TS_DECL_COMMON:
619 MARK_TS_DECL_MINIMAL (code);
620 break;
622 case TS_DECL_WRTL:
623 case TS_CONST_DECL:
624 MARK_TS_DECL_COMMON (code);
625 break;
627 case TS_DECL_NON_COMMON:
628 MARK_TS_DECL_WITH_VIS (code);
629 break;
631 case TS_DECL_WITH_VIS:
632 case TS_PARM_DECL:
633 case TS_LABEL_DECL:
634 case TS_RESULT_DECL:
635 MARK_TS_DECL_WRTL (code);
636 break;
638 case TS_FIELD_DECL:
639 MARK_TS_DECL_COMMON (code);
640 break;
642 case TS_VAR_DECL:
643 MARK_TS_DECL_WITH_VIS (code);
644 break;
646 case TS_TYPE_DECL:
647 case TS_FUNCTION_DECL:
648 MARK_TS_DECL_NON_COMMON (code);
649 break;
651 case TS_TRANSLATION_UNIT_DECL:
652 MARK_TS_DECL_COMMON (code);
653 break;
655 default:
656 gcc_unreachable ();
660 /* Basic consistency checks for attributes used in fold. */
661 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
662 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
663 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
664 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
665 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
666 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
667 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
668 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
669 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
670 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
671 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
672 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
673 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
674 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
675 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
676 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
677 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
678 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
679 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
680 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
681 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
682 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
683 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
684 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
685 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
686 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
687 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
688 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
689 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
690 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
691 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
692 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
693 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
694 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
695 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
696 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
697 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
698 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
699 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
700 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
704 /* Init tree.cc. */
706 void
707 init_ttree (void)
709 /* Initialize the hash table of types. */
710 type_hash_table
711 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
713 debug_expr_for_decl
714 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
716 value_expr_for_decl
717 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
719 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
721 poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
723 int_cst_node = make_int_cst (1, 1);
725 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
727 cl_optimization_node = make_node (OPTIMIZATION_NODE);
728 cl_target_option_node = make_node (TARGET_OPTION_NODE);
730 /* Initialize the tree_contains_struct array. */
731 initialize_tree_contains_struct ();
732 lang_hooks.init_ts ();
736 /* The name of the object as the assembler will see it (but before any
737 translations made by ASM_OUTPUT_LABELREF). Often this is the same
738 as DECL_NAME. It is an IDENTIFIER_NODE. */
739 tree
740 decl_assembler_name (tree decl)
742 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
743 lang_hooks.set_decl_assembler_name (decl);
744 return DECL_ASSEMBLER_NAME_RAW (decl);
747 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
748 (either of which may be NULL). Inform the FE if this changes the
749 name. */
751 void
752 overwrite_decl_assembler_name (tree decl, tree name)
754 if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
755 lang_hooks.overwrite_decl_assembler_name (decl, name);
758 /* Return true if DECL may need an assembler name to be set. */
760 static inline bool
761 need_assembler_name_p (tree decl)
763 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
764 Rule merging. This makes type_odr_p return true on those types during
765 LTO, and by comparing the mangled names we can tell which types are intended
766 to be equivalent across compilation units.
768 We do not store names of type_in_anonymous_namespace_p.
770 Record, union and enumeration types have linkage that allows us
771 to check type_in_anonymous_namespace_p. We do not mangle compound types
772 that can always be compared structurally.
774 Similarly for builtin types, we compare properties of their main variant.
775 A special case is integer types, where mangling does distinguish
776 between char/signed char/unsigned char etc. Storing names for these lets
777 e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
778 See cp/mangle.cc:write_builtin_type for details. */
780 if (TREE_CODE (decl) == TYPE_DECL)
782 if (DECL_NAME (decl)
783 && decl == TYPE_NAME (TREE_TYPE (decl))
784 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
785 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
786 && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
787 && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
788 || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
789 && (type_with_linkage_p (TREE_TYPE (decl))
790 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
791 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
792 return !DECL_ASSEMBLER_NAME_SET_P (decl);
793 return false;
795 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
796 if (!VAR_OR_FUNCTION_DECL_P (decl))
797 return false;
799 /* If DECL already has its assembler name set, it does not need a
800 new one. */
801 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
802 || DECL_ASSEMBLER_NAME_SET_P (decl))
803 return false;
805 /* Abstract decls do not need an assembler name. */
806 if (DECL_ABSTRACT_P (decl))
807 return false;
809 /* For VAR_DECLs, only static, public and external symbols need an
810 assembler name. */
811 if (VAR_P (decl)
812 && !TREE_STATIC (decl)
813 && !TREE_PUBLIC (decl)
814 && !DECL_EXTERNAL (decl))
815 return false;
817 if (TREE_CODE (decl) == FUNCTION_DECL)
819 /* Do not set assembler name on builtins. Allow RTL expansion to
820 decide whether to expand inline or via a regular call. */
821 if (fndecl_built_in_p (decl)
822 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
823 return false;
825 /* Functions represented in the callgraph need an assembler name. */
826 if (cgraph_node::get (decl) != NULL)
827 return true;
829 /* Unused and not public functions don't need an assembler name. */
830 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
831 return false;
834 return true;
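/* For example, a block-scope automatic variable fails the VAR_DECL test
   above (it is neither TREE_STATIC, TREE_PUBLIC nor DECL_EXTERNAL) and so
   never needs an assembler name, whereas a used static function that is
   not a builtin reaches the final return and will get a name assigned by
   assign_assembler_name_if_needed below.  */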
837 /* If T needs an assembler name, have one created for it. */
839 void
840 assign_assembler_name_if_needed (tree t)
842 if (need_assembler_name_p (t))
844 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
845 diagnostics that use input_location to show locus
846 information. The problem here is that, at this point,
847 input_location is generally anchored to the end of the file
848 (since the parser is long gone), so we don't have a good
849 position to pin it to.
851 To alleviate this problem, this uses the location of T's
852 declaration. Examples of this are
853 testsuite/g++.dg/template/cond2.C and
854 testsuite/g++.dg/template/pr35240.C. */
855 location_t saved_location = input_location;
856 input_location = DECL_SOURCE_LOCATION (t);
858 decl_assembler_name (t);
860 input_location = saved_location;
864 /* When the target supports COMDAT groups, this indicates which group the
865 DECL is associated with. This can be either an IDENTIFIER_NODE or a
866 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
867 tree
868 decl_comdat_group (const_tree node)
870 struct symtab_node *snode = symtab_node::get (node);
871 if (!snode)
872 return NULL;
873 return snode->get_comdat_group ();
876 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
877 tree
878 decl_comdat_group_id (const_tree node)
880 struct symtab_node *snode = symtab_node::get (node);
881 if (!snode)
882 return NULL;
883 return snode->get_comdat_group_id ();
886 /* When the target supports named sections, return the name of the section
887 NODE is placed in, or NULL if it is in no section. */
888 const char *
889 decl_section_name (const_tree node)
891 struct symtab_node *snode = symtab_node::get (node);
892 if (!snode)
893 return NULL;
894 return snode->get_section ();
897 /* Set the section name of NODE to VALUE (a plain string, not an
898 IDENTIFIER_NODE), or clear it when VALUE is NULL. */
899 void
900 set_decl_section_name (tree node, const char *value)
902 struct symtab_node *snode;
904 if (value == NULL)
906 snode = symtab_node::get (node);
907 if (!snode)
908 return;
910 else if (VAR_P (node))
911 snode = varpool_node::get_create (node);
912 else
913 snode = cgraph_node::get_create (node);
914 snode->set_section (value);
917 /* Set section name of NODE to match the section name of OTHER.
919 set_decl_section_name (decl, other) is equivalent to
920 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
921 efficient. */
922 void
923 set_decl_section_name (tree decl, const_tree other)
925 struct symtab_node *other_node = symtab_node::get (other);
926 if (other_node)
928 struct symtab_node *decl_node;
929 if (VAR_P (decl))
930 decl_node = varpool_node::get_create (decl);
931 else
932 decl_node = cgraph_node::get_create (decl);
933 decl_node->set_section (*other_node);
935 else
937 struct symtab_node *decl_node = symtab_node::get (decl);
938 if (!decl_node)
939 return;
940 decl_node->set_section (NULL);
944 /* Return TLS model of a variable NODE. */
945 enum tls_model
946 decl_tls_model (const_tree node)
948 struct varpool_node *snode = varpool_node::get (node);
949 if (!snode)
950 return TLS_MODEL_NONE;
951 return snode->tls_model;
954 /* Set TLS model of variable NODE to MODEL. */
955 void
956 set_decl_tls_model (tree node, enum tls_model model)
958 struct varpool_node *vnode;
960 if (model == TLS_MODEL_NONE)
962 vnode = varpool_node::get (node);
963 if (!vnode)
964 return;
966 else
967 vnode = varpool_node::get_create (node);
968 vnode->tls_model = model;
971 /* Compute the number of bytes occupied by a tree with code CODE.
972 This function cannot be used for nodes that have variable sizes,
973 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
974 size_t
975 tree_code_size (enum tree_code code)
977 switch (TREE_CODE_CLASS (code))
979 case tcc_declaration: /* A decl node */
980 switch (code)
982 case FIELD_DECL: return sizeof (tree_field_decl);
983 case PARM_DECL: return sizeof (tree_parm_decl);
984 case VAR_DECL: return sizeof (tree_var_decl);
985 case LABEL_DECL: return sizeof (tree_label_decl);
986 case RESULT_DECL: return sizeof (tree_result_decl);
987 case CONST_DECL: return sizeof (tree_const_decl);
988 case TYPE_DECL: return sizeof (tree_type_decl);
989 case FUNCTION_DECL: return sizeof (tree_function_decl);
990 case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
991 case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
992 case NAMESPACE_DECL:
993 case IMPORTED_DECL:
994 case NAMELIST_DECL: return sizeof (tree_decl_non_common);
995 default:
996 gcc_checking_assert (code >= NUM_TREE_CODES);
997 return lang_hooks.tree_size (code);
1000 case tcc_type: /* a type node */
1001 switch (code)
1003 case OFFSET_TYPE:
1004 case ENUMERAL_TYPE:
1005 case BOOLEAN_TYPE:
1006 case INTEGER_TYPE:
1007 case REAL_TYPE:
1008 case OPAQUE_TYPE:
1009 case POINTER_TYPE:
1010 case REFERENCE_TYPE:
1011 case NULLPTR_TYPE:
1012 case FIXED_POINT_TYPE:
1013 case COMPLEX_TYPE:
1014 case VECTOR_TYPE:
1015 case ARRAY_TYPE:
1016 case RECORD_TYPE:
1017 case UNION_TYPE:
1018 case QUAL_UNION_TYPE:
1019 case VOID_TYPE:
1020 case FUNCTION_TYPE:
1021 case METHOD_TYPE:
1022 case LANG_TYPE: return sizeof (tree_type_non_common);
1023 default:
1024 gcc_checking_assert (code >= NUM_TREE_CODES);
1025 return lang_hooks.tree_size (code);
1028 case tcc_reference: /* a reference */
1029 case tcc_expression: /* an expression */
1030 case tcc_statement: /* an expression with side effects */
1031 case tcc_comparison: /* a comparison expression */
1032 case tcc_unary: /* a unary arithmetic expression */
1033 case tcc_binary: /* a binary arithmetic expression */
1034 return (sizeof (struct tree_exp)
1035 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
1037 case tcc_constant: /* a constant */
1038 switch (code)
1040 case VOID_CST: return sizeof (tree_typed);
1041 case INTEGER_CST: gcc_unreachable ();
1042 case POLY_INT_CST: return sizeof (tree_poly_int_cst);
1043 case REAL_CST: return sizeof (tree_real_cst);
1044 case FIXED_CST: return sizeof (tree_fixed_cst);
1045 case COMPLEX_CST: return sizeof (tree_complex);
1046 case VECTOR_CST: gcc_unreachable ();
1047 case STRING_CST: gcc_unreachable ();
1048 default:
1049 gcc_checking_assert (code >= NUM_TREE_CODES);
1050 return lang_hooks.tree_size (code);
1053 case tcc_exceptional: /* something random, like an identifier. */
1054 switch (code)
1056 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
1057 case TREE_LIST: return sizeof (tree_list);
1059 case ERROR_MARK:
1060 case PLACEHOLDER_EXPR: return sizeof (tree_common);
1062 case TREE_VEC: gcc_unreachable ();
1063 case OMP_CLAUSE: gcc_unreachable ();
1065 case SSA_NAME: return sizeof (tree_ssa_name);
1067 case STATEMENT_LIST: return sizeof (tree_statement_list);
1068 case BLOCK: return sizeof (struct tree_block);
1069 case CONSTRUCTOR: return sizeof (tree_constructor);
1070 case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
1071 case TARGET_OPTION_NODE: return sizeof (tree_target_option);
1073 default:
1074 gcc_checking_assert (code >= NUM_TREE_CODES);
1075 return lang_hooks.tree_size (code);
1078 default:
1079 gcc_unreachable ();
1083 /* Compute the number of bytes occupied by NODE. This routine only
1084 looks at TREE_CODE, except for those nodes that have variable sizes. */
1085 size_t
1086 tree_size (const_tree node)
1088 const enum tree_code code = TREE_CODE (node);
1089 switch (code)
1091 case INTEGER_CST:
1092 return (sizeof (struct tree_int_cst)
1093 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
1095 case TREE_BINFO:
1096 return (offsetof (struct tree_binfo, base_binfos)
1097 + vec<tree, va_gc>
1098 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
1100 case TREE_VEC:
1101 return (sizeof (struct tree_vec)
1102 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
1104 case VECTOR_CST:
1105 return (sizeof (struct tree_vector)
1106 + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));
1108 case STRING_CST:
1109 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
1111 case OMP_CLAUSE:
1112 return (sizeof (struct tree_omp_clause)
1113 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
1114 * sizeof (tree));
1116 default:
1117 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
1118 return (sizeof (struct tree_exp)
1119 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
1120 else
1121 return tree_code_size (code);
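/* As a concrete instance of the variable-size cases above, a TREE_VEC of
   length 3 occupies sizeof (struct tree_vec) + 2 * sizeof (tree) bytes,
   and a STRING_CST occupies offsetof (struct tree_string, str) plus its
   TREE_STRING_LENGTH plus one byte for the terminating NUL.  */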
1125 /* Return tree node kind based on tree CODE. */
1127 static tree_node_kind
1128 get_stats_node_kind (enum tree_code code)
1130 enum tree_code_class type = TREE_CODE_CLASS (code);
1132 switch (type)
1134 case tcc_declaration: /* A decl node */
1135 return d_kind;
1136 case tcc_type: /* a type node */
1137 return t_kind;
1138 case tcc_statement: /* an expression with side effects */
1139 return s_kind;
1140 case tcc_reference: /* a reference */
1141 return r_kind;
1142 case tcc_expression: /* an expression */
1143 case tcc_comparison: /* a comparison expression */
1144 case tcc_unary: /* a unary arithmetic expression */
1145 case tcc_binary: /* a binary arithmetic expression */
1146 return e_kind;
1147 case tcc_constant: /* a constant */
1148 return c_kind;
1149 case tcc_exceptional: /* something random, like an identifier. */
1150 switch (code)
1152 case IDENTIFIER_NODE:
1153 return id_kind;
1154 case TREE_VEC:
1155 return vec_kind;
1156 case TREE_BINFO:
1157 return binfo_kind;
1158 case SSA_NAME:
1159 return ssa_name_kind;
1160 case BLOCK:
1161 return b_kind;
1162 case CONSTRUCTOR:
1163 return constr_kind;
1164 case OMP_CLAUSE:
1165 return omp_clause_kind;
1166 default:
1167 return x_kind;
1169 break;
1170 case tcc_vl_exp:
1171 return e_kind;
1172 default:
1173 gcc_unreachable ();
1177 /* Record interesting allocation statistics for a tree node with CODE
1178 and LENGTH. */
1180 static void
1181 record_node_allocation_statistics (enum tree_code code, size_t length)
1183 if (!GATHER_STATISTICS)
1184 return;
1186 tree_node_kind kind = get_stats_node_kind (code);
1188 tree_code_counts[(int) code]++;
1189 tree_node_counts[(int) kind]++;
1190 tree_node_sizes[(int) kind] += length;
1193 /* Allocate and return a new UID from the DECL_UID namespace. */
1196 allocate_decl_uid (void)
1198 return next_decl_uid++;
1201 /* Return a newly allocated node of code CODE. For decl and type
1202 nodes, some other fields are initialized. The rest of the node is
1203 initialized to zero. This function cannot be used for TREE_VEC,
1204 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1205 tree_code_size.
1207 Achoo! I got a code in the node. */
1209 tree
1210 make_node (enum tree_code code MEM_STAT_DECL)
1212 tree t;
1213 enum tree_code_class type = TREE_CODE_CLASS (code);
1214 size_t length = tree_code_size (code);
1216 record_node_allocation_statistics (code, length);
1218 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1219 TREE_SET_CODE (t, code);
1221 switch (type)
1223 case tcc_statement:
1224 if (code != DEBUG_BEGIN_STMT)
1225 TREE_SIDE_EFFECTS (t) = 1;
1226 break;
1228 case tcc_declaration:
1229 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1231 if (code == FUNCTION_DECL)
1233 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1234 SET_DECL_MODE (t, FUNCTION_MODE);
1236 else
1237 SET_DECL_ALIGN (t, 1);
1239 DECL_SOURCE_LOCATION (t) = input_location;
1240 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1241 DECL_UID (t) = --next_debug_decl_uid;
1242 else
1244 DECL_UID (t) = allocate_decl_uid ();
1245 SET_DECL_PT_UID (t, -1);
1247 if (TREE_CODE (t) == LABEL_DECL)
1248 LABEL_DECL_UID (t) = -1;
1250 break;
1252 case tcc_type:
1253 TYPE_UID (t) = next_type_uid++;
1254 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1255 TYPE_USER_ALIGN (t) = 0;
1256 TYPE_MAIN_VARIANT (t) = t;
1257 TYPE_CANONICAL (t) = t;
1259 /* Default to no attributes for type, but let target change that. */
1260 TYPE_ATTRIBUTES (t) = NULL_TREE;
1261 targetm.set_default_type_attributes (t);
1263 /* We have not yet computed the alias set for this type. */
1264 TYPE_ALIAS_SET (t) = -1;
1265 break;
1267 case tcc_constant:
1268 TREE_CONSTANT (t) = 1;
1269 break;
1271 case tcc_expression:
1272 switch (code)
1274 case INIT_EXPR:
1275 case MODIFY_EXPR:
1276 case VA_ARG_EXPR:
1277 case PREDECREMENT_EXPR:
1278 case PREINCREMENT_EXPR:
1279 case POSTDECREMENT_EXPR:
1280 case POSTINCREMENT_EXPR:
1281 /* All of these have side-effects, no matter what their
1282 operands are. */
1283 TREE_SIDE_EFFECTS (t) = 1;
1284 break;
1286 default:
1287 break;
1289 break;
1291 case tcc_exceptional:
1292 switch (code)
1294 case TARGET_OPTION_NODE:
1295 TREE_TARGET_OPTION(t)
1296 = ggc_cleared_alloc<struct cl_target_option> ();
1297 break;
1299 case OPTIMIZATION_NODE:
1300 TREE_OPTIMIZATION (t)
1301 = ggc_cleared_alloc<struct cl_optimization> ();
1302 break;
1304 default:
1305 break;
1307 break;
1309 default:
1310 /* Other classes need no special treatment. */
1311 break;
1314 return t;
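/* For example, make_node (VAR_DECL) returns a cleared decl whose
   DECL_SOURCE_LOCATION is the current input_location and whose DECL_UID is
   freshly allocated, while make_node (INTEGER_TYPE) returns a type that is
   its own TYPE_MAIN_VARIANT and TYPE_CANONICAL and whose alias set is
   still uncomputed (-1).  */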
1317 /* Free tree node. */
1319 void
1320 free_node (tree node)
1322 enum tree_code code = TREE_CODE (node);
1323 if (GATHER_STATISTICS)
1325 enum tree_node_kind kind = get_stats_node_kind (code);
1327 gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
1328 gcc_checking_assert (tree_node_counts[(int) kind] != 0);
1329 gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));
1331 tree_code_counts[(int) TREE_CODE (node)]--;
1332 tree_node_counts[(int) kind]--;
1333 tree_node_sizes[(int) kind] -= tree_size (node);
1335 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1336 vec_free (CONSTRUCTOR_ELTS (node));
1337 else if (code == BLOCK)
1338 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1339 else if (code == TREE_BINFO)
1340 vec_free (BINFO_BASE_ACCESSES (node));
1341 else if (code == OPTIMIZATION_NODE)
1342 cl_optimization_option_free (TREE_OPTIMIZATION (node));
1343 else if (code == TARGET_OPTION_NODE)
1344 cl_target_option_free (TREE_TARGET_OPTION (node));
1345 ggc_free (node);
1348 /* Return a new node with the same contents as NODE except that its
1349 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1351 tree
1352 copy_node (tree node MEM_STAT_DECL)
1354 tree t;
1355 enum tree_code code = TREE_CODE (node);
1356 size_t length;
1358 gcc_assert (code != STATEMENT_LIST);
1360 length = tree_size (node);
1361 record_node_allocation_statistics (code, length);
1362 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1363 memcpy (t, node, length);
1365 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1366 TREE_CHAIN (t) = 0;
1367 TREE_ASM_WRITTEN (t) = 0;
1368 TREE_VISITED (t) = 0;
1370 if (TREE_CODE_CLASS (code) == tcc_declaration)
1372 if (code == DEBUG_EXPR_DECL)
1373 DECL_UID (t) = --next_debug_decl_uid;
1374 else
1376 DECL_UID (t) = allocate_decl_uid ();
1377 if (DECL_PT_UID_SET_P (node))
1378 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1380 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1381 && DECL_HAS_VALUE_EXPR_P (node))
1383 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1384 DECL_HAS_VALUE_EXPR_P (t) = 1;
1386 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1387 if (VAR_P (node))
1389 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1390 t->decl_with_vis.symtab_node = NULL;
1392 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1394 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1395 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1397 if (TREE_CODE (node) == FUNCTION_DECL)
1399 DECL_STRUCT_FUNCTION (t) = NULL;
1400 t->decl_with_vis.symtab_node = NULL;
1403 else if (TREE_CODE_CLASS (code) == tcc_type)
1405 TYPE_UID (t) = next_type_uid++;
1406 /* The following is so that the debug code for
1407 the copy is different from the original type.
1408 The two statements usually duplicate each other
1409 (because they clear fields of the same union),
1410 but the optimizer should catch that. */
1411 TYPE_SYMTAB_ADDRESS (t) = 0;
1412 TYPE_SYMTAB_DIE (t) = 0;
1414 /* Do not copy the values cache. */
1415 if (TYPE_CACHED_VALUES_P (t))
1417 TYPE_CACHED_VALUES_P (t) = 0;
1418 TYPE_CACHED_VALUES (t) = NULL_TREE;
1421 else if (code == TARGET_OPTION_NODE)
1423 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1424 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1425 sizeof (struct cl_target_option));
1427 else if (code == OPTIMIZATION_NODE)
1429 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1430 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1431 sizeof (struct cl_optimization));
1434 return t;
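/* For example, copying a VAR_DECL with copy_node yields a node that shares
   the original's type and name trees (the copy is shallow) but has a fresh
   DECL_UID, a cleared TREE_CHAIN and no associated symtab node, so it can
   be registered with the symbol table independently of the original.  */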
1437 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1438 For example, this can copy a list made of TREE_LIST nodes. */
1440 tree
1441 copy_list (tree list)
1443 tree head;
1444 tree prev, next;
1446 if (list == 0)
1447 return 0;
1449 head = prev = copy_node (list);
1450 next = TREE_CHAIN (list);
1451 while (next)
1453 TREE_CHAIN (prev) = copy_node (next);
1454 prev = TREE_CHAIN (prev);
1455 next = TREE_CHAIN (next);
1457 return head;
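/* A minimal illustration (the local names are only for exposition):

     tree orig = tree_cons (NULL_TREE, integer_one_node,
                            tree_cons (NULL_TREE, integer_zero_node,
                                       NULL_TREE));
     tree dup = copy_list (orig);

   DUP is a fresh two-node chain, i.e. dup != orig and
   TREE_CHAIN (dup) != TREE_CHAIN (orig), while the shallow copy keeps
   TREE_VALUE (dup) == TREE_VALUE (orig).  */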
1461 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1462 INTEGER_CST with value CST and type TYPE. */
1464 static unsigned int
1465 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1467 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1468 /* We need extra HWIs if CST is an unsigned integer with its
1469 upper bit set. */
1470 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1471 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1472 return cst.get_len ();
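/* For example, on a host with 64-bit HOST_WIDE_INTs the unsigned 64-bit
   value 0xffffffffffffffff is canonically a single HWI as a wide_int, but
   because its type is unsigned and its upper bit is set it needs
   64 / HOST_BITS_PER_WIDE_INT + 1 == 2 units, the extra one holding the
   zero extension (see build_new_int_cst below).  */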
1475 /* Return a new INTEGER_CST with value CST and type TYPE. */
1477 static tree
1478 build_new_int_cst (tree type, const wide_int &cst)
1480 unsigned int len = cst.get_len ();
1481 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1482 tree nt = make_int_cst (len, ext_len);
1484 if (len < ext_len)
1486 --ext_len;
1487 TREE_INT_CST_ELT (nt, ext_len)
1488 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1489 for (unsigned int i = len; i < ext_len; ++i)
1490 TREE_INT_CST_ELT (nt, i) = -1;
1492 else if (TYPE_UNSIGNED (type)
1493 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1495 len--;
1496 TREE_INT_CST_ELT (nt, len)
1497 = zext_hwi (cst.elt (len),
1498 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1501 for (unsigned int i = 0; i < len; i++)
1502 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1503 TREE_TYPE (nt) = type;
1504 return nt;
1507 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1509 static tree
1510 build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
1511 CXX_MEM_STAT_INFO)
1513 size_t length = sizeof (struct tree_poly_int_cst);
1514 record_node_allocation_statistics (POLY_INT_CST, length);
1516 tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1518 TREE_SET_CODE (t, POLY_INT_CST);
1519 TREE_CONSTANT (t) = 1;
1520 TREE_TYPE (t) = type;
1521 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1522 POLY_INT_CST_COEFF (t, i) = coeffs[i];
1523 return t;
1526 /* Create a constant tree that contains CST sign-extended to TYPE. */
1528 tree
1529 build_int_cst (tree type, poly_int64 cst)
1531 /* Support legacy code. */
1532 if (!type)
1533 type = integer_type_node;
1535 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1538 /* Create a constant tree that contains CST zero-extended to TYPE. */
1540 tree
1541 build_int_cstu (tree type, poly_uint64 cst)
1543 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1546 /* Create a constant tree that contains CST sign-extended to TYPE. */
1548 tree
1549 build_int_cst_type (tree type, poly_int64 cst)
1551 gcc_assert (type);
1552 return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
1555 /* Construct a tree of type TYPE with the value given by CST. The signedness
1556 of CST is assumed to be the same as the signedness of TYPE. */
1558 tree
1559 double_int_to_tree (tree type, double_int cst)
1561 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1564 /* We force the wide_int CST to the range of the type TYPE by sign or
1565 zero extending it. OVERFLOWABLE indicates if we are interested in
1566 overflow of the value; when >0 we are only interested in signed
1567 overflow, for <0 we are interested in any overflow. OVERFLOWED
1568 indicates whether overflow has already occurred. We force
1570 CST's value to be within the range of TYPE (by setting to 0 or 1 all
1571 the bits outside the type's range). We set TREE_OVERFLOW on the result if
1572 OVERFLOWED is nonzero,
1573 or OVERFLOWABLE is >0 and signed overflow occurs,
1574 or OVERFLOWABLE is <0 and any overflow occurs.
1575 We return a new tree node for the extended wide_int. The node
1576 is shared if no overflow flags are set. */
1579 tree
1580 force_fit_type (tree type, const poly_wide_int_ref &cst,
1581 int overflowable, bool overflowed)
1583 signop sign = TYPE_SIGN (type);
1585 /* If we need to set overflow flags, return a new unshared node. */
1586 if (overflowed || !wi::fits_to_tree_p (cst, type))
1588 if (overflowed
1589 || overflowable < 0
1590 || (overflowable > 0 && sign == SIGNED))
1592 poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
1593 sign);
1594 tree t;
1595 if (tmp.is_constant ())
1596 t = build_new_int_cst (type, tmp.coeffs[0]);
1597 else
1599 tree coeffs[NUM_POLY_INT_COEFFS];
1600 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1602 coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
1603 TREE_OVERFLOW (coeffs[i]) = 1;
1605 t = build_new_poly_int_cst (type, coeffs);
1607 TREE_OVERFLOW (t) = 1;
1608 return t;
1612 /* Else build a shared node. */
1613 return wide_int_to_tree (type, cst);
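/* For example, forcing the value 256 into an 8-bit unsigned type with
   OVERFLOWABLE == 1 and OVERFLOWED == false is not treated as an error:
   the value does not fit, but because the sign is UNSIGNED the function
   falls through and returns the shared constant 0.  With
   OVERFLOWABLE == -1 it instead returns an unshared INTEGER_CST of value 0
   with TREE_OVERFLOW set.  */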
1616 /* These are the hash table functions for the hash table of INTEGER_CST
1617 nodes of a sizetype. */
1619 /* Return the hash code of X, an INTEGER_CST. */
1621 hashval_t
1622 int_cst_hasher::hash (tree x)
1624 const_tree const t = x;
1625 hashval_t code = TYPE_UID (TREE_TYPE (t));
1626 int i;
1628 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1629 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1631 return code;
1634 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1635 is the same as that given by Y (another INTEGER_CST tree node). */
1637 bool
1638 int_cst_hasher::equal (tree x, tree y)
1640 const_tree const xt = x;
1641 const_tree const yt = y;
1643 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1644 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1645 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1646 return false;
1648 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1649 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1650 return false;
1652 return true;
1655 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1656 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1657 number of slots that can be cached for the type. */
1659 static inline tree
1660 cache_wide_int_in_type_cache (tree type, const wide_int &cst,
1661 int slot, int max_slots)
1663 gcc_checking_assert (slot >= 0);
1664 /* Initialize cache. */
1665 if (!TYPE_CACHED_VALUES_P (type))
1667 TYPE_CACHED_VALUES_P (type) = 1;
1668 TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
1670 tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
1671 if (!t)
1673 /* Create a new shared int. */
1674 t = build_new_int_cst (type, cst);
1675 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
1677 return t;
1680 /* Create an INT_CST node of TYPE and value CST.
1681 The returned node is always shared. For small integers we use a
1682 per-type vector cache, for larger ones we use a single hash table.
1683 The value is extended from its precision according to the sign of
1684 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1685 the upper bits and ensures that hashing and value equality based
1686 upon the underlying HOST_WIDE_INTs works without masking. */
1688 static tree
1689 wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
1691 tree t;
1692 int ix = -1;
1693 int limit = 0;
1695 gcc_assert (type);
1696 unsigned int prec = TYPE_PRECISION (type);
1697 signop sgn = TYPE_SIGN (type);
1699 /* Verify that everything is canonical. */
1700 int l = pcst.get_len ();
1701 if (l > 1)
1703 if (pcst.elt (l - 1) == 0)
1704 gcc_checking_assert (pcst.elt (l - 2) < 0);
1705 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1706 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1709 wide_int cst = wide_int::from (pcst, prec, sgn);
1710 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1712 enum tree_code code = TREE_CODE (type);
1713 if (code == POINTER_TYPE || code == REFERENCE_TYPE)
1715 /* Cache NULL pointer and zero bounds. */
1716 if (cst == 0)
1717 ix = 0;
1718 /* Cache upper bounds of pointers. */
1719 else if (cst == wi::max_value (prec, sgn))
1720 ix = 1;
1721 /* Cache 1 which is used for a non-zero range. */
1722 else if (cst == 1)
1723 ix = 2;
1725 if (ix >= 0)
1727 t = cache_wide_int_in_type_cache (type, cst, ix, 3);
1728 /* Make sure no one is clobbering the shared constant. */
1729 gcc_checking_assert (TREE_TYPE (t) == type
1730 && cst == wi::to_wide (t));
1731 return t;
1734 if (ext_len == 1)
1736 /* We just need to store a single HOST_WIDE_INT. */
1737 HOST_WIDE_INT hwi;
1738 if (TYPE_UNSIGNED (type))
1739 hwi = cst.to_uhwi ();
1740 else
1741 hwi = cst.to_shwi ();
1743 switch (code)
1745 case NULLPTR_TYPE:
1746 gcc_assert (hwi == 0);
1747 /* Fallthru. */
1749 case POINTER_TYPE:
1750 case REFERENCE_TYPE:
1751 /* Ignore pointers, as they were already handled above. */
1752 break;
1754 case BOOLEAN_TYPE:
1755 /* Cache false or true. */
1756 limit = 2;
1757 if (IN_RANGE (hwi, 0, 1))
1758 ix = hwi;
1759 break;
1761 case INTEGER_TYPE:
1762 case OFFSET_TYPE:
1763 if (TYPE_SIGN (type) == UNSIGNED)
1765 /* Cache [0, N). */
1766 limit = param_integer_share_limit;
1767 if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
1768 ix = hwi;
1770 else
1772 /* Cache [-1, N). */
1773 limit = param_integer_share_limit + 1;
1774 if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
1775 ix = hwi + 1;
1777 break;
1779 case ENUMERAL_TYPE:
1780 break;
1782 default:
1783 gcc_unreachable ();
1786 if (ix >= 0)
1788 t = cache_wide_int_in_type_cache (type, cst, ix, limit);
1789 /* Make sure no one is clobbering the shared constant. */
1790 gcc_checking_assert (TREE_TYPE (t) == type
1791 && TREE_INT_CST_NUNITS (t) == 1
1792 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1793 && TREE_INT_CST_EXT_NUNITS (t) == 1
1794 && TREE_INT_CST_ELT (t, 0) == hwi);
1795 return t;
1797 else
1799 /* Use the cache of larger shared ints, using int_cst_node as
1800 a temporary. */
1802 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1803 TREE_TYPE (int_cst_node) = type;
1805 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1806 t = *slot;
1807 if (!t)
1809 /* Insert this one into the hash table. */
1810 t = int_cst_node;
1811 *slot = t;
1812 /* Make a new node for next time round. */
1813 int_cst_node = make_int_cst (1, 1);
1817 else
1819 /* The value either hashes properly or we drop it on the floor
1820 for the gc to take care of. There will not be enough of them
1821 to worry about. */
1823 tree nt = build_new_int_cst (type, cst);
1824 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1825 t = *slot;
1826 if (!t)
1828 /* Insert this one into the hash table. */
1829 t = nt;
1830 *slot = t;
1832 else
1833 ggc_free (nt);
1836 return t;
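/* The practical effect of the caching above is that small constants are
   physically shared: unless --param integer-share-limit has been lowered,
   two separate calls to build_int_cst (integer_type_node, 7) return the
   very same INTEGER_CST node, while values outside the cached ranges are
   still unified through int_cst_hash_table.  */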
1839 hashval_t
1840 poly_int_cst_hasher::hash (tree t)
1842 inchash::hash hstate;
1844 hstate.add_int (TYPE_UID (TREE_TYPE (t)));
1845 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1846 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
1848 return hstate.end ();
1851 bool
1852 poly_int_cst_hasher::equal (tree x, const compare_type &y)
1854 if (TREE_TYPE (x) != y.first)
1855 return false;
1856 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1857 if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
1858 return false;
1859 return true;
1862 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1863 The elements must also have type TYPE. */
1865 tree
1866 build_poly_int_cst (tree type, const poly_wide_int_ref &values)
1868 unsigned int prec = TYPE_PRECISION (type);
1869 gcc_assert (prec <= values.coeffs[0].get_precision ());
1870 poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
1872 inchash::hash h;
1873 h.add_int (TYPE_UID (type));
1874 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1875 h.add_wide_int (c.coeffs[i]);
1876 poly_int_cst_hasher::compare_type comp (type, &c);
1877 tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
1878 INSERT);
1879 if (*slot == NULL_TREE)
1881 tree coeffs[NUM_POLY_INT_COEFFS];
1882 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1883 coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
1884 *slot = build_new_poly_int_cst (type, coeffs);
1886 return *slot;
1889 /* Create a constant tree with value VALUE in type TYPE. */
1891 tree
1892 wide_int_to_tree (tree type, const poly_wide_int_ref &value)
1894 if (value.is_constant ())
1895 return wide_int_to_tree_1 (type, value.coeffs[0]);
1896 return build_poly_int_cst (type, value);
1899 /* Insert INTEGER_CST T into a cache of integer constants. And return
1900 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1901 is false, and T falls into the type's 'smaller values' range, there
1902 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1903 or the value is large, should an existing entry exist, it is
1904 returned (rather than inserting T). */
1906 tree
1907 cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
1909 tree type = TREE_TYPE (t);
1910 int ix = -1;
1911 int limit = 0;
1912 int prec = TYPE_PRECISION (type);
1914 gcc_assert (!TREE_OVERFLOW (t));
1916 /* The caching indices here must match those in
1917 wide_int_to_tree_1. */
1918 switch (TREE_CODE (type))
1920 case NULLPTR_TYPE:
1921 gcc_checking_assert (integer_zerop (t));
1922 /* Fallthru. */
1924 case POINTER_TYPE:
1925 case REFERENCE_TYPE:
1927 if (integer_zerop (t))
1928 ix = 0;
1929 else if (integer_onep (t))
1930 ix = 2;
1932 if (ix >= 0)
1933 limit = 3;
1935 break;
1937 case BOOLEAN_TYPE:
1938 /* Cache false or true. */
1939 limit = 2;
1940 if (wi::ltu_p (wi::to_wide (t), 2))
1941 ix = TREE_INT_CST_ELT (t, 0);
1942 break;
1944 case INTEGER_TYPE:
1945 case OFFSET_TYPE:
1946 if (TYPE_UNSIGNED (type))
1948 /* Cache 0..N */
1949 limit = param_integer_share_limit;
1951 /* This is a little hokey, but if the prec is smaller than
1952 what is necessary to hold param_integer_share_limit, then the
1953 obvious test will not get the correct answer. */
1954 if (prec < HOST_BITS_PER_WIDE_INT)
1956 if (tree_to_uhwi (t)
1957 < (unsigned HOST_WIDE_INT) param_integer_share_limit)
1958 ix = tree_to_uhwi (t);
1960 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1961 ix = tree_to_uhwi (t);
1963 else
1965 /* Cache -1..N */
1966 limit = param_integer_share_limit + 1;
1968 if (integer_minus_onep (t))
1969 ix = 0;
1970 else if (!wi::neg_p (wi::to_wide (t)))
1972 if (prec < HOST_BITS_PER_WIDE_INT)
1974 if (tree_to_shwi (t) < param_integer_share_limit)
1975 ix = tree_to_shwi (t) + 1;
1977 else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
1978 ix = tree_to_shwi (t) + 1;
1981 break;
1983 case ENUMERAL_TYPE:
1984 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1985 members. */
1986 break;
1988 default:
1989 gcc_unreachable ();
1992 if (ix >= 0)
1994 /* Look for it in the type's vector of small shared ints. */
1995 if (!TYPE_CACHED_VALUES_P (type))
1997 TYPE_CACHED_VALUES_P (type) = 1;
1998 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
2001 if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
2003 gcc_checking_assert (might_duplicate);
2004 t = r;
2006 else
2007 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
2009 else
2011 /* Use the cache of larger shared ints. */
2012 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
2013 if (tree r = *slot)
2015 /* If there is already an entry for the number, verify it's the
2016 same value. */
2017 gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
2018 /* And return the cached value. */
2019 t = r;
2021 else
2022 /* Otherwise insert this one into the hash table. */
2023 *slot = t;
2026 return t;
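/* Illustrative usage sketch (hypothetical helper, not part of tree.cc):
   because small values are cached per type in TYPE_CACHED_VALUES and
   larger ones in int_cst_hash_table, building the same small constant
   twice is normally expected to yield the very same node.  */

static void
example_int_cst_sharing (void)
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (integer_type_node, 7);
  /* Small shared constants come from the per-type cache.  */
  gcc_checking_assert (a == b);
}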
2030 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
2031 and the rest are zeros. */
2033 tree
2034 build_low_bits_mask (tree type, unsigned bits)
2036 gcc_assert (bits <= TYPE_PRECISION (type));
2038 return wide_int_to_tree (type, wi::mask (bits, false,
2039 TYPE_PRECISION (type)));
2042 /* Checks that X is an integer constant that can be expressed in (unsigned)
2043 HOST_WIDE_INT without loss of precision. */
2045 bool
2046 cst_and_fits_in_hwi (const_tree x)
2048 return (TREE_CODE (x) == INTEGER_CST
2049 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
2052 /* Build a newly constructed VECTOR_CST with the given values of
2053 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2055 tree
2056 make_vector (unsigned log2_npatterns,
2057 unsigned int nelts_per_pattern MEM_STAT_DECL)
2059 gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
2060 tree t;
2061 unsigned npatterns = 1 << log2_npatterns;
2062 unsigned encoded_nelts = npatterns * nelts_per_pattern;
2063 unsigned length = (sizeof (struct tree_vector)
2064 + (encoded_nelts - 1) * sizeof (tree));
2066 record_node_allocation_statistics (VECTOR_CST, length);
2068 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2070 TREE_SET_CODE (t, VECTOR_CST);
2071 TREE_CONSTANT (t) = 1;
2072 VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
2073 VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;
2075 return t;
2078 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2079 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2081 tree
2082 build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
2084 if (vec_safe_length (v) == 0)
2085 return build_zero_cst (type);
2087 unsigned HOST_WIDE_INT idx, nelts;
2088 tree value;
2090 /* We can't construct a VECTOR_CST for a variable number of elements. */
2091 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
2092 tree_vector_builder vec (type, nelts, 1);
2093 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
2095 if (TREE_CODE (value) == VECTOR_CST)
2097 /* If NELTS is constant then this must be too. */
2098 unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
2099 for (unsigned i = 0; i < sub_nelts; ++i)
2100 vec.quick_push (VECTOR_CST_ELT (value, i));
2102 else
2103 vec.quick_push (value);
2105 while (vec.length () < nelts)
2106 vec.quick_push (build_zero_cst (TREE_TYPE (type)));
2108 return vec.build ();
2111 /* Build a vector of type VECTYPE where all the elements are SCs. */
2112 tree
2113 build_vector_from_val (tree vectype, tree sc)
2115 unsigned HOST_WIDE_INT i, nunits;
2117 if (sc == error_mark_node)
2118 return sc;
2120 /* Verify that the vector type is suitable for SC. Note that there
2121 is some inconsistency in the type-system with respect to restrict
2122 qualifications of pointers. Vector types always have a main-variant
2123 element type and the qualification is applied to the vector-type.
2124 So TREE_TYPE (vector-type) does not return a properly qualified
2125 vector element-type. */
2126 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
2127 TREE_TYPE (vectype)));
2129 if (CONSTANT_CLASS_P (sc))
2131 tree_vector_builder v (vectype, 1, 1);
2132 v.quick_push (sc);
2133 return v.build ();
2135 else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
2136 return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
2137 else
2139 vec<constructor_elt, va_gc> *v;
2140 vec_alloc (v, nunits);
2141 for (i = 0; i < nunits; ++i)
2142 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
2143 return build_constructor (vectype, v);
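/* Usage sketch (hypothetical helper): splat a scalar constant across a
   four-element integer vector.  Assumes such a vector type is
   representable for the target.  */

static tree
example_build_splat (void)
{
  tree v4si = build_vector_type (integer_type_node, 4);
  tree seven = build_int_cst (integer_type_node, 7);
  /* SEVEN is CONSTANT_CLASS_P, so this takes the VECTOR_CST path and
     produces a duplicated (single-pattern) encoding.  */
  return build_vector_from_val (v4si, seven);
}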
2147 /* If TYPE is not a vector type, just return SC, otherwise return
2148 build_vector_from_val (TYPE, SC). */
2150 tree
2151 build_uniform_cst (tree type, tree sc)
2153 if (!VECTOR_TYPE_P (type))
2154 return sc;
2156 return build_vector_from_val (type, sc);
2159 /* Build a vector series of type TYPE in which element I has the value
2160 BASE + I * STEP. The result is a constant if BASE and STEP are constant
2161 and a VEC_SERIES_EXPR otherwise. */
2163 tree
2164 build_vec_series (tree type, tree base, tree step)
2166 if (integer_zerop (step))
2167 return build_vector_from_val (type, base);
2168 if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
2170 tree_vector_builder builder (type, 1, 3);
2171 tree elt1 = wide_int_to_tree (TREE_TYPE (base),
2172 wi::to_wide (base) + wi::to_wide (step));
2173 tree elt2 = wide_int_to_tree (TREE_TYPE (base),
2174 wi::to_wide (elt1) + wi::to_wide (step));
2175 builder.quick_push (base);
2176 builder.quick_push (elt1);
2177 builder.quick_push (elt2);
2178 return builder.build ();
2180 return build2 (VEC_SERIES_EXPR, type, base, step);
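/* Illustrative sketch (hypothetical helper): with a constant BASE and
   STEP the result is a VECTOR_CST encoded as one pattern of three
   elements { BASE, BASE+STEP, BASE+2*STEP }, from which the remaining
   lanes are implied.  VECTYPE is assumed to be an integer vector type.  */

static tree
example_build_series (tree vectype)
{
  tree elt_type = TREE_TYPE (vectype);
  tree base = build_int_cst (elt_type, 1);
  tree step = build_int_cst (elt_type, 2);
  return build_vec_series (vectype, base, step);  /* { 1, 3, 5, ... } */
}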
2183 /* Return a vector with the same number of units and number of bits
2184 as VEC_TYPE, but in which the elements are a linear series of unsigned
2185 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2187 tree
2188 build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
2190 tree index_vec_type = vec_type;
2191 tree index_elt_type = TREE_TYPE (vec_type);
2192 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
2193 if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
2195 index_elt_type = build_nonstandard_integer_type
2196 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
2197 index_vec_type = build_vector_type (index_elt_type, nunits);
2200 tree_vector_builder v (index_vec_type, 1, 3);
2201 for (unsigned int i = 0; i < 3; ++i)
2202 v.quick_push (build_int_cstu (index_elt_type, base + i * step));
2203 return v.build ();
2206 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2207 elements are A and the rest are B. */
2209 tree
2210 build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
2212 gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
2213 unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
2214 /* Optimize the constant case. */
2215 if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
2216 count /= 2;
2217 tree_vector_builder builder (vec_type, count, 2);
2218 for (unsigned int i = 0; i < count * 2; ++i)
2219 builder.quick_push (i < num_a ? a : b);
2220 return builder.build ();
2223 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2224 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2226 void
2227 recompute_constructor_flags (tree c)
2229 unsigned int i;
2230 tree val;
2231 bool constant_p = true;
2232 bool side_effects_p = false;
2233 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2235 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2237 /* Mostly ctors will have elts that don't have side-effects, so
2238 the usual case is to scan all the elements. Hence a single
2239 loop for both const and side effects, rather than one loop
2240 each (with early outs). */
2241 if (!TREE_CONSTANT (val))
2242 constant_p = false;
2243 if (TREE_SIDE_EFFECTS (val))
2244 side_effects_p = true;
2247 TREE_SIDE_EFFECTS (c) = side_effects_p;
2248 TREE_CONSTANT (c) = constant_p;
2251 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2252 CONSTRUCTOR C. */
2254 void
2255 verify_constructor_flags (tree c)
2257 unsigned int i;
2258 tree val;
2259 bool constant_p = TREE_CONSTANT (c);
2260 bool side_effects_p = TREE_SIDE_EFFECTS (c);
2261 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
2263 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
2265 if (constant_p && !TREE_CONSTANT (val))
2266 internal_error ("non-constant element in constant CONSTRUCTOR");
2267 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
2268 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2272 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2273 are in the vec pointed to by VALS. */
2274 tree
2275 build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
2277 tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);
2279 TREE_TYPE (c) = type;
2280 CONSTRUCTOR_ELTS (c) = vals;
2282 recompute_constructor_flags (c);
2284 return c;
2287 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2288 INDEX and VALUE. */
2289 tree
2290 build_constructor_single (tree type, tree index, tree value)
2292 vec<constructor_elt, va_gc> *v;
2293 constructor_elt elt = {index, value};
2295 vec_alloc (v, 1);
2296 v->quick_push (elt);
2298 return build_constructor (type, v);
2302 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2303 are in a list pointed to by VALS. */
2304 tree
2305 build_constructor_from_list (tree type, tree vals)
2307 tree t;
2308 vec<constructor_elt, va_gc> *v = NULL;
2310 if (vals)
2312 vec_alloc (v, list_length (vals));
2313 for (t = vals; t; t = TREE_CHAIN (t))
2314 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
2317 return build_constructor (type, v);
2320 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2321 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2322 fields in the constructor remain null. */
2324 tree
2325 build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
2327 vec<constructor_elt, va_gc> *v = NULL;
2329 for (tree t : vals)
2330 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
2332 return build_constructor (type, v);
2335 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2336 of elements, provided as index/value pairs. */
2338 tree
2339 build_constructor_va (tree type, int nelts, ...)
2341 vec<constructor_elt, va_gc> *v = NULL;
2342 va_list p;
2344 va_start (p, nelts);
2345 vec_alloc (v, nelts);
2346 while (nelts--)
2348 tree index = va_arg (p, tree);
2349 tree value = va_arg (p, tree);
2350 CONSTRUCTOR_APPEND_ELT (v, index, value);
2352 va_end (p);
2353 return build_constructor (type, v);
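/* Usage sketch (hypothetical helper): build the sparse array
   initializer { [0] = 1, [2] = 3 } by passing index/value pairs.
   ARR_TYPE is assumed to be an integer ARRAY_TYPE.  */

static tree
example_sparse_ctor (tree arr_type)
{
  tree elt_type = TREE_TYPE (arr_type);
  return build_constructor_va (arr_type, 2,
                               size_int (0), build_int_cst (elt_type, 1),
                               size_int (2), build_int_cst (elt_type, 3));
}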
2356 /* Return a node of type TYPE for which TREE_CLOBBER_P is true. */
2358 tree
2359 build_clobber (tree type, enum clobber_kind kind)
2361 tree clobber = build_constructor (type, NULL);
2362 TREE_THIS_VOLATILE (clobber) = true;
2363 CLOBBER_KIND (clobber) = kind;
2364 return clobber;
2367 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
2369 tree
2370 build_fixed (tree type, FIXED_VALUE_TYPE f)
2372 tree v;
2373 FIXED_VALUE_TYPE *fp;
2375 v = make_node (FIXED_CST);
2376 fp = ggc_alloc<fixed_value> ();
2377 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
2379 TREE_TYPE (v) = type;
2380 TREE_FIXED_CST_PTR (v) = fp;
2381 return v;
2384 /* Return a new REAL_CST node whose type is TYPE and value is D. */
2386 tree
2387 build_real (tree type, REAL_VALUE_TYPE d)
2389 tree v;
2390 int overflow = 0;
2392 /* dconst{0,1,2,m1,half} are used in various places in
2393 the middle-end and optimizers, allow them here
2394 even for decimal floating point types as an exception
2395 by converting them to decimal. */
2396 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
2397 && (d.cl == rvc_normal || d.cl == rvc_zero)
2398 && !d.decimal)
2400 if (memcmp (&d, &dconst1, sizeof (d)) == 0)
2401 decimal_real_from_string (&d, "1");
2402 else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
2403 decimal_real_from_string (&d, "2");
2404 else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
2405 decimal_real_from_string (&d, "-1");
2406 else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
2407 decimal_real_from_string (&d, "0.5");
2408 else if (memcmp (&d, &dconst0, sizeof (d)) == 0)
2410 /* Make sure to give zero the minimum quantum exponent for
2411 the type (which corresponds to all bits zero). */
2412 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
2413 char buf[16];
2414 sprintf (buf, "0e%d", fmt->emin - fmt->p);
2415 decimal_real_from_string (&d, buf);
2417 else
2418 gcc_unreachable ();
2421 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
2422 Consider doing it via real_convert now. */
2424 v = make_node (REAL_CST);
2425 TREE_TYPE (v) = type;
2426 memcpy (TREE_REAL_CST_PTR (v), &d, sizeof (REAL_VALUE_TYPE));
2427 TREE_OVERFLOW (v) = overflow;
2428 return v;
2431 /* Like build_real, but first truncate D to the type. */
2433 tree
2434 build_real_truncate (tree type, REAL_VALUE_TYPE d)
2436 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
2439 /* Return a REAL_VALUE_TYPE whose value is the integer value of the
2440 INTEGER_CST node I, converted to the floating-point format of TYPE. */
2442 REAL_VALUE_TYPE
2443 real_value_from_int_cst (const_tree type, const_tree i)
2445 REAL_VALUE_TYPE d;
2447 /* Clear all bits of the real value type so that we can later do
2448 bitwise comparisons to see if two values are the same. */
2449 memset (&d, 0, sizeof d);
2451 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
2452 TYPE_SIGN (TREE_TYPE (i)));
2453 return d;
2456 /* Given a tree representing an integer constant I, return a tree
2457 representing the same value as a floating-point constant of type TYPE. */
2459 tree
2460 build_real_from_int_cst (tree type, const_tree i)
2462 tree v;
2463 int overflow = TREE_OVERFLOW (i);
2465 v = build_real (type, real_value_from_int_cst (type, i));
2467 TREE_OVERFLOW (v) |= overflow;
2468 return v;
2471 /* Return a new REAL_CST node whose type is TYPE
2472 and whose value is the integer value I which has sign SGN. */
2474 tree
2475 build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
2477 REAL_VALUE_TYPE d;
2479 /* Clear all bits of the real value type so that we can later do
2480 bitwise comparisons to see if two values are the same. */
2481 memset (&d, 0, sizeof d);
2483 real_from_integer (&d, TYPE_MODE (type), i, sgn);
2484 return build_real (type, d);
2487 /* Return a newly constructed STRING_CST node whose value is the LEN
2488 characters at STR when STR is nonnull, or all zeros otherwise.
2489 Note that for a C string literal, LEN should include the trailing NUL.
2490 The TREE_TYPE is not initialized. */
2492 tree
2493 build_string (unsigned len, const char *str /*= NULL */)
2495 /* Do not waste bytes provided by padding of struct tree_string. */
2496 unsigned size = len + offsetof (struct tree_string, str) + 1;
2498 record_node_allocation_statistics (STRING_CST, size);
2500 tree s = (tree) ggc_internal_alloc (size);
2502 memset (s, 0, sizeof (struct tree_typed));
2503 TREE_SET_CODE (s, STRING_CST);
2504 TREE_CONSTANT (s) = 1;
2505 TREE_STRING_LENGTH (s) = len;
2506 if (str)
2507 memcpy (s->string.str, str, len);
2508 else
2509 memset (s->string.str, 0, len);
2510 s->string.str[len] = '\0';
2512 return s;
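/* Illustrative sketch (hypothetical helper): build a STRING_CST for the
   C literal "hi" (LEN counts the trailing NUL) and give it an array
   type afterwards, since build_string leaves TREE_TYPE uninitialized.  */

static tree
example_build_string (void)
{
  tree s = build_string (3, "hi");
  TREE_TYPE (s) = build_array_type (char_type_node,
                                    build_index_type (size_int (2)));
  return s;
}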
2515 /* Return a newly constructed COMPLEX_CST node whose value is
2516 specified by the real and imaginary parts REAL and IMAG.
2517 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2518 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2520 tree
2521 build_complex (tree type, tree real, tree imag)
2523 gcc_assert (CONSTANT_CLASS_P (real));
2524 gcc_assert (CONSTANT_CLASS_P (imag));
2526 tree t = make_node (COMPLEX_CST);
2528 TREE_REALPART (t) = real;
2529 TREE_IMAGPART (t) = imag;
2530 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2531 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2532 return t;
2535 /* Build a complex (inf +- 0i), such as for the result of cproj.
2536 TYPE is the complex tree type of the result. If NEG is true, the
2537 imaginary zero is negative. */
2539 tree
2540 build_complex_inf (tree type, bool neg)
2542 REAL_VALUE_TYPE rzero = dconst0;
2544 rzero.sign = neg;
2545 return build_complex (type, build_real (TREE_TYPE (type), dconstinf),
2546 build_real (TREE_TYPE (type), rzero));
2549 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2550 element is set to 1. In particular, this is 1 + i for complex types. */
2552 tree
2553 build_each_one_cst (tree type)
2555 if (TREE_CODE (type) == COMPLEX_TYPE)
2557 tree scalar = build_one_cst (TREE_TYPE (type));
2558 return build_complex (type, scalar, scalar);
2560 else
2561 return build_one_cst (type);
2564 /* Return a constant of arithmetic type TYPE which is the
2565 multiplicative identity of the set TYPE. */
2567 tree
2568 build_one_cst (tree type)
2570 switch (TREE_CODE (type))
2572 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2573 case POINTER_TYPE: case REFERENCE_TYPE:
2574 case OFFSET_TYPE:
2575 return build_int_cst (type, 1);
2577 case REAL_TYPE:
2578 return build_real (type, dconst1);
2580 case FIXED_POINT_TYPE:
2581 /* We can only generate 1 for accum types. */
2582 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2583 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2585 case VECTOR_TYPE:
2587 tree scalar = build_one_cst (TREE_TYPE (type));
2589 return build_vector_from_val (type, scalar);
2592 case COMPLEX_TYPE:
2593 return build_complex (type,
2594 build_one_cst (TREE_TYPE (type)),
2595 build_zero_cst (TREE_TYPE (type)));
2597 default:
2598 gcc_unreachable ();
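/* Illustrative contrast (hypothetical helper): for a complex integer
   type, build_one_cst yields the multiplicative identity 1 + 0i,
   whereas build_each_one_cst above yields 1 + 1i.  */

static tree
example_complex_one (tree complex_int_type, bool each_p)
{
  return (each_p
          ? build_each_one_cst (complex_int_type)   /* 1 + 1i */
          : build_one_cst (complex_int_type));      /* 1 + 0i */
}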
2602 /* Return an integer of type TYPE containing all 1's in as much precision as
2603 it contains, or a complex or vector whose subparts are such integers. */
2605 tree
2606 build_all_ones_cst (tree type)
2608 if (TREE_CODE (type) == COMPLEX_TYPE)
2610 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2611 return build_complex (type, scalar, scalar);
2613 else
2614 return build_minus_one_cst (type);
2617 /* Return a constant of arithmetic type TYPE which is the
2618 opposite of the multiplicative identity of the set TYPE. */
2620 tree
2621 build_minus_one_cst (tree type)
2623 switch (TREE_CODE (type))
2625 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2626 case POINTER_TYPE: case REFERENCE_TYPE:
2627 case OFFSET_TYPE:
2628 return build_int_cst (type, -1);
2630 case REAL_TYPE:
2631 return build_real (type, dconstm1);
2633 case FIXED_POINT_TYPE:
2634 /* We can only generate -1 for accum types. */
2635 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2636 return build_fixed (type,
2637 fixed_from_double_int (double_int_minus_one,
2638 SCALAR_TYPE_MODE (type)));
2640 case VECTOR_TYPE:
2642 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2644 return build_vector_from_val (type, scalar);
2647 case COMPLEX_TYPE:
2648 return build_complex (type,
2649 build_minus_one_cst (TREE_TYPE (type)),
2650 build_zero_cst (TREE_TYPE (type)));
2652 default:
2653 gcc_unreachable ();
2657 /* Build a zero constant of type TYPE. This is used by constructor folding
2658 and thus the constant should be represented in memory by
2659 zero(es). */
2661 tree
2662 build_zero_cst (tree type)
2664 switch (TREE_CODE (type))
2666 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2667 case POINTER_TYPE: case REFERENCE_TYPE:
2668 case OFFSET_TYPE: case NULLPTR_TYPE:
2669 return build_int_cst (type, 0);
2671 case REAL_TYPE:
2672 return build_real (type, dconst0);
2674 case FIXED_POINT_TYPE:
2675 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2677 case VECTOR_TYPE:
2679 tree scalar = build_zero_cst (TREE_TYPE (type));
2681 return build_vector_from_val (type, scalar);
2684 case COMPLEX_TYPE:
2686 tree zero = build_zero_cst (TREE_TYPE (type));
2688 return build_complex (type, zero, zero);
2691 default:
2692 if (!AGGREGATE_TYPE_P (type))
2693 return fold_convert (type, integer_zero_node);
2694 return build_constructor (type, NULL);
2698 /* If floating-point type TYPE has an IEEE-style sign bit, return an
2699 unsigned constant in which only the sign bit is set. Return null
2700 otherwise. */
2702 tree
2703 sign_mask_for (tree type)
2705 /* Avoid having to choose between a real-only sign and a pair of signs.
2706 This could be relaxed if the choice becomes obvious later. */
2707 if (TREE_CODE (type) == COMPLEX_TYPE)
2708 return NULL_TREE;
2710 auto eltmode = as_a<scalar_float_mode> (element_mode (type));
2711 auto bits = REAL_MODE_FORMAT (eltmode)->ieee_bits;
2712 if (!bits || !pow2p_hwi (bits))
2713 return NULL_TREE;
2715 tree inttype = unsigned_type_for (type);
2716 if (!inttype)
2717 return NULL_TREE;
2719 auto mask = wi::set_bit_in_zero (bits - 1, bits);
2720 if (TREE_CODE (inttype) == VECTOR_TYPE)
2722 tree elt = wide_int_to_tree (TREE_TYPE (inttype), mask);
2723 return build_vector_from_val (inttype, elt);
2725 return wide_int_to_tree (inttype, mask);
2728 /* Build a BINFO with room for BASE_BINFOS base binfos. */
2730 tree
2731 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
2733 tree t;
2734 size_t length = (offsetof (struct tree_binfo, base_binfos)
2735 + vec<tree, va_gc>::embedded_size (base_binfos));
2737 record_node_allocation_statistics (TREE_BINFO, length);
2739 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2741 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2743 TREE_SET_CODE (t, TREE_BINFO);
2745 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2747 return t;
2750 /* Create a CASE_LABEL_EXPR tree node and return it. */
2752 tree
2753 build_case_label (tree low_value, tree high_value, tree label_decl)
2755 tree t = make_node (CASE_LABEL_EXPR);
2757 TREE_TYPE (t) = void_type_node;
2758 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2760 CASE_LOW (t) = low_value;
2761 CASE_HIGH (t) = high_value;
2762 CASE_LABEL (t) = label_decl;
2763 CASE_CHAIN (t) = NULL_TREE;
2765 return t;
2768 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2769 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2770 The latter determines the length of the HOST_WIDE_INT vector. */
2772 tree
2773 make_int_cst (int len, int ext_len MEM_STAT_DECL)
2775 tree t;
2776 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2777 + sizeof (struct tree_int_cst));
2779 gcc_assert (len);
2780 record_node_allocation_statistics (INTEGER_CST, length);
2782 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2784 TREE_SET_CODE (t, INTEGER_CST);
2785 TREE_INT_CST_NUNITS (t) = len;
2786 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2787 /* to_offset can only be applied to trees that are offset_int-sized
2788 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2789 must be exactly the precision of offset_int and so LEN is correct. */
2790 if (ext_len <= OFFSET_INT_ELTS)
2791 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2792 else
2793 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2795 TREE_CONSTANT (t) = 1;
2797 return t;
2800 /* Build a newly constructed TREE_VEC node of length LEN. */
2802 tree
2803 make_tree_vec (int len MEM_STAT_DECL)
2805 tree t;
2806 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2808 record_node_allocation_statistics (TREE_VEC, length);
2810 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2812 TREE_SET_CODE (t, TREE_VEC);
2813 TREE_VEC_LENGTH (t) = len;
2815 return t;
2818 /* Grow a TREE_VEC node to new length LEN. */
2820 tree
2821 grow_tree_vec (tree v, int len MEM_STAT_DECL)
2823 gcc_assert (TREE_CODE (v) == TREE_VEC);
2825 int oldlen = TREE_VEC_LENGTH (v);
2826 gcc_assert (len > oldlen);
2828 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2829 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2831 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2833 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2835 TREE_VEC_LENGTH (v) = len;
2837 return v;
2840 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2841 fixed, and scalar, complex or vector. */
2843 bool
2844 zerop (const_tree expr)
2846 return (integer_zerop (expr)
2847 || real_zerop (expr)
2848 || fixed_zerop (expr));
2851 /* Return 1 if EXPR is the integer constant zero or a complex constant
2852 of zero, or a location wrapper for such a constant. */
2854 bool
2855 integer_zerop (const_tree expr)
2857 STRIP_ANY_LOCATION_WRAPPER (expr);
2859 switch (TREE_CODE (expr))
2861 case INTEGER_CST:
2862 return wi::to_wide (expr) == 0;
2863 case COMPLEX_CST:
2864 return (integer_zerop (TREE_REALPART (expr))
2865 && integer_zerop (TREE_IMAGPART (expr)));
2866 case VECTOR_CST:
2867 return (VECTOR_CST_NPATTERNS (expr) == 1
2868 && VECTOR_CST_DUPLICATE_P (expr)
2869 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
2870 default:
2871 return false;
2875 /* Return 1 if EXPR is the integer constant one or the corresponding
2876 complex constant, or a location wrapper for such a constant. */
2878 bool
2879 integer_onep (const_tree expr)
2881 STRIP_ANY_LOCATION_WRAPPER (expr);
2883 switch (TREE_CODE (expr))
2885 case INTEGER_CST:
2886 return wi::eq_p (wi::to_widest (expr), 1);
2887 case COMPLEX_CST:
2888 return (integer_onep (TREE_REALPART (expr))
2889 && integer_zerop (TREE_IMAGPART (expr)));
2890 case VECTOR_CST:
2891 return (VECTOR_CST_NPATTERNS (expr) == 1
2892 && VECTOR_CST_DUPLICATE_P (expr)
2893 && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
2894 default:
2895 return false;
2899 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2900 return 1 if every piece is the integer constant one.
2901 Also return 1 for location wrappers for such a constant. */
2903 bool
2904 integer_each_onep (const_tree expr)
2906 STRIP_ANY_LOCATION_WRAPPER (expr);
2908 if (TREE_CODE (expr) == COMPLEX_CST)
2909 return (integer_onep (TREE_REALPART (expr))
2910 && integer_onep (TREE_IMAGPART (expr)));
2911 else
2912 return integer_onep (expr);
2915 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2916 it contains, or a complex or vector whose subparts are such integers,
2917 or a location wrapper for such a constant. */
2919 bool
2920 integer_all_onesp (const_tree expr)
2922 STRIP_ANY_LOCATION_WRAPPER (expr);
2924 if (TREE_CODE (expr) == COMPLEX_CST
2925 && integer_all_onesp (TREE_REALPART (expr))
2926 && integer_all_onesp (TREE_IMAGPART (expr)))
2927 return true;
2929 else if (TREE_CODE (expr) == VECTOR_CST)
2930 return (VECTOR_CST_NPATTERNS (expr) == 1
2931 && VECTOR_CST_DUPLICATE_P (expr)
2932 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));
2934 else if (TREE_CODE (expr) != INTEGER_CST)
2935 return false;
2937 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
2938 == wi::to_wide (expr));
2941 /* Return 1 if EXPR is the integer constant minus one, or a location wrapper
2942 for such a constant. */
2944 bool
2945 integer_minus_onep (const_tree expr)
2947 STRIP_ANY_LOCATION_WRAPPER (expr);
2949 if (TREE_CODE (expr) == COMPLEX_CST)
2950 return (integer_all_onesp (TREE_REALPART (expr))
2951 && integer_zerop (TREE_IMAGPART (expr)));
2952 else
2953 return integer_all_onesp (expr);
2956 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2957 one bit on), or a location wrapper for such a constant. */
2959 bool
2960 integer_pow2p (const_tree expr)
2962 STRIP_ANY_LOCATION_WRAPPER (expr);
2964 if (TREE_CODE (expr) == COMPLEX_CST
2965 && integer_pow2p (TREE_REALPART (expr))
2966 && integer_zerop (TREE_IMAGPART (expr)))
2967 return true;
2969 if (TREE_CODE (expr) != INTEGER_CST)
2970 return false;
2972 return wi::popcount (wi::to_wide (expr)) == 1;
2975 /* Return 1 if EXPR is an integer constant other than zero or a
2976 complex constant other than zero, or a location wrapper for such a
2977 constant. */
2979 bool
2980 integer_nonzerop (const_tree expr)
2982 STRIP_ANY_LOCATION_WRAPPER (expr);
2984 return ((TREE_CODE (expr) == INTEGER_CST
2985 && wi::to_wide (expr) != 0)
2986 || (TREE_CODE (expr) == COMPLEX_CST
2987 && (integer_nonzerop (TREE_REALPART (expr))
2988 || integer_nonzerop (TREE_IMAGPART (expr)))));
2991 /* Return 1 if EXPR is the integer constant one. For a vector,
2992 return 1 if every piece is the integer constant minus one
2993 (representing the value TRUE).
2994 Also return 1 for location wrappers for such a constant. */
2996 bool
2997 integer_truep (const_tree expr)
2999 STRIP_ANY_LOCATION_WRAPPER (expr);
3001 if (TREE_CODE (expr) == VECTOR_CST)
3002 return integer_all_onesp (expr);
3003 return integer_onep (expr);
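/* Illustrative sketch (hypothetical helper) of the truth predicates
   above: scalar booleans use 1 for true, whereas vector truth values
   are all-ones, so both forms satisfy integer_truep.  VEC_INT_TYPE is
   assumed to be an integer vector type.  */

static void
example_truth_predicates (tree vec_int_type)
{
  tree scalar_true = build_int_cst (boolean_type_node, 1);
  tree vector_true = build_minus_one_cst (vec_int_type);
  gcc_checking_assert (integer_onep (scalar_true)
                       && integer_truep (scalar_true)
                       && integer_truep (vector_true));
}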
3006 /* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
3007 for such a constant. */
3009 bool
3010 fixed_zerop (const_tree expr)
3012 STRIP_ANY_LOCATION_WRAPPER (expr);
3014 return (TREE_CODE (expr) == FIXED_CST
3015 && TREE_FIXED_CST (expr).data.is_zero ());
3018 /* Return the power of two represented by a tree node known to be a
3019 power of two. */
3021 int
3022 tree_log2 (const_tree expr)
3024 if (TREE_CODE (expr) == COMPLEX_CST)
3025 return tree_log2 (TREE_REALPART (expr));
3027 return wi::exact_log2 (wi::to_wide (expr));
3030 /* Similar, but return the largest integer Y such that 2 ** Y is less
3031 than or equal to EXPR. */
3033 int
3034 tree_floor_log2 (const_tree expr)
3036 if (TREE_CODE (expr) == COMPLEX_CST)
3037 return tree_log2 (TREE_REALPART (expr));
3039 return wi::floor_log2 (wi::to_wide (expr));
3042 /* Return number of known trailing zero bits in EXPR, or, if the value of
3043 EXPR is known to be zero, the precision of its type. */
3045 unsigned int
3046 tree_ctz (const_tree expr)
3048 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3049 && !POINTER_TYPE_P (TREE_TYPE (expr)))
3050 return 0;
3052 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
3053 switch (TREE_CODE (expr))
3055 case INTEGER_CST:
3056 ret1 = wi::ctz (wi::to_wide (expr));
3057 return MIN (ret1, prec);
3058 case SSA_NAME:
3059 ret1 = wi::ctz (get_nonzero_bits (expr));
3060 return MIN (ret1, prec);
3061 case PLUS_EXPR:
3062 case MINUS_EXPR:
3063 case BIT_IOR_EXPR:
3064 case BIT_XOR_EXPR:
3065 case MIN_EXPR:
3066 case MAX_EXPR:
3067 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3068 if (ret1 == 0)
3069 return ret1;
3070 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3071 return MIN (ret1, ret2);
3072 case POINTER_PLUS_EXPR:
3073 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3074 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3075 /* The second operand is sizetype, which could in theory be
3076 wider than the pointer's precision. Make sure we never
3077 return more than prec. */
3078 ret2 = MIN (ret2, prec);
3079 return MIN (ret1, ret2);
3080 case BIT_AND_EXPR:
3081 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3082 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3083 return MAX (ret1, ret2);
3084 case MULT_EXPR:
3085 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3086 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
3087 return MIN (ret1 + ret2, prec);
3088 case LSHIFT_EXPR:
3089 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3090 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3091 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3093 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3094 return MIN (ret1 + ret2, prec);
3096 return ret1;
3097 case RSHIFT_EXPR:
3098 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
3099 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
3101 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3102 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
3103 if (ret1 > ret2)
3104 return ret1 - ret2;
3106 return 0;
3107 case TRUNC_DIV_EXPR:
3108 case CEIL_DIV_EXPR:
3109 case FLOOR_DIV_EXPR:
3110 case ROUND_DIV_EXPR:
3111 case EXACT_DIV_EXPR:
3112 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3113 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
3115 int l = tree_log2 (TREE_OPERAND (expr, 1));
3116 if (l >= 0)
3118 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3119 ret2 = l;
3120 if (ret1 > ret2)
3121 return ret1 - ret2;
3124 return 0;
3125 CASE_CONVERT:
3126 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
3127 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
3128 ret1 = prec;
3129 return MIN (ret1, prec);
3130 case SAVE_EXPR:
3131 return tree_ctz (TREE_OPERAND (expr, 0));
3132 case COND_EXPR:
3133 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
3134 if (ret1 == 0)
3135 return 0;
3136 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
3137 return MIN (ret1, ret2);
3138 case COMPOUND_EXPR:
3139 return tree_ctz (TREE_OPERAND (expr, 1));
3140 case ADDR_EXPR:
3141 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
3142 if (ret1 > BITS_PER_UNIT)
3144 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
3145 return MIN (ret1, prec);
3147 return 0;
3148 default:
3149 return 0;
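/* Illustrative sketch (hypothetical helper): how the trailing-zero
   count propagates through simple trees.  40 has three trailing zero
   bits, and shifting left by a constant adds the shift amount, capped
   at the type's precision.  */

static void
example_tree_ctz (void)
{
  tree forty = build_int_cst (integer_type_node, 40);
  gcc_checking_assert (tree_ctz (forty) == 3);

  tree shifted = build2 (LSHIFT_EXPR, integer_type_node, forty,
                         build_int_cst (integer_type_node, 2));
  gcc_checking_assert (tree_ctz (shifted) == 5);
}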
3153 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3154 decimal float constants, so don't return 1 for them.
3155 Also return 1 for location wrappers around such a constant. */
3157 bool
3158 real_zerop (const_tree expr)
3160 STRIP_ANY_LOCATION_WRAPPER (expr);
3162 switch (TREE_CODE (expr))
3164 case REAL_CST:
3165 return real_equal (&TREE_REAL_CST (expr), &dconst0)
3166 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3167 case COMPLEX_CST:
3168 return real_zerop (TREE_REALPART (expr))
3169 && real_zerop (TREE_IMAGPART (expr));
3170 case VECTOR_CST:
3172 /* Don't simply check for a duplicate because the predicate
3173 accepts both +0.0 and -0.0. */
3174 unsigned count = vector_cst_encoded_nelts (expr);
3175 for (unsigned int i = 0; i < count; ++i)
3176 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3177 return false;
3178 return true;
3180 default:
3181 return false;
3185 /* Return 1 if EXPR is the real constant one in real or complex form.
3186 Trailing zeroes matter for decimal float constants, so don't return
3187 1 for them.
3188 Also return 1 for location wrappers around such a constant. */
3190 bool
3191 real_onep (const_tree expr)
3193 STRIP_ANY_LOCATION_WRAPPER (expr);
3195 switch (TREE_CODE (expr))
3197 case REAL_CST:
3198 return real_equal (&TREE_REAL_CST (expr), &dconst1)
3199 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3200 case COMPLEX_CST:
3201 return real_onep (TREE_REALPART (expr))
3202 && real_zerop (TREE_IMAGPART (expr));
3203 case VECTOR_CST:
3204 return (VECTOR_CST_NPATTERNS (expr) == 1
3205 && VECTOR_CST_DUPLICATE_P (expr)
3206 && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3207 default:
3208 return false;
3212 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3213 matter for decimal float constants, so don't return 1 for them.
3214 Also return 1 for location wrappers around such a constant. */
3216 bool
3217 real_minus_onep (const_tree expr)
3219 STRIP_ANY_LOCATION_WRAPPER (expr);
3221 switch (TREE_CODE (expr))
3223 case REAL_CST:
3224 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
3225 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
3226 case COMPLEX_CST:
3227 return real_minus_onep (TREE_REALPART (expr))
3228 && real_zerop (TREE_IMAGPART (expr));
3229 case VECTOR_CST:
3230 return (VECTOR_CST_NPATTERNS (expr) == 1
3231 && VECTOR_CST_DUPLICATE_P (expr)
3232 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
3233 default:
3234 return false;
3238 /* Return true if T could be a floating point zero. */
3240 bool
3241 real_maybe_zerop (const_tree expr)
3243 switch (TREE_CODE (expr))
3245 case REAL_CST:
3246 /* Can't use real_zerop here, as it always returns false for decimal
3247 floats. And can't use TREE_REAL_CST (expr).cl == rvc_zero
3248 either, as decimal zeros are rvc_normal. */
3249 return real_equal (&TREE_REAL_CST (expr), &dconst0);
3250 case COMPLEX_CST:
3251 return (real_maybe_zerop (TREE_REALPART (expr))
3252 || real_maybe_zerop (TREE_IMAGPART (expr)));
3253 case VECTOR_CST:
3255 unsigned count = vector_cst_encoded_nelts (expr);
3256 for (unsigned int i = 0; i < count; ++i)
3257 if (real_maybe_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
3258 return true;
3259 return false;
3261 default:
3262 /* Perhaps for SSA_NAMEs we could query frange. */
3263 return true;
3267 /* Nonzero if EXP is a constant or a cast of a constant. */
3269 bool
3270 really_constant_p (const_tree exp)
3272 /* This is not quite the same as STRIP_NOPS. It does more. */
3273 while (CONVERT_EXPR_P (exp)
3274 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3275 exp = TREE_OPERAND (exp, 0);
3276 return TREE_CONSTANT (exp);
3279 /* Return true if T holds a polynomial pointer difference, storing it in
3280 *VALUE if so. A true return means that T's precision is no greater
3281 than 64 bits, which is the largest address space we support, so *VALUE
3282 never loses precision. However, the signedness of the result does
3283 not necessarily match the signedness of T: sometimes an unsigned type
3284 like sizetype is used to encode a value that is actually negative. */
3286 bool
3287 ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
3289 if (!t)
3290 return false;
3291 if (TREE_CODE (t) == INTEGER_CST)
3293 if (!cst_and_fits_in_hwi (t))
3294 return false;
3295 *value = int_cst_value (t);
3296 return true;
3298 if (POLY_INT_CST_P (t))
3300 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3301 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
3302 return false;
3303 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3304 value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
3305 return true;
3307 return false;
3310 poly_int64
3311 tree_to_poly_int64 (const_tree t)
3313 gcc_assert (tree_fits_poly_int64_p (t));
3314 if (POLY_INT_CST_P (t))
3315 return poly_int_cst_value (t).force_shwi ();
3316 return TREE_INT_CST_LOW (t);
3319 poly_uint64
3320 tree_to_poly_uint64 (const_tree t)
3322 gcc_assert (tree_fits_poly_uint64_p (t));
3323 if (POLY_INT_CST_P (t))
3324 return poly_int_cst_value (t).force_uhwi ();
3325 return TREE_INT_CST_LOW (t);
3328 /* Return first list element whose TREE_VALUE is ELEM.
3329 Return 0 if ELEM is not in LIST. */
3331 tree
3332 value_member (tree elem, tree list)
3334 while (list)
3336 if (elem == TREE_VALUE (list))
3337 return list;
3338 list = TREE_CHAIN (list);
3340 return NULL_TREE;
3343 /* Return first list element whose TREE_PURPOSE is ELEM.
3344 Return 0 if ELEM is not in LIST. */
3346 tree
3347 purpose_member (const_tree elem, tree list)
3349 while (list)
3351 if (elem == TREE_PURPOSE (list))
3352 return list;
3353 list = TREE_CHAIN (list);
3355 return NULL_TREE;
3358 /* Return true if ELEM is in V. */
3360 bool
3361 vec_member (const_tree elem, vec<tree, va_gc> *v)
3363 unsigned ix;
3364 tree t;
3365 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
3366 if (elem == t)
3367 return true;
3368 return false;
3371 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3372 NULL_TREE. */
3374 tree
3375 chain_index (int idx, tree chain)
3377 for (; chain && idx > 0; --idx)
3378 chain = TREE_CHAIN (chain);
3379 return chain;
3382 /* Return nonzero if ELEM is part of the chain CHAIN. */
3384 bool
3385 chain_member (const_tree elem, const_tree chain)
3387 while (chain)
3389 if (elem == chain)
3390 return true;
3391 chain = DECL_CHAIN (chain);
3394 return false;
3397 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3398 We expect a null pointer to mark the end of the chain.
3399 This is the Lisp primitive `length'. */
3401 int
3402 list_length (const_tree t)
3404 const_tree p = t;
3405 #ifdef ENABLE_TREE_CHECKING
3406 const_tree q = t;
3407 #endif
3408 int len = 0;
3410 while (p)
3412 p = TREE_CHAIN (p);
3413 #ifdef ENABLE_TREE_CHECKING
3414 if (len % 2)
3415 q = TREE_CHAIN (q);
3416 gcc_assert (p != q);
3417 #endif
3418 len++;
3421 return len;
3424 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3425 UNION_TYPE TYPE, or NULL_TREE if none. */
3427 tree
3428 first_field (const_tree type)
3430 tree t = TYPE_FIELDS (type);
3431 while (t && TREE_CODE (t) != FIELD_DECL)
3432 t = TREE_CHAIN (t);
3433 return t;
3436 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3437 UNION_TYPE TYPE, or NULL_TREE if none. */
3439 tree
3440 last_field (const_tree type)
3442 tree last = NULL_TREE;
3444 for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
3446 if (TREE_CODE (fld) != FIELD_DECL)
3447 continue;
3449 last = fld;
3452 return last;
3455 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3456 by modifying the last node in chain 1 to point to chain 2.
3457 This is the Lisp primitive `nconc'. */
3459 tree
3460 chainon (tree op1, tree op2)
3462 tree t1;
3464 if (!op1)
3465 return op2;
3466 if (!op2)
3467 return op1;
3469 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
3470 continue;
3471 TREE_CHAIN (t1) = op2;
3473 #ifdef ENABLE_TREE_CHECKING
3475 tree t2;
3476 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
3477 gcc_assert (t2 != t1);
3479 #endif
3481 return op1;
3484 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3486 tree
3487 tree_last (tree chain)
3489 tree next;
3490 if (chain)
3491 while ((next = TREE_CHAIN (chain)))
3492 chain = next;
3493 return chain;
3496 /* Reverse the order of elements in the chain T,
3497 and return the new head of the chain (old last element). */
3499 tree
3500 nreverse (tree t)
3502 tree prev = 0, decl, next;
3503 for (decl = t; decl; decl = next)
3505 /* We shouldn't be using this function to reverse BLOCK chains; we
3506 have blocks_nreverse for that. */
3507 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
3508 next = TREE_CHAIN (decl);
3509 TREE_CHAIN (decl) = prev;
3510 prev = decl;
3512 return prev;
3515 /* Return a newly created TREE_LIST node whose
3516 purpose and value fields are PARM and VALUE. */
3518 tree
3519 build_tree_list (tree parm, tree value MEM_STAT_DECL)
3521 tree t = make_node (TREE_LIST PASS_MEM_STAT);
3522 TREE_PURPOSE (t) = parm;
3523 TREE_VALUE (t) = value;
3524 return t;
3527 /* Build a chain of TREE_LIST nodes from a vector. */
3529 tree
3530 build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
3532 tree ret = NULL_TREE;
3533 tree *pp = &ret;
3534 unsigned int i;
3535 tree t;
3536 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
3538 *pp = build_tree_list (NULL, t PASS_MEM_STAT);
3539 pp = &TREE_CHAIN (*pp);
3541 return ret;
3544 /* Return a newly created TREE_LIST node whose
3545 purpose and value fields are PURPOSE and VALUE
3546 and whose TREE_CHAIN is CHAIN. */
3548 tree
3549 tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
3551 tree node;
3553 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
3554 memset (node, 0, sizeof (struct tree_common));
3556 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
3558 TREE_SET_CODE (node, TREE_LIST);
3559 TREE_CHAIN (node) = chain;
3560 TREE_PURPOSE (node) = purpose;
3561 TREE_VALUE (node) = value;
3562 return node;
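/* Usage sketch (hypothetical helper) for the TREE_LIST primitives
   above: cons up the list (3 2 1), reverse it in place to (1 2 3),
   and check its length.  */

static tree
example_tree_list (void)
{
  tree list = NULL_TREE;
  for (int i = 1; i <= 3; i++)
    list = tree_cons (NULL_TREE, build_int_cst (integer_type_node, i), list);
  list = nreverse (list);
  gcc_checking_assert (list_length (list) == 3
                       && integer_onep (TREE_VALUE (list)));
  return list;
}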
3565 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3566 trees. */
3568 vec<tree, va_gc> *
3569 ctor_to_vec (tree ctor)
3571 vec<tree, va_gc> *vec;
3572 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
3573 unsigned int ix;
3574 tree val;
3576 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
3577 vec->quick_push (val);
3579 return vec;
3582 /* Return the size nominally occupied by an object of type TYPE
3583 when it resides in memory. The value is measured in units of bytes,
3584 and its data type is that normally used for type sizes
3585 (which is the first type created by make_signed_type or
3586 make_unsigned_type). */
3588 tree
3589 size_in_bytes_loc (location_t loc, const_tree type)
3591 tree t;
3593 if (type == error_mark_node)
3594 return integer_zero_node;
3596 type = TYPE_MAIN_VARIANT (type);
3597 t = TYPE_SIZE_UNIT (type);
3599 if (t == 0)
3601 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
3602 return size_zero_node;
3605 return t;
3608 /* Return the size of TYPE (in bytes) as a wide integer
3609 or return -1 if the size can vary or is larger than an integer. */
3611 HOST_WIDE_INT
3612 int_size_in_bytes (const_tree type)
3614 tree t;
3616 if (type == error_mark_node)
3617 return 0;
3619 type = TYPE_MAIN_VARIANT (type);
3620 t = TYPE_SIZE_UNIT (type);
3622 if (t && tree_fits_uhwi_p (t))
3623 return TREE_INT_CST_LOW (t);
3624 else
3625 return -1;
3628 /* Return the maximum size of TYPE (in bytes) as a wide integer
3629 or return -1 if the size can vary or is larger than an integer. */
3631 HOST_WIDE_INT
3632 max_int_size_in_bytes (const_tree type)
3634 HOST_WIDE_INT size = -1;
3635 tree size_tree;
3637 /* If this is an array type, check for a possible MAX_SIZE attached. */
3639 if (TREE_CODE (type) == ARRAY_TYPE)
3641 size_tree = TYPE_ARRAY_MAX_SIZE (type);
3643 if (size_tree && tree_fits_uhwi_p (size_tree))
3644 size = tree_to_uhwi (size_tree);
3647 /* If we still haven't been able to get a size, see if the language
3648 can compute a maximum size. */
3650 if (size == -1)
3652 size_tree = lang_hooks.types.max_size (type);
3654 if (size_tree && tree_fits_uhwi_p (size_tree))
3655 size = tree_to_uhwi (size_tree);
3658 return size;
3661 /* Return the bit position of FIELD, in bits from the start of the record.
3662 This is a tree of type bitsizetype. */
3664 tree
3665 bit_position (const_tree field)
3667 return bit_from_pos (DECL_FIELD_OFFSET (field),
3668 DECL_FIELD_BIT_OFFSET (field));
3671 /* Return the byte position of FIELD, in bytes from the start of the record.
3672 This is a tree of type sizetype. */
3674 tree
3675 byte_position (const_tree field)
3677 return byte_from_pos (DECL_FIELD_OFFSET (field),
3678 DECL_FIELD_BIT_OFFSET (field));
3681 /* Likewise, but return as an integer. It must be representable in
3682 that way (since it could be a signed value, we don't have the
3683 option of returning -1 like int_size_in_bytes can). */
3685 HOST_WIDE_INT
3686 int_byte_position (const_tree field)
3688 return tree_to_shwi (byte_position (field));
3691 /* Return, as a tree node, the number of elements for TYPE (which is an
3692 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3694 tree
3695 array_type_nelts (const_tree type)
3697 tree index_type, min, max;
3699 /* If they did it with unspecified bounds, then we should have already
3700 given an error about it before we got here. */
3701 if (! TYPE_DOMAIN (type))
3702 return error_mark_node;
3704 index_type = TYPE_DOMAIN (type);
3705 min = TYPE_MIN_VALUE (index_type);
3706 max = TYPE_MAX_VALUE (index_type);
3708 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3709 if (!max)
3711 /* Zero-sized arrays are represented by the C FE as complete types with
3712 a NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while the C++ FE represents
3713 them as min 0, max -1. */
3714 if (COMPLETE_TYPE_P (type)
3715 && integer_zerop (TYPE_SIZE (type))
3716 && integer_zerop (min))
3717 return build_int_cst (TREE_TYPE (min), -1);
3719 return error_mark_node;
3722 return (integer_zerop (min)
3723 ? max
3724 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
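/* Illustrative sketch (hypothetical helper): for the type int[10] the
   domain is [0, 9], so array_type_nelts returns the INTEGER_CST 9,
   i.e. the number of top-level elements minus one.  */

static tree
example_array_nelts (void)
{
  tree domain = build_index_type (size_int (9));           /* 0 .. 9 */
  tree arr = build_array_type (integer_type_node, domain);
  return array_type_nelts (arr);                           /* 9 */
}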
3727 /* If arg is static -- a reference to an object in static storage -- then
3728 return the object. This is not the same as the C meaning of `static'.
3729 If arg isn't static, return NULL. */
3731 tree
3732 staticp (tree arg)
3734 switch (TREE_CODE (arg))
3736 case FUNCTION_DECL:
3737 /* Nested functions are static, even though taking their address will
3738 involve a trampoline as we unnest the nested function and create
3739 the trampoline on the tree level. */
3740 return arg;
3742 case VAR_DECL:
3743 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3744 && ! DECL_THREAD_LOCAL_P (arg)
3745 && ! DECL_DLLIMPORT_P (arg)
3746 ? arg : NULL);
3748 case CONST_DECL:
3749 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3750 ? arg : NULL);
3752 case CONSTRUCTOR:
3753 return TREE_STATIC (arg) ? arg : NULL;
3755 case LABEL_DECL:
3756 case STRING_CST:
3757 return arg;
3759 case COMPONENT_REF:
3760 /* If the thing being referenced is not a field, then it is
3761 something language specific. */
3762 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3764 /* If we are referencing a bitfield, we can't evaluate an
3765 ADDR_EXPR at compile time and so it isn't a constant. */
3766 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3767 return NULL;
3769 return staticp (TREE_OPERAND (arg, 0));
3771 case BIT_FIELD_REF:
3772 return NULL;
3774 case INDIRECT_REF:
3775 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3777 case ARRAY_REF:
3778 case ARRAY_RANGE_REF:
3779 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3780 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3781 return staticp (TREE_OPERAND (arg, 0));
3782 else
3783 return NULL;
3785 case COMPOUND_LITERAL_EXPR:
3786 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3788 default:
3789 return NULL;
3796 /* Return whether OP is a DECL whose address is function-invariant. */
3798 bool
3799 decl_address_invariant_p (const_tree op)
3801 /* The conditions below are slightly less strict than the one in
3802 staticp. */
3804 switch (TREE_CODE (op))
3806 case PARM_DECL:
3807 case RESULT_DECL:
3808 case LABEL_DECL:
3809 case FUNCTION_DECL:
3810 return true;
3812 case VAR_DECL:
3813 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3814 || DECL_THREAD_LOCAL_P (op)
3815 || DECL_CONTEXT (op) == current_function_decl
3816 || decl_function_context (op) == current_function_decl)
3817 return true;
3818 break;
3820 case CONST_DECL:
3821 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3822 || decl_function_context (op) == current_function_decl)
3823 return true;
3824 break;
3826 default:
3827 break;
3830 return false;
3833 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3835 bool
3836 decl_address_ip_invariant_p (const_tree op)
3838 /* The conditions below are slightly less strict than the one in
3839 staticp. */
3841 switch (TREE_CODE (op))
3843 case LABEL_DECL:
3844 case FUNCTION_DECL:
3845 case STRING_CST:
3846 return true;
3848 case VAR_DECL:
3849 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3850 && !DECL_DLLIMPORT_P (op))
3851 || DECL_THREAD_LOCAL_P (op))
3852 return true;
3853 break;
3855 case CONST_DECL:
3856 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3857 return true;
3858 break;
3860 default:
3861 break;
3864 return false;
3868 /* Return true if T is function-invariant (internal function, does
3869 not handle arithmetic; that's handled in skip_simple_arithmetic and
3870 tree_invariant_p). */
3872 static bool
3873 tree_invariant_p_1 (tree t)
3875 tree op;
3877 if (TREE_CONSTANT (t)
3878 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3879 return true;
3881 switch (TREE_CODE (t))
3883 case SAVE_EXPR:
3884 return true;
3886 case ADDR_EXPR:
3887 op = TREE_OPERAND (t, 0);
3888 while (handled_component_p (op))
3890 switch (TREE_CODE (op))
3892 case ARRAY_REF:
3893 case ARRAY_RANGE_REF:
3894 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3895 || TREE_OPERAND (op, 2) != NULL_TREE
3896 || TREE_OPERAND (op, 3) != NULL_TREE)
3897 return false;
3898 break;
3900 case COMPONENT_REF:
3901 if (TREE_OPERAND (op, 2) != NULL_TREE)
3902 return false;
3903 break;
3905 default:;
3907 op = TREE_OPERAND (op, 0);
3910 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3912 default:
3913 break;
3916 return false;
3919 /* Return true if T is function-invariant. */
3921 bool
3922 tree_invariant_p (tree t)
3924 tree inner = skip_simple_arithmetic (t);
3925 return tree_invariant_p_1 (inner);
3928 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3929 Do this to any expression which may be used in more than one place,
3930 but must be evaluated only once.
3932 Normally, expand_expr would reevaluate the expression each time.
3933 Calling save_expr produces something that is evaluated and recorded
3934 the first time expand_expr is called on it. Subsequent calls to
3935 expand_expr just reuse the recorded value.
3937 The call to expand_expr that generates code that actually computes
3938 the value is the first call *at compile time*. Subsequent calls
3939 *at compile time* generate code to use the saved value.
3940 This produces the correct result provided that *at run time* control
3941 always flows through the insns made by the first expand_expr
3942 before reaching the other places where the save_expr was evaluated.
3943 You, the caller of save_expr, must make sure this is so.
3945 Constants, and certain read-only nodes, are returned with no
3946 SAVE_EXPR because that is safe. Expressions containing placeholders
3947 are not touched; see tree.def for an explanation of what these
3948 are used for. */
3950 tree
3951 save_expr (tree expr)
3953 tree inner;
3955 /* If the tree evaluates to a constant, then we don't want to hide that
3956 fact (i.e. this allows further folding, and direct checks for constants).
3957 However, a read-only object that has side effects cannot be bypassed.
3958 Since it is no problem to reevaluate literals, we just return the
3959 literal node. */
3960 inner = skip_simple_arithmetic (expr);
3961 if (TREE_CODE (inner) == ERROR_MARK)
3962 return inner;
3964 if (tree_invariant_p_1 (inner))
3965 return expr;
3967 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3968 it means that the size or offset of some field of an object depends on
3969 the value within another field.
3971 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3972 and some variable since it would then need to be both evaluated once and
3973 evaluated more than once. Front-ends must assure this case cannot
3974 happen by surrounding any such subexpressions in their own SAVE_EXPR
3975 and forcing evaluation at the proper time. */
3976 if (contains_placeholder_p (inner))
3977 return expr;
3979 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3981 /* This expression might be placed ahead of a jump to ensure that the
3982 value was computed on both sides of the jump. So make sure it isn't
3983 eliminated as dead. */
3984 TREE_SIDE_EFFECTS (expr) = 1;
3985 return expr;
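/* Usage sketch (hypothetical helper): when an operand is about to be
   emitted more than once, wrap it with save_expr so any side effects
   are evaluated only on the first use.  Here X*X reuses a single
   evaluation of X.  */

static tree
example_square (tree x)
{
  tree saved = save_expr (x);
  return fold_build2 (MULT_EXPR, TREE_TYPE (saved), saved, saved);
}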
3988 /* Look inside EXPR into any simple arithmetic operations. Return the
3989 outermost non-arithmetic or non-invariant node. */
3991 tree
3992 skip_simple_arithmetic (tree expr)
3994 /* We don't care about whether this can be used as an lvalue in this
3995 context. */
3996 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3997 expr = TREE_OPERAND (expr, 0);
3999 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
4000 a constant, it will be more efficient to not make another SAVE_EXPR since
4001 it will allow better simplification and GCSE will be able to merge the
4002 computations if they actually occur. */
4003 while (true)
4005 if (UNARY_CLASS_P (expr))
4006 expr = TREE_OPERAND (expr, 0);
4007 else if (BINARY_CLASS_P (expr))
4009 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
4010 expr = TREE_OPERAND (expr, 0);
4011 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
4012 expr = TREE_OPERAND (expr, 1);
4013 else
4014 break;
4016 else
4017 break;
4020 return expr;
4023 /* Look inside EXPR into simple arithmetic operations involving constants.
4024 Return the outermost non-arithmetic or non-constant node. */
4026 tree
4027 skip_simple_constant_arithmetic (tree expr)
4029 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
4030 expr = TREE_OPERAND (expr, 0);
4032 while (true)
4034 if (UNARY_CLASS_P (expr))
4035 expr = TREE_OPERAND (expr, 0);
4036 else if (BINARY_CLASS_P (expr))
4038 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
4039 expr = TREE_OPERAND (expr, 0);
4040 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
4041 expr = TREE_OPERAND (expr, 1);
4042 else
4043 break;
4045 else
4046 break;
4049 return expr;
4052 /* Return which tree structure is used by T. */
4054 enum tree_node_structure_enum
4055 tree_node_structure (const_tree t)
4057 const enum tree_code code = TREE_CODE (t);
4058 return tree_node_structure_for_code (code);
4061 /* Set various status flags when building a CALL_EXPR object T. */
4063 static void
4064 process_call_operands (tree t)
4066 bool side_effects = TREE_SIDE_EFFECTS (t);
4067 bool read_only = false;
4068 int i = call_expr_flags (t);
4070 /* Calls have side-effects, except those to const or pure functions. */
4071 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
4072 side_effects = true;
4073 /* Propagate TREE_READONLY of arguments for const functions. */
4074 if (i & ECF_CONST)
4075 read_only = true;
4077 if (!side_effects || read_only)
4078 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
4080 tree op = TREE_OPERAND (t, i);
4081 if (op && TREE_SIDE_EFFECTS (op))
4082 side_effects = true;
4083 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
4084 read_only = false;
4087 TREE_SIDE_EFFECTS (t) = side_effects;
4088 TREE_READONLY (t) = read_only;
4091 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4092 size or offset that depends on a field within a record. */
4094 bool
4095 contains_placeholder_p (const_tree exp)
4097 enum tree_code code;
4099 if (!exp)
4100 return 0;
4102 code = TREE_CODE (exp);
4103 if (code == PLACEHOLDER_EXPR)
4104 return 1;
4106 switch (TREE_CODE_CLASS (code))
4108 case tcc_reference:
4109 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4110 position computations since they will be converted into a
4111 WITH_RECORD_EXPR involving the reference, which we assume
4112 here will be valid. */
4113 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4115 case tcc_exceptional:
4116 if (code == TREE_LIST)
4117 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
4118 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
4119 break;
4121 case tcc_unary:
4122 case tcc_binary:
4123 case tcc_comparison:
4124 case tcc_expression:
4125 switch (code)
4127 case COMPOUND_EXPR:
4128 /* Ignoring the first operand isn't quite right, but works best. */
4129 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
4131 case COND_EXPR:
4132 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4133 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
4134 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
4136 case SAVE_EXPR:
4137 /* The save_expr function never wraps anything containing
4138 a PLACEHOLDER_EXPR. */
4139 return 0;
4141 default:
4142 break;
4145 switch (TREE_CODE_LENGTH (code))
4147 case 1:
4148 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
4149 case 2:
4150 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
4151 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
4152 default:
4153 return 0;
4156 case tcc_vl_exp:
4157 switch (code)
4159 case CALL_EXPR:
4161 const_tree arg;
4162 const_call_expr_arg_iterator iter;
4163 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
4164 if (CONTAINS_PLACEHOLDER_P (arg))
4165 return 1;
4166 return 0;
4168 default:
4169 return 0;
4172 default:
4173 return 0;
4175 return 0;
4178 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4179 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4180 field positions. */
4182 static bool
4183 type_contains_placeholder_1 (const_tree type)
4185 /* If the size contains a placeholder or the parent type (the component
4186 type in the case of arrays) involves a placeholder, this type does. */
4187 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
4188 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
4189 || (!POINTER_TYPE_P (type)
4190 && TREE_TYPE (type)
4191 && type_contains_placeholder_p (TREE_TYPE (type))))
4192 return true;
4194 /* Now do type-specific checks. Note that the last part of the check above
4195 greatly limits what we have to do below. */
4196 switch (TREE_CODE (type))
4198 case VOID_TYPE:
4199 case OPAQUE_TYPE:
4200 case COMPLEX_TYPE:
4201 case ENUMERAL_TYPE:
4202 case BOOLEAN_TYPE:
4203 case POINTER_TYPE:
4204 case OFFSET_TYPE:
4205 case REFERENCE_TYPE:
4206 case METHOD_TYPE:
4207 case FUNCTION_TYPE:
4208 case VECTOR_TYPE:
4209 case NULLPTR_TYPE:
4210 return false;
4212 case INTEGER_TYPE:
4213 case REAL_TYPE:
4214 case FIXED_POINT_TYPE:
4215 /* Here we just check the bounds. */
4216 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
4217 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
4219 case ARRAY_TYPE:
4220 /* We have already checked the component type above, so just check
4221 the domain type. Flexible array members have a null domain. */
4222 return TYPE_DOMAIN (type) ?
4223 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
4225 case RECORD_TYPE:
4226 case UNION_TYPE:
4227 case QUAL_UNION_TYPE:
4229 tree field;
4231 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4232 if (TREE_CODE (field) == FIELD_DECL
4233 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
4234 || (TREE_CODE (type) == QUAL_UNION_TYPE
4235 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
4236 || type_contains_placeholder_p (TREE_TYPE (field))))
4237 return true;
4239 return false;
4242 default:
4243 gcc_unreachable ();
4247 /* Wrapper around above function used to cache its result. */
4249 bool
4250 type_contains_placeholder_p (tree type)
4252 bool result;
4254 /* If the contains_placeholder_bits field has been initialized,
4255 then we know the answer. */
4256 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
4257 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
4259 /* Indicate that we've seen this type node, and the answer is false.
4260 This is what we want to return if we run into recursion via fields. */
4261 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
4263 /* Compute the real value. */
4264 result = type_contains_placeholder_1 (type);
4266 /* Store the real value. */
4267 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
4269 return result;
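/* Editorial note with an illustrative sketch (not part of the original
   source): the caching above is a tri-state memoization.  The internal
   field holds 0 for "not computed yet", 1 for "known false" and 2 for
   "known true", so a recursive query that re-enters the same type while
   it is being computed sees the provisional "false" and terminates.  The
   generic form of the pattern, with made-up names:  */
#if 0
static bool
memoized_predicate (unsigned char *cache_slot, bool (*compute) (void))
{
  if (*cache_slot > 0)
    return *cache_slot - 1;	/* Cached: 1 -> false, 2 -> true.  */
  *cache_slot = 1;		/* Provisional "false" breaks recursion.  */
  bool result = compute ();
  *cache_slot = result + 1;	/* Remember the real answer.  */
  return result;
}
#endif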
4272 /* Push tree EXP onto vector QUEUE if it is not already present. */
4274 static void
4275 push_without_duplicates (tree exp, vec<tree> *queue)
4277 unsigned int i;
4278 tree iter;
4280 FOR_EACH_VEC_ELT (*queue, i, iter)
4281 if (simple_cst_equal (iter, exp) == 1)
4282 break;
4284 if (!iter)
4285 queue->safe_push (exp);
4288 /* Given a tree EXP, find all occurrences of references to fields
4289 in a PLACEHOLDER_EXPR and place them in vector REFS without
4290 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4291 we assume here that EXP contains only arithmetic expressions
4292 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4293 argument list. */
4295 void
4296 find_placeholder_in_expr (tree exp, vec<tree> *refs)
4298 enum tree_code code = TREE_CODE (exp);
4299 tree inner;
4300 int i;
4302 /* We handle TREE_LIST and COMPONENT_REF separately. */
4303 if (code == TREE_LIST)
4305 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
4306 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
4308 else if (code == COMPONENT_REF)
4310 for (inner = TREE_OPERAND (exp, 0);
4311 REFERENCE_CLASS_P (inner);
4312 inner = TREE_OPERAND (inner, 0))
4315 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
4316 push_without_duplicates (exp, refs);
4317 else
4318 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
4320 else
4321 switch (TREE_CODE_CLASS (code))
4323 case tcc_constant:
4324 break;
4326 case tcc_declaration:
4327 /* Variables allocated to static storage can stay. */
4328 if (!TREE_STATIC (exp))
4329 push_without_duplicates (exp, refs);
4330 break;
4332 case tcc_expression:
4333 /* This is the pattern built in ada/make_aligning_type. */
4334 if (code == ADDR_EXPR
4335 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
4337 push_without_duplicates (exp, refs);
4338 break;
4341 /* Fall through. */
4343 case tcc_exceptional:
4344 case tcc_unary:
4345 case tcc_binary:
4346 case tcc_comparison:
4347 case tcc_reference:
4348 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
4349 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4350 break;
4352 case tcc_vl_exp:
4353 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4354 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
4355 break;
4357 default:
4358 gcc_unreachable ();
4362 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4363 return a tree with all occurrences of references to F in a
4364 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4365 CONST_DECLs. Note that we assume here that EXP contains only
4366 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4367 occurring only in their argument list. */
4369 tree
4370 substitute_in_expr (tree exp, tree f, tree r)
4372 enum tree_code code = TREE_CODE (exp);
4373 tree op0, op1, op2, op3;
4374 tree new_tree;
4376 /* We handle TREE_LIST and COMPONENT_REF separately. */
4377 if (code == TREE_LIST)
4379 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
4380 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
4381 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4382 return exp;
4384 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4386 else if (code == COMPONENT_REF)
4388 tree inner;
4390 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4391 and it is the right field, replace it with R. */
4392 for (inner = TREE_OPERAND (exp, 0);
4393 REFERENCE_CLASS_P (inner);
4394 inner = TREE_OPERAND (inner, 0))
4397 /* The field. */
4398 op1 = TREE_OPERAND (exp, 1);
4400 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
4401 return r;
4403 /* If this expression hasn't been completed yet, leave it alone. */
4404 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
4405 return exp;
4407 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4408 if (op0 == TREE_OPERAND (exp, 0))
4409 return exp;
4411 new_tree
4412 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
4414 else
4415 switch (TREE_CODE_CLASS (code))
4417 case tcc_constant:
4418 return exp;
4420 case tcc_declaration:
4421 if (exp == f)
4422 return r;
4423 else
4424 return exp;
4426 case tcc_expression:
4427 if (exp == f)
4428 return r;
4430 /* Fall through. */
4432 case tcc_exceptional:
4433 case tcc_unary:
4434 case tcc_binary:
4435 case tcc_comparison:
4436 case tcc_reference:
4437 switch (TREE_CODE_LENGTH (code))
4439 case 0:
4440 return exp;
4442 case 1:
4443 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4444 if (op0 == TREE_OPERAND (exp, 0))
4445 return exp;
4447 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4448 break;
4450 case 2:
4451 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4452 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4454 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4455 return exp;
4457 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4458 break;
4460 case 3:
4461 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4462 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4463 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4465 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4466 && op2 == TREE_OPERAND (exp, 2))
4467 return exp;
4469 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4470 break;
4472 case 4:
4473 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
4474 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
4475 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
4476 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
4478 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4479 && op2 == TREE_OPERAND (exp, 2)
4480 && op3 == TREE_OPERAND (exp, 3))
4481 return exp;
4483 new_tree
4484 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4485 break;
4487 default:
4488 gcc_unreachable ();
4490 break;
4492 case tcc_vl_exp:
4494 int i;
4496 new_tree = NULL_TREE;
4498 /* If we are trying to replace F with a constant or with another
4499 instance of one of the arguments of the call, inline back
4500 functions which do nothing else than computing a value from
4501 the arguments they are passed. This makes it possible to
4502 fold partially or entirely the replacement expression. */
4503 if (code == CALL_EXPR)
4505 bool maybe_inline = false;
4506 if (CONSTANT_CLASS_P (r))
4507 maybe_inline = true;
4508 else
4509 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
4510 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
4512 maybe_inline = true;
4513 break;
4515 if (maybe_inline)
4517 tree t = maybe_inline_call_in_expr (exp);
4518 if (t)
4519 return SUBSTITUTE_IN_EXPR (t, f, r);
4523 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4525 tree op = TREE_OPERAND (exp, i);
4526 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
4527 if (new_op != op)
4529 if (!new_tree)
4530 new_tree = copy_node (exp);
4531 TREE_OPERAND (new_tree, i) = new_op;
4535 if (new_tree)
4537 new_tree = fold (new_tree);
4538 if (TREE_CODE (new_tree) == CALL_EXPR)
4539 process_call_operands (new_tree);
4541 else
4542 return exp;
4544 break;
4546 default:
4547 gcc_unreachable ();
4550 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4552 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4553 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4555 return new_tree;
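/* Illustrative example (editorial sketch, not part of the original
   source): substituting a field reference made through a
   PLACEHOLDER_EXPR.  FIELD is assumed to be an integer-typed FIELD_DECL
   of REC_TYPE; SUBSTITUTE_IN_EXPR is the short-circuiting macro from
   tree.h that wraps substitute_in_expr.  */
#if 0
static tree
example_substitute_field (tree rec_type, tree field)
{
  /* Build (PLACEHOLDER_EXPR).FIELD + 1, the shape self-referential size
     expressions take.  */
  tree ph = build0 (PLACEHOLDER_EXPR, rec_type);
  tree ref = build3 (COMPONENT_REF, TREE_TYPE (field), ph, field, NULL_TREE);
  tree expr = build2 (PLUS_EXPR, TREE_TYPE (field), ref,
		      build_int_cst (TREE_TYPE (field), 1));

  /* Replacing FIELD with the constant 7 lets fold reduce the whole
     expression to the constant 8.  */
  return SUBSTITUTE_IN_EXPR (expr, field,
			     build_int_cst (TREE_TYPE (field), 7));
}
#endif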
4558 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4559 for it within OBJ, a tree that is an object or a chain of references. */
4561 tree
4562 substitute_placeholder_in_expr (tree exp, tree obj)
4564 enum tree_code code = TREE_CODE (exp);
4565 tree op0, op1, op2, op3;
4566 tree new_tree;
4568 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4569 in the chain of OBJ. */
4570 if (code == PLACEHOLDER_EXPR)
4572 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
4573 tree elt;
4575 for (elt = obj; elt != 0;
4576 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4577 || TREE_CODE (elt) == COND_EXPR)
4578 ? TREE_OPERAND (elt, 1)
4579 : (REFERENCE_CLASS_P (elt)
4580 || UNARY_CLASS_P (elt)
4581 || BINARY_CLASS_P (elt)
4582 || VL_EXP_CLASS_P (elt)
4583 || EXPRESSION_CLASS_P (elt))
4584 ? TREE_OPERAND (elt, 0) : 0))
4585 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
4586 return elt;
4588 for (elt = obj; elt != 0;
4589 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
4590 || TREE_CODE (elt) == COND_EXPR)
4591 ? TREE_OPERAND (elt, 1)
4592 : (REFERENCE_CLASS_P (elt)
4593 || UNARY_CLASS_P (elt)
4594 || BINARY_CLASS_P (elt)
4595 || VL_EXP_CLASS_P (elt)
4596 || EXPRESSION_CLASS_P (elt))
4597 ? TREE_OPERAND (elt, 0) : 0))
4598 if (POINTER_TYPE_P (TREE_TYPE (elt))
4599 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
4600 == need_type))
4601 return fold_build1 (INDIRECT_REF, need_type, elt);
4603 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4604 survives until RTL generation, there will be an error. */
4605 return exp;
4608 /* TREE_LIST is special because we need to look at TREE_VALUE
4609 and TREE_CHAIN, not TREE_OPERANDS. */
4610 else if (code == TREE_LIST)
4612 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4613 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4614 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4615 return exp;
4617 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4619 else
4620 switch (TREE_CODE_CLASS (code))
4622 case tcc_constant:
4623 case tcc_declaration:
4624 return exp;
4626 case tcc_exceptional:
4627 case tcc_unary:
4628 case tcc_binary:
4629 case tcc_comparison:
4630 case tcc_expression:
4631 case tcc_reference:
4632 case tcc_statement:
4633 switch (TREE_CODE_LENGTH (code))
4635 case 0:
4636 return exp;
4638 case 1:
4639 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4640 if (op0 == TREE_OPERAND (exp, 0))
4641 return exp;
4643 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4644 break;
4646 case 2:
4647 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4648 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4650 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4651 return exp;
4653 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4654 break;
4656 case 3:
4657 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4658 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4659 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4661 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4662 && op2 == TREE_OPERAND (exp, 2))
4663 return exp;
4665 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4666 break;
4668 case 4:
4669 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4670 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4671 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4672 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4674 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4675 && op2 == TREE_OPERAND (exp, 2)
4676 && op3 == TREE_OPERAND (exp, 3))
4677 return exp;
4679 new_tree
4680 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4681 break;
4683 default:
4684 gcc_unreachable ();
4686 break;
4688 case tcc_vl_exp:
4690 int i;
4692 new_tree = NULL_TREE;
4694 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4696 tree op = TREE_OPERAND (exp, i);
4697 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4698 if (new_op != op)
4700 if (!new_tree)
4701 new_tree = copy_node (exp);
4702 TREE_OPERAND (new_tree, i) = new_op;
4706 if (new_tree)
4708 new_tree = fold (new_tree);
4709 if (TREE_CODE (new_tree) == CALL_EXPR)
4710 process_call_operands (new_tree);
4712 else
4713 return exp;
4715 break;
4717 default:
4718 gcc_unreachable ();
4721 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4723 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4724 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4726 return new_tree;
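/* Illustrative example (editorial sketch, not part of the original
   source): the typical use of this routine.  A self-referential size
   such as the TYPE_SIZE_UNIT of a variable-sized record contains a
   PLACEHOLDER_EXPR; resolving it against a concrete object yields that
   object's size.  SUBSTITUTE_PLACEHOLDER_IN_EXPR and
   CONTAINS_PLACEHOLDER_P are the macros from tree.h.  */
#if 0
static tree
example_object_size (tree obj)
{
  tree size = TYPE_SIZE_UNIT (TREE_TYPE (obj));
  if (size && CONTAINS_PLACEHOLDER_P (size))
    /* Replace the PLACEHOLDER_EXPR with (a reference into) OBJ.  */
    size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, obj);
  return size;
}
#endif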
4730 /* Subroutine of stabilize_reference; this is called for subtrees of
4731 references. Any expression with side-effects must be put in a SAVE_EXPR
4732 to ensure that it is only evaluated once.
4734 We don't put SAVE_EXPR nodes around everything, because assigning very
4735 simple expressions to temporaries causes us to miss good opportunities
4736 for optimizations. Among other things, the opportunity to fold in the
4737 addition of a constant into an addressing mode often gets lost, e.g.
4738 "y[i+1] += x;". In general, we take the approach that we should not make
4739 an assignment unless we are forced into it - i.e., that any non-side effect
4740 operator should be allowed, and that cse should take care of coalescing
4741 multiple utterances of the same expression should that prove fruitful. */
4743 static tree
4744 stabilize_reference_1 (tree e)
4746 tree result;
4747 enum tree_code code = TREE_CODE (e);
4749 /* We cannot ignore const expressions because it might be a reference
4750 to a const array but whose index contains side-effects. But we can
4751 ignore things that are actual constant or that already have been
4752 handled by this function. */
4754 if (tree_invariant_p (e))
4755 return e;
4757 switch (TREE_CODE_CLASS (code))
4759 case tcc_exceptional:
4760 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4761 have side-effects. */
4762 if (code == STATEMENT_LIST)
4763 return save_expr (e);
4764 /* FALLTHRU */
4765 case tcc_type:
4766 case tcc_declaration:
4767 case tcc_comparison:
4768 case tcc_statement:
4769 case tcc_expression:
4770 case tcc_reference:
4771 case tcc_vl_exp:
4772 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4773 so that it will only be evaluated once. */
4774 /* The reference (r) and comparison (<) classes could be handled as
4775 below, but it is generally faster to only evaluate them once. */
4776 if (TREE_SIDE_EFFECTS (e))
4777 return save_expr (e);
4778 return e;
4780 case tcc_constant:
4781 /* Constants need no processing. In fact, we should never reach
4782 here. */
4783 return e;
4785 case tcc_binary:
4786 /* Division is slow and tends to be compiled with jumps,
4787 especially the division by powers of 2 that is often
4788 found inside of an array reference. So do it just once. */
4789 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4790 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4791 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4792 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4793 return save_expr (e);
4794 /* Recursively stabilize each operand. */
4795 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4796 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4797 break;
4799 case tcc_unary:
4800 /* Recursively stabilize each operand. */
4801 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4802 break;
4804 default:
4805 gcc_unreachable ();
4808 TREE_TYPE (result) = TREE_TYPE (e);
4809 TREE_READONLY (result) = TREE_READONLY (e);
4810 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4811 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4813 return result;
4816 /* Stabilize a reference so that we can use it any number of times
4817 without causing its operands to be evaluated more than once.
4818 Returns the stabilized reference. This works by means of save_expr,
4819 so see the caveats in the comments about save_expr.
4821 Also allows conversion expressions whose operands are references.
4822 Any other kind of expression is returned unchanged. */
4824 tree
4825 stabilize_reference (tree ref)
4827 tree result;
4828 enum tree_code code = TREE_CODE (ref);
4830 switch (code)
4832 case VAR_DECL:
4833 case PARM_DECL:
4834 case RESULT_DECL:
4835 /* No action is needed in this case. */
4836 return ref;
4838 CASE_CONVERT:
4839 case FLOAT_EXPR:
4840 case FIX_TRUNC_EXPR:
4841 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4842 break;
4844 case INDIRECT_REF:
4845 result = build_nt (INDIRECT_REF,
4846 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4847 break;
4849 case COMPONENT_REF:
4850 result = build_nt (COMPONENT_REF,
4851 stabilize_reference (TREE_OPERAND (ref, 0)),
4852 TREE_OPERAND (ref, 1), NULL_TREE);
4853 break;
4855 case BIT_FIELD_REF:
4856 result = build_nt (BIT_FIELD_REF,
4857 stabilize_reference (TREE_OPERAND (ref, 0)),
4858 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4859 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4860 break;
4862 case ARRAY_REF:
4863 result = build_nt (ARRAY_REF,
4864 stabilize_reference (TREE_OPERAND (ref, 0)),
4865 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4866 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4867 break;
4869 case ARRAY_RANGE_REF:
4870 result = build_nt (ARRAY_RANGE_REF,
4871 stabilize_reference (TREE_OPERAND (ref, 0)),
4872 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4873 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4874 break;
4876 case COMPOUND_EXPR:
4877 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4878 it wouldn't be ignored. This matters when dealing with
4879 volatiles. */
4880 return stabilize_reference_1 (ref);
4882 /* If arg isn't a kind of lvalue we recognize, make no change.
4883 Caller should recognize the error for an invalid lvalue. */
4884 default:
4885 return ref;
4887 case ERROR_MARK:
4888 return error_mark_node;
4891 TREE_TYPE (result) = TREE_TYPE (ref);
4892 TREE_READONLY (result) = TREE_READONLY (ref);
4893 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4894 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4895 protected_set_expr_location (result, EXPR_LOCATION (ref));
4897 return result;
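/* Illustrative example (editorial sketch, not part of the original
   source): why front ends stabilize references.  For a compound
   assignment the same lvalue is needed both as the load source and the
   store target, but side effects in its operands (an embedded i++, say)
   must happen only once.  REF and INC are assumed to be compatible
   integer-typed trees supplied by a front end.  */
#if 0
static tree
example_compound_assign (tree ref, tree inc)
{
  tree stable = stabilize_reference (ref);
  tree sum = build2 (PLUS_EXPR, TREE_TYPE (stable), stable, inc);
  /* STABLE appears twice, yet its operands are evaluated only once
     thanks to the SAVE_EXPRs inserted by stabilize_reference.  */
  return build2 (MODIFY_EXPR, TREE_TYPE (stable), stable, sum);
}
#endif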
4900 /* Low-level constructors for expressions. */
4902 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4903 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4905 void
4906 recompute_tree_invariant_for_addr_expr (tree t)
4908 tree node;
4909 bool tc = true, se = false;
4911 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4913 /* We started out assuming this address is both invariant and constant and
4914 that it does not have side effects. Now go down any handled components and see if
4915 any of them involve offsets that are either non-constant or non-invariant.
4916 Also check for side-effects.
4918 ??? Note that this code makes no attempt to deal with the case where
4919 taking the address of something causes a copy due to misalignment. */
4921 #define UPDATE_FLAGS(NODE) \
4922 do { tree _node = (NODE); \
4923 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4924 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4926 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4927 node = TREE_OPERAND (node, 0))
4929 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4930 array reference (probably made temporarily by the G++ front end),
4931 so ignore all the operands. */
4932 if ((TREE_CODE (node) == ARRAY_REF
4933 || TREE_CODE (node) == ARRAY_RANGE_REF)
4934 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4936 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4937 if (TREE_OPERAND (node, 2))
4938 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4939 if (TREE_OPERAND (node, 3))
4940 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4942 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4943 FIELD_DECL, apparently. The G++ front end can put something else
4944 there, at least temporarily. */
4945 else if (TREE_CODE (node) == COMPONENT_REF
4946 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4948 if (TREE_OPERAND (node, 2))
4949 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4953 node = lang_hooks.expr_to_decl (node, &tc, &se);
4955 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4956 the address, since &(*a)->b is a form of addition. If it's a constant, the
4957 address is constant too. If it's a decl, its address is constant if the
4958 decl is static. Everything else is not constant and, furthermore,
4959 taking the address of a volatile variable is not volatile. */
4960 if (TREE_CODE (node) == INDIRECT_REF
4961 || TREE_CODE (node) == MEM_REF)
4962 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4963 else if (CONSTANT_CLASS_P (node))
4965 else if (DECL_P (node))
4966 tc &= (staticp (node) != NULL_TREE);
4967 else
4969 tc = false;
4970 se |= TREE_SIDE_EFFECTS (node);
4974 TREE_CONSTANT (t) = tc;
4975 TREE_SIDE_EFFECTS (t) = se;
4976 #undef UPDATE_FLAGS
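/* Illustrative example (editorial sketch, not part of the original
   source): what the function above computes.  Taking the address of a
   static variable yields a TREE_CONSTANT ADDR_EXPR; build1 calls
   recompute_tree_invariant_for_addr_expr for ADDR_EXPR, so the flag is
   already set on the result.  The variable name is made up.  */
#if 0
static void
example_addr_flags (void)
{
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier ("example_var"), integer_type_node);
  TREE_STATIC (var) = 1;

  tree addr = build1 (ADDR_EXPR, build_pointer_type (integer_type_node),
		      var);
  gcc_assert (TREE_CONSTANT (addr) && !TREE_SIDE_EFFECTS (addr));
}
#endif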
4979 /* Build an expression of code CODE, data type TYPE, and operands as
4980 specified. Expressions and reference nodes can be created this way.
4981 Constants, decls, types and misc nodes cannot be.
4983 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4984 enough for all extant tree codes. */
4986 tree
4987 build0 (enum tree_code code, tree tt MEM_STAT_DECL)
4989 tree t;
4991 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4993 t = make_node (code PASS_MEM_STAT);
4994 TREE_TYPE (t) = tt;
4996 return t;
4999 tree
5000 build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
5002 int length = sizeof (struct tree_exp);
5003 tree t;
5005 record_node_allocation_statistics (code, length);
5007 gcc_assert (TREE_CODE_LENGTH (code) == 1);
5009 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
5011 memset (t, 0, sizeof (struct tree_common));
5013 TREE_SET_CODE (t, code);
5015 TREE_TYPE (t) = type;
5016 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
5017 TREE_OPERAND (t, 0) = node;
5018 if (node && !TYPE_P (node))
5020 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
5021 TREE_READONLY (t) = TREE_READONLY (node);
5024 if (TREE_CODE_CLASS (code) == tcc_statement)
5026 if (code != DEBUG_BEGIN_STMT)
5027 TREE_SIDE_EFFECTS (t) = 1;
5029 else switch (code)
5031 case VA_ARG_EXPR:
5032 /* All of these have side-effects, no matter what their
5033 operands are. */
5034 TREE_SIDE_EFFECTS (t) = 1;
5035 TREE_READONLY (t) = 0;
5036 break;
5038 case INDIRECT_REF:
5039 /* Whether a dereference is readonly has nothing to do with whether
5040 its operand is readonly. */
5041 TREE_READONLY (t) = 0;
5042 break;
5044 case ADDR_EXPR:
5045 if (node)
5046 recompute_tree_invariant_for_addr_expr (t);
5047 break;
5049 default:
5050 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
5051 && node && !TYPE_P (node)
5052 && TREE_CONSTANT (node))
5053 TREE_CONSTANT (t) = 1;
5054 if (TREE_CODE_CLASS (code) == tcc_reference
5055 && node && TREE_THIS_VOLATILE (node))
5056 TREE_THIS_VOLATILE (t) = 1;
5057 break;
5060 return t;
5063 #define PROCESS_ARG(N) \
5064 do { \
5065 TREE_OPERAND (t, N) = arg##N; \
5066 if (arg##N && !TYPE_P (arg##N)) \
5068 if (TREE_SIDE_EFFECTS (arg##N)) \
5069 side_effects = 1; \
5070 if (!TREE_READONLY (arg##N) \
5071 && !CONSTANT_CLASS_P (arg##N)) \
5072 (void) (read_only = 0); \
5073 if (!TREE_CONSTANT (arg##N)) \
5074 (void) (constant = 0); \
5076 } while (0)
5078 tree
5079 build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
5081 bool constant, read_only, side_effects, div_by_zero;
5082 tree t;
5084 gcc_assert (TREE_CODE_LENGTH (code) == 2);
5086 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
5087 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
5088 /* When sizetype precision doesn't match that of pointers
5089 we need to be able to build explicit extensions or truncations
5090 of the offset argument. */
5091 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
5092 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
5093 && TREE_CODE (arg1) == INTEGER_CST);
5095 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
5096 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
5097 && ptrofftype_p (TREE_TYPE (arg1)));
5099 t = make_node (code PASS_MEM_STAT);
5100 TREE_TYPE (t) = tt;
5102 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
5103 result based on those same flags for the arguments. But if the
5104 arguments aren't really even `tree' expressions, we shouldn't be trying
5105 to do this. */
5107 /* Expressions without side effects may be constant if their
5108 arguments are as well. */
5109 constant = (TREE_CODE_CLASS (code) == tcc_comparison
5110 || TREE_CODE_CLASS (code) == tcc_binary);
5111 read_only = 1;
5112 side_effects = TREE_SIDE_EFFECTS (t);
5114 switch (code)
5116 case TRUNC_DIV_EXPR:
5117 case CEIL_DIV_EXPR:
5118 case FLOOR_DIV_EXPR:
5119 case ROUND_DIV_EXPR:
5120 case EXACT_DIV_EXPR:
5121 case CEIL_MOD_EXPR:
5122 case FLOOR_MOD_EXPR:
5123 case ROUND_MOD_EXPR:
5124 case TRUNC_MOD_EXPR:
5125 div_by_zero = integer_zerop (arg1);
5126 break;
5127 default:
5128 div_by_zero = false;
5131 PROCESS_ARG (0);
5132 PROCESS_ARG (1);
5134 TREE_SIDE_EFFECTS (t) = side_effects;
5135 if (code == MEM_REF)
5137 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5139 tree o = TREE_OPERAND (arg0, 0);
5140 TREE_READONLY (t) = TREE_READONLY (o);
5141 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5144 else
5146 TREE_READONLY (t) = read_only;
5147 /* Don't mark X / 0 as constant. */
5148 TREE_CONSTANT (t) = constant && !div_by_zero;
5149 TREE_THIS_VOLATILE (t)
5150 = (TREE_CODE_CLASS (code) == tcc_reference
5151 && arg0 && TREE_THIS_VOLATILE (arg0));
5154 return t;
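/* Illustrative example (editorial sketch, not part of the original
   source): the shape build2 expects for pointer arithmetic.  As the
   assert above requires, POINTER_PLUS_EXPR takes a pointer and an
   offset whose type satisfies ptrofftype_p, so the offset is written as
   a sizetype constant.  PTR is assumed to be a pointer-typed tree.  */
#if 0
static tree
example_pointer_plus (tree ptr)
{
  return build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
		 build_int_cst (sizetype, 4));
}
#endif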
5158 tree
5159 build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
5160 tree arg2 MEM_STAT_DECL)
5162 bool constant, read_only, side_effects;
5163 tree t;
5165 gcc_assert (TREE_CODE_LENGTH (code) == 3);
5166 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5168 t = make_node (code PASS_MEM_STAT);
5169 TREE_TYPE (t) = tt;
5171 read_only = 1;
5173 /* As a special exception, if COND_EXPR has NULL branches, we
5174 assume that it is a gimple statement and always consider
5175 it to have side effects. */
5176 if (code == COND_EXPR
5177 && tt == void_type_node
5178 && arg1 == NULL_TREE
5179 && arg2 == NULL_TREE)
5180 side_effects = true;
5181 else
5182 side_effects = TREE_SIDE_EFFECTS (t);
5184 PROCESS_ARG (0);
5185 PROCESS_ARG (1);
5186 PROCESS_ARG (2);
5188 if (code == COND_EXPR)
5189 TREE_READONLY (t) = read_only;
5191 TREE_SIDE_EFFECTS (t) = side_effects;
5192 TREE_THIS_VOLATILE (t)
5193 = (TREE_CODE_CLASS (code) == tcc_reference
5194 && arg0 && TREE_THIS_VOLATILE (arg0));
5196 return t;
5199 tree
5200 build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
5201 tree arg2, tree arg3 MEM_STAT_DECL)
5203 bool constant, read_only, side_effects;
5204 tree t;
5206 gcc_assert (TREE_CODE_LENGTH (code) == 4);
5208 t = make_node (code PASS_MEM_STAT);
5209 TREE_TYPE (t) = tt;
5211 side_effects = TREE_SIDE_EFFECTS (t);
5213 PROCESS_ARG (0);
5214 PROCESS_ARG (1);
5215 PROCESS_ARG (2);
5216 PROCESS_ARG (3);
5218 TREE_SIDE_EFFECTS (t) = side_effects;
5219 TREE_THIS_VOLATILE (t)
5220 = (TREE_CODE_CLASS (code) == tcc_reference
5221 && arg0 && TREE_THIS_VOLATILE (arg0));
5223 return t;
5226 tree
5227 build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
5228 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
5230 bool constant, read_only, side_effects;
5231 tree t;
5233 gcc_assert (TREE_CODE_LENGTH (code) == 5);
5235 t = make_node (code PASS_MEM_STAT);
5236 TREE_TYPE (t) = tt;
5238 side_effects = TREE_SIDE_EFFECTS (t);
5240 PROCESS_ARG (0);
5241 PROCESS_ARG (1);
5242 PROCESS_ARG (2);
5243 PROCESS_ARG (3);
5244 PROCESS_ARG (4);
5246 TREE_SIDE_EFFECTS (t) = side_effects;
5247 if (code == TARGET_MEM_REF)
5249 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
5251 tree o = TREE_OPERAND (arg0, 0);
5252 TREE_READONLY (t) = TREE_READONLY (o);
5253 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
5256 else
5257 TREE_THIS_VOLATILE (t)
5258 = (TREE_CODE_CLASS (code) == tcc_reference
5259 && arg0 && TREE_THIS_VOLATILE (arg0));
5261 return t;
5264 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
5265 on the pointer PTR. */
5267 tree
5268 build_simple_mem_ref_loc (location_t loc, tree ptr)
5270 poly_int64 offset = 0;
5271 tree ptype = TREE_TYPE (ptr);
5272 tree tem;
5273 /* For convenience allow addresses that collapse to a simple base
5274 and offset. */
5275 if (TREE_CODE (ptr) == ADDR_EXPR
5276 && (handled_component_p (TREE_OPERAND (ptr, 0))
5277 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
5279 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
5280 gcc_assert (ptr);
5281 if (TREE_CODE (ptr) == MEM_REF)
5283 offset += mem_ref_offset (ptr).force_shwi ();
5284 ptr = TREE_OPERAND (ptr, 0);
5286 else
5287 ptr = build_fold_addr_expr (ptr);
5288 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
5290 tem = build2 (MEM_REF, TREE_TYPE (ptype),
5291 ptr, build_int_cst (ptype, offset));
5292 SET_EXPR_LOCATION (tem, loc);
5293 return tem;
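/* Illustrative example (editorial sketch, not part of the original
   source): dereferencing a pointer through the helper above instead of
   assembling the MEM_REF by hand.  PTR is assumed to satisfy
   is_gimple_reg or is_gimple_min_invariant, as the assert in the
   function requires.  */
#if 0
static tree
example_deref (location_t loc, tree ptr)
{
  tree mem = build_simple_mem_ref_loc (loc, ptr);
  /* The second operand of the MEM_REF records the constant byte
     displacement, zero here.  */
  gcc_assert (known_eq (mem_ref_offset (mem), 0));
  return mem;
}
#endif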
5296 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
5298 poly_offset_int
5299 mem_ref_offset (const_tree t)
5301 return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
5302 SIGNED);
5305 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
5306 offsetted by OFFSET units. */
5308 tree
5309 build_invariant_address (tree type, tree base, poly_int64 offset)
5311 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
5312 build_fold_addr_expr (base),
5313 build_int_cst (ptr_type_node, offset));
5314 tree addr = build1 (ADDR_EXPR, type, ref);
5315 recompute_tree_invariant_for_addr_expr (addr);
5316 return addr;
5319 /* Similar except don't specify the TREE_TYPE
5320 and leave the TREE_SIDE_EFFECTS as 0.
5321 It is permissible for arguments to be null,
5322 or even garbage if their values do not matter. */
5324 tree
5325 build_nt (enum tree_code code, ...)
5327 tree t;
5328 int length;
5329 int i;
5330 va_list p;
5332 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
5334 va_start (p, code);
5336 t = make_node (code);
5337 length = TREE_CODE_LENGTH (code);
5339 for (i = 0; i < length; i++)
5340 TREE_OPERAND (t, i) = va_arg (p, tree);
5342 va_end (p);
5343 return t;
5346 /* Similar to build_nt, but for creating a CALL_EXPR object with a
5347 tree vec. */
5349 tree
5350 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
5352 tree ret, t;
5353 unsigned int ix;
5355 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
5356 CALL_EXPR_FN (ret) = fn;
5357 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
5358 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
5359 CALL_EXPR_ARG (ret, ix) = t;
5360 return ret;
5363 /* Create a DECL_... node of code CODE, name NAME (if non-null)
5364 and data type TYPE.
5365 We do NOT enter this node in any sort of symbol table.
5367 LOC is the location of the decl.
5369 layout_decl is used to set up the decl's storage layout.
5370 Other slots are initialized to 0 or null pointers. */
5372 tree
5373 build_decl (location_t loc, enum tree_code code, tree name,
5374 tree type MEM_STAT_DECL)
5376 tree t;
5378 t = make_node (code PASS_MEM_STAT);
5379 DECL_SOURCE_LOCATION (t) = loc;
5381 /* if (type == error_mark_node)
5382 type = integer_type_node; */
5383 /* That is not done, deliberately, so that having error_mark_node
5384 as the type can suppress useless errors in the use of this variable. */
5386 DECL_NAME (t) = name;
5387 TREE_TYPE (t) = type;
5389 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
5390 layout_decl (t, 0);
5392 return t;
5395 /* Create and return a DEBUG_EXPR_DECL node of the given TYPE. */
5397 tree
5398 build_debug_expr_decl (tree type)
5400 tree vexpr = make_node (DEBUG_EXPR_DECL);
5401 DECL_ARTIFICIAL (vexpr) = 1;
5402 TREE_TYPE (vexpr) = type;
5403 SET_DECL_MODE (vexpr, TYPE_MODE (type));
5404 return vexpr;
5407 /* Builds and returns function declaration with NAME and TYPE. */
5409 tree
5410 build_fn_decl (const char *name, tree type)
5412 tree id = get_identifier (name);
5413 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
5415 DECL_EXTERNAL (decl) = 1;
5416 TREE_PUBLIC (decl) = 1;
5417 DECL_ARTIFICIAL (decl) = 1;
5418 TREE_NOTHROW (decl) = 1;
5420 return decl;
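/* Illustrative example (editorial sketch, not part of the original
   source): declaring an external runtime helper the way middle-end code
   typically does, combining build_function_type_list from tree.h with
   build_fn_decl above.  The helper's name is made up.  */
#if 0
static tree
example_declare_runtime_hook (void)
{
  tree fntype = build_function_type_list (void_type_node, ptr_type_node,
					  NULL_TREE);
  /* Public, external, artificial and nothrow, per build_fn_decl.  */
  return build_fn_decl ("__example_runtime_hook", fntype);
}
#endif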
5423 vec<tree, va_gc> *all_translation_units;
5425 /* Builds a new translation-unit decl with name NAME, queues it in the
5426 global list of translation-unit decls and returns it. */
5428 tree
5429 build_translation_unit_decl (tree name)
5431 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
5432 name, NULL_TREE);
5433 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
5434 vec_safe_push (all_translation_units, tu);
5435 return tu;
5439 /* BLOCK nodes are used to represent the structure of binding contours
5440 and declarations, once those contours have been exited and their contents
5441 compiled. This information is used for outputting debugging info. */
5443 tree
5444 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
5446 tree block = make_node (BLOCK);
5448 BLOCK_VARS (block) = vars;
5449 BLOCK_SUBBLOCKS (block) = subblocks;
5450 BLOCK_SUPERCONTEXT (block) = supercontext;
5451 BLOCK_CHAIN (block) = chain;
5452 return block;
5456 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
5458 LOC is the location to use in tree T. */
5460 void
5461 protected_set_expr_location (tree t, location_t loc)
5463 if (CAN_HAVE_LOCATION_P (t))
5464 SET_EXPR_LOCATION (t, loc);
5465 else if (t && TREE_CODE (t) == STATEMENT_LIST)
5467 t = expr_single (t);
5468 if (t && CAN_HAVE_LOCATION_P (t))
5469 SET_EXPR_LOCATION (t, loc);
5473 /* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
5474 UNKNOWN_LOCATION. */
5476 void
5477 protected_set_expr_location_if_unset (tree t, location_t loc)
5479 t = expr_single (t);
5480 if (t && !EXPR_HAS_LOCATION (t))
5481 protected_set_expr_location (t, loc);
5484 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5485 of the various TYPE_QUAL values. */
5487 static void
5488 set_type_quals (tree type, int type_quals)
5490 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
5491 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
5492 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
5493 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
5494 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
5497 /* Returns true iff CAND and BASE have equivalent language-specific
5498 qualifiers. */
5500 bool
5501 check_lang_type (const_tree cand, const_tree base)
5503 if (lang_hooks.types.type_hash_eq == NULL)
5504 return true;
5505 /* type_hash_eq currently only applies to these types. */
5506 if (TREE_CODE (cand) != FUNCTION_TYPE
5507 && TREE_CODE (cand) != METHOD_TYPE)
5508 return true;
5509 return lang_hooks.types.type_hash_eq (cand, base);
5512 /* This function checks to see if TYPE matches the size of one of the built-in
5513 atomic types, and returns that core atomic type. */
5515 static tree
5516 find_atomic_core_type (const_tree type)
5518 tree base_atomic_type;
5520 /* Only handle complete types. */
5521 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
5522 return NULL_TREE;
5524 switch (tree_to_uhwi (TYPE_SIZE (type)))
5526 case 8:
5527 base_atomic_type = atomicQI_type_node;
5528 break;
5530 case 16:
5531 base_atomic_type = atomicHI_type_node;
5532 break;
5534 case 32:
5535 base_atomic_type = atomicSI_type_node;
5536 break;
5538 case 64:
5539 base_atomic_type = atomicDI_type_node;
5540 break;
5542 case 128:
5543 base_atomic_type = atomicTI_type_node;
5544 break;
5546 default:
5547 base_atomic_type = NULL_TREE;
5550 return base_atomic_type;
5553 /* Returns true iff unqualified CAND and BASE are equivalent. */
5555 bool
5556 check_base_type (const_tree cand, const_tree base)
5558 if (TYPE_NAME (cand) != TYPE_NAME (base)
5559 /* Apparently this is needed for Objective-C. */
5560 || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
5561 || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
5562 TYPE_ATTRIBUTES (base)))
5563 return false;
5564 /* Check alignment. */
5565 if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
5566 && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
5567 return true;
5568 /* Atomic types increase minimal alignment. We must do so as well
5569 or we get duplicated canonical types. See PR88686. */
5570 if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
5572 /* See if this object can map to a basic atomic type. */
5573 tree atomic_type = find_atomic_core_type (cand);
5574 if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
5575 return true;
5577 return false;
5580 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5582 bool
5583 check_qualified_type (const_tree cand, const_tree base, int type_quals)
5585 return (TYPE_QUALS (cand) == type_quals
5586 && check_base_type (cand, base)
5587 && check_lang_type (cand, base));
5590 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5592 static bool
5593 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
5595 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
5596 && TYPE_NAME (cand) == TYPE_NAME (base)
5597 /* Apparently this is needed for Objective-C. */
5598 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
5599 /* Check alignment. */
5600 && TYPE_ALIGN (cand) == align
5601 /* Check this is a user-aligned type as build_aligned_type
5602 would create. */
5603 && TYPE_USER_ALIGN (cand)
5604 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
5605 TYPE_ATTRIBUTES (base))
5606 && check_lang_type (cand, base));
5609 /* Return a version of the TYPE, qualified as indicated by the
5610 TYPE_QUALS, if one exists. If no qualified version exists yet,
5611 return NULL_TREE. */
5613 tree
5614 get_qualified_type (tree type, int type_quals)
5616 if (TYPE_QUALS (type) == type_quals)
5617 return type;
5619 tree mv = TYPE_MAIN_VARIANT (type);
5620 if (check_qualified_type (mv, type, type_quals))
5621 return mv;
5623 /* Search the chain of variants to see if there is already one there just
5624 like the one we need to have. If so, use that existing one. We must
5625 preserve the TYPE_NAME, since there is code that depends on this. */
5626 for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
5627 if (check_qualified_type (*tp, type, type_quals))
5629 /* Put the found variant at the head of the variant list so
5630 frequently searched variants get found faster. The C++ FE
5631 benefits greatly from this. */
5632 tree t = *tp;
5633 *tp = TYPE_NEXT_VARIANT (t);
5634 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
5635 TYPE_NEXT_VARIANT (mv) = t;
5636 return t;
5639 return NULL_TREE;
5642 /* Like get_qualified_type, but creates the type if it does not
5643 exist. This function never returns NULL_TREE. */
5645 tree
5646 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
5648 tree t;
5650 /* See if we already have the appropriate qualified variant. */
5651 t = get_qualified_type (type, type_quals);
5653 /* If not, build it. */
5654 if (!t)
5656 t = build_variant_type_copy (type PASS_MEM_STAT);
5657 set_type_quals (t, type_quals);
5659 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
5661 /* See if this object can map to a basic atomic type. */
5662 tree atomic_type = find_atomic_core_type (type);
5663 if (atomic_type)
5665 /* Ensure the alignment of this type is compatible with
5666 the required alignment of the atomic type. */
5667 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
5668 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
5672 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5673 /* Propagate structural equality. */
5674 SET_TYPE_STRUCTURAL_EQUALITY (t);
5675 else if (TYPE_CANONICAL (type) != type)
5676 /* Build the underlying canonical type, since it is different
5677 from TYPE. */
5679 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
5680 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
5682 else
5683 /* T is its own canonical type. */
5684 TYPE_CANONICAL (t) = t;
5688 return t;
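/* Illustrative example (editorial sketch, not part of the original
   source): asking twice for the same qualified variant returns the same
   node, because the second call finds the variant built by the first on
   the main variant's chain.  */
#if 0
static void
example_qualified_variants (void)
{
  int quals = TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE;
  tree cv1 = build_qualified_type (integer_type_node, quals);
  tree cv2 = build_qualified_type (integer_type_node, quals);
  gcc_assert (cv1 == cv2
	      && TYPE_MAIN_VARIANT (cv1) == integer_type_node);
}
#endif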
5691 /* Create a variant of type T with alignment ALIGN. */
5693 tree
5694 build_aligned_type (tree type, unsigned int align)
5696 tree t;
5698 if (TYPE_PACKED (type)
5699 || TYPE_ALIGN (type) == align)
5700 return type;
5702 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
5703 if (check_aligned_type (t, type, align))
5704 return t;
5706 t = build_variant_type_copy (type);
5707 SET_TYPE_ALIGN (t, align);
5708 TYPE_USER_ALIGN (t) = 1;
5710 return t;
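/* Illustrative example (editorial sketch, not part of the original
   source): an over-aligned variant such as the one created for
   __attribute__ ((aligned (16))).  TYPE_ALIGN and the ALIGN argument
   are both in bits.  */
#if 0
static tree
example_overaligned_int (void)
{
  tree t = build_aligned_type (integer_type_node, 128);
  gcc_assert (TYPE_ALIGN (t) == 128 && TYPE_USER_ALIGN (t));
  return t;
}
#endif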
5713 /* Create a new distinct copy of TYPE. The new type is made its own
5714 MAIN_VARIANT. If TYPE requires structural equality checks, the
5715 resulting type requires structural equality checks; otherwise, its
5716 TYPE_CANONICAL points to itself. */
5718 tree
5719 build_distinct_type_copy (tree type MEM_STAT_DECL)
5721 tree t = copy_node (type PASS_MEM_STAT);
5723 TYPE_POINTER_TO (t) = 0;
5724 TYPE_REFERENCE_TO (t) = 0;
5726 /* Set the canonical type either to a new equivalence class, or
5727 propagate the need for structural equality checks. */
5728 if (TYPE_STRUCTURAL_EQUALITY_P (type))
5729 SET_TYPE_STRUCTURAL_EQUALITY (t);
5730 else
5731 TYPE_CANONICAL (t) = t;
5733 /* Make it its own variant. */
5734 TYPE_MAIN_VARIANT (t) = t;
5735 TYPE_NEXT_VARIANT (t) = 0;
5737 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5738 whose TREE_TYPE is not t. This can also happen in the Ada
5739 frontend when using subtypes. */
5741 return t;
5744 /* Create a new variant of TYPE, equivalent but distinct. This is so
5745 the caller can modify it. TYPE_CANONICAL for the return type will
5746 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5747 are considered equal by the language itself (or that both types
5748 require structural equality checks). */
5750 tree
5751 build_variant_type_copy (tree type MEM_STAT_DECL)
5753 tree t, m = TYPE_MAIN_VARIANT (type);
5755 t = build_distinct_type_copy (type PASS_MEM_STAT);
5757 /* Since we're building a variant, assume that it is a non-semantic
5758 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5759 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
5760 /* Type variants have no alias set defined. */
5761 TYPE_ALIAS_SET (t) = -1;
5763 /* Add the new type to the chain of variants of TYPE. */
5764 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
5765 TYPE_NEXT_VARIANT (m) = t;
5766 TYPE_MAIN_VARIANT (t) = m;
5768 return t;
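/* Illustrative example (editorial sketch, not part of the original
   source): the difference between the two copy routines above.  A
   distinct copy starts a new main variant; a variant copy stays on the
   original's variant chain.  */
#if 0
static void
example_type_copies (tree type)
{
  tree d = build_distinct_type_copy (type);
  tree v = build_variant_type_copy (type);
  gcc_assert (TYPE_MAIN_VARIANT (d) == d);
  gcc_assert (TYPE_MAIN_VARIANT (v) == TYPE_MAIN_VARIANT (type));
}
#endif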
5771 /* Return true if the from trees in both tree maps are equal. */
5773 int
5774 tree_map_base_eq (const void *va, const void *vb)
5776 const struct tree_map_base *const a = (const struct tree_map_base *) va,
5777 *const b = (const struct tree_map_base *) vb;
5778 return (a->from == b->from);
5781 /* Hash a from tree in a tree_base_map. */
5783 unsigned int
5784 tree_map_base_hash (const void *item)
5786 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
5789 /* Return true if this tree map structure is marked for garbage collection
5790 purposes. We simply return true if the from tree is marked, so that this
5791 structure goes away when the from tree goes away. */
5793 int
5794 tree_map_base_marked_p (const void *p)
5796 return ggc_marked_p (((const struct tree_map_base *) p)->from);
5799 /* Hash a from tree in a tree_map. */
5801 unsigned int
5802 tree_map_hash (const void *item)
5804 return (((const struct tree_map *) item)->hash);
5807 /* Hash a from tree in a tree_decl_map. */
5809 unsigned int
5810 tree_decl_map_hash (const void *item)
5812 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
5815 /* Return the initialization priority for DECL. */
5817 priority_type
5818 decl_init_priority_lookup (tree decl)
5820 symtab_node *snode = symtab_node::get (decl);
5822 if (!snode)
5823 return DEFAULT_INIT_PRIORITY;
5824 return
5825 snode->get_init_priority ();
5828 /* Return the finalization priority for DECL. */
5830 priority_type
5831 decl_fini_priority_lookup (tree decl)
5833 cgraph_node *node = cgraph_node::get (decl);
5835 if (!node)
5836 return DEFAULT_INIT_PRIORITY;
5837 return
5838 node->get_fini_priority ();
5841 /* Set the initialization priority for DECL to PRIORITY. */
5843 void
5844 decl_init_priority_insert (tree decl, priority_type priority)
5846 struct symtab_node *snode;
5848 if (priority == DEFAULT_INIT_PRIORITY)
5850 snode = symtab_node::get (decl);
5851 if (!snode)
5852 return;
5854 else if (VAR_P (decl))
5855 snode = varpool_node::get_create (decl);
5856 else
5857 snode = cgraph_node::get_create (decl);
5858 snode->set_init_priority (priority);
5861 /* Set the finalization priority for DECL to PRIORITY. */
5863 void
5864 decl_fini_priority_insert (tree decl, priority_type priority)
5866 struct cgraph_node *node;
5868 if (priority == DEFAULT_INIT_PRIORITY)
5870 node = cgraph_node::get (decl);
5871 if (!node)
5872 return;
5874 else
5875 node = cgraph_node::get_create (decl);
5876 node->set_fini_priority (priority);
5879 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5881 static void
5882 print_debug_expr_statistics (void)
5884 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5885 (long) debug_expr_for_decl->size (),
5886 (long) debug_expr_for_decl->elements (),
5887 debug_expr_for_decl->collisions ());
5890 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5892 static void
5893 print_value_expr_statistics (void)
5895 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5896 (long) value_expr_for_decl->size (),
5897 (long) value_expr_for_decl->elements (),
5898 value_expr_for_decl->collisions ());
5901 /* Lookup a debug expression for FROM, and return it if we find one. */
5903 tree
5904 decl_debug_expr_lookup (tree from)
5906 struct tree_decl_map *h, in;
5907 in.base.from = from;
5909 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5910 if (h)
5911 return h->to;
5912 return NULL_TREE;
5915 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5917 void
5918 decl_debug_expr_insert (tree from, tree to)
5920 struct tree_decl_map *h;
5922 h = ggc_alloc<tree_decl_map> ();
5923 h->base.from = from;
5924 h->to = to;
5925 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
5928 /* Lookup a value expression for FROM, and return it if we find one. */
5930 tree
5931 decl_value_expr_lookup (tree from)
5933 struct tree_decl_map *h, in;
5934 in.base.from = from;
5936 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
5937 if (h)
5938 return h->to;
5939 return NULL_TREE;
5942 /* Insert a mapping FROM->TO in the value expression hashtable. */
5944 void
5945 decl_value_expr_insert (tree from, tree to)
5947 struct tree_decl_map *h;
5949 /* Uses of FROM shouldn't look like they happen at the location of TO. */
5950 to = protected_set_expr_location_unshare (to, UNKNOWN_LOCATION);
5952 h = ggc_alloc<tree_decl_map> ();
5953 h->base.from = from;
5954 h->to = to;
5955 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
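/* Illustrative example (editorial sketch, not part of the original
   source): callers normally reach this table through the
   SET_DECL_VALUE_EXPR/DECL_VALUE_EXPR macros in tree.h and keep the
   DECL_HAS_VALUE_EXPR_P flag in sync by hand.  DECL and REPL are assumed
   to be a VAR_DECL and its replacement expression.  */
#if 0
static void
example_set_value_expr (tree decl, tree repl)
{
  SET_DECL_VALUE_EXPR (decl, repl);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;
  /* Later queries go through the same hash table.  */
  gcc_assert (DECL_VALUE_EXPR (decl) != NULL_TREE);
}
#endif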
5958 /* Lookup a vector of debug arguments for FROM, and return it if we
5959 find one. */
5961 vec<tree, va_gc> **
5962 decl_debug_args_lookup (tree from)
5964 struct tree_vec_map *h, in;
5966 if (!DECL_HAS_DEBUG_ARGS_P (from))
5967 return NULL;
5968 gcc_checking_assert (debug_args_for_decl != NULL);
5969 in.base.from = from;
5970 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
5971 if (h)
5972 return &h->to;
5973 return NULL;
5976 /* Insert a mapping FROM->empty vector of debug arguments in the value
5977 expression hashtable. */
5979 vec<tree, va_gc> **
5980 decl_debug_args_insert (tree from)
5982 struct tree_vec_map *h;
5983 tree_vec_map **loc;
5985 if (DECL_HAS_DEBUG_ARGS_P (from))
5986 return decl_debug_args_lookup (from);
5987 if (debug_args_for_decl == NULL)
5988 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
5989 h = ggc_alloc<tree_vec_map> ();
5990 h->base.from = from;
5991 h->to = NULL;
5992 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
5993 *loc = h;
5994 DECL_HAS_DEBUG_ARGS_P (from) = 1;
5995 return &h->to;
5998 /* Hashing of types so that we don't make duplicates.
5999 The entry point is `type_hash_canon'. */
6001 /* Generate the default hash code for TYPE. This is designed for
6002 speed, rather than maximum entropy. */
6004 hashval_t
6005 type_hash_canon_hash (tree type)
6007 inchash::hash hstate;
6009 hstate.add_int (TREE_CODE (type));
6011 if (TREE_TYPE (type))
6012 hstate.add_object (TYPE_HASH (TREE_TYPE (type)));
6014 for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
6015 /* Just the identifier is adequate to distinguish. */
6016 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));
6018 switch (TREE_CODE (type))
6020 case METHOD_TYPE:
6021 hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
6022 /* FALLTHROUGH. */
6023 case FUNCTION_TYPE:
6024 for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6025 if (TREE_VALUE (t) != error_mark_node)
6026 hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
6027 break;
6029 case OFFSET_TYPE:
6030 hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
6031 break;
6033 case ARRAY_TYPE:
6035 if (TYPE_DOMAIN (type))
6036 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
6037 if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
6039 unsigned typeless = TYPE_TYPELESS_STORAGE (type);
6040 hstate.add_object (typeless);
6043 break;
6045 case INTEGER_TYPE:
6047 tree t = TYPE_MAX_VALUE (type);
6048 if (!t)
6049 t = TYPE_MIN_VALUE (type);
6050 for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
6051 hstate.add_object (TREE_INT_CST_ELT (t, i));
6052 break;
6055 case REAL_TYPE:
6056 case FIXED_POINT_TYPE:
6058 unsigned prec = TYPE_PRECISION (type);
6059 hstate.add_object (prec);
6060 break;
6063 case VECTOR_TYPE:
6064 hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
6065 break;
6067 default:
6068 break;
6071 return hstate.end ();
6074 /* These are the Hashtable callback functions. */
6076 /* Returns true iff the types are equivalent. */
6078 bool
6079 type_cache_hasher::equal (type_hash *a, type_hash *b)
6081 /* First test the things that are the same for all types. */
6082 if (a->hash != b->hash
6083 || TREE_CODE (a->type) != TREE_CODE (b->type)
6084 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6085 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6086 TYPE_ATTRIBUTES (b->type))
6087 || (TREE_CODE (a->type) != COMPLEX_TYPE
6088 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6089 return 0;
6091 /* Be careful about comparing arrays before and after the element type
6092 has been completed; don't compare TYPE_ALIGN unless both types are
6093 complete. */
6094 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6095 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6096 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6097 return 0;
6099 switch (TREE_CODE (a->type))
6101 case VOID_TYPE:
6102 case OPAQUE_TYPE:
6103 case COMPLEX_TYPE:
6104 case POINTER_TYPE:
6105 case REFERENCE_TYPE:
6106 case NULLPTR_TYPE:
6107 return 1;
6109 case VECTOR_TYPE:
6110 return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
6111 TYPE_VECTOR_SUBPARTS (b->type));
6113 case ENUMERAL_TYPE:
6114 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6115 && !(TYPE_VALUES (a->type)
6116 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6117 && TYPE_VALUES (b->type)
6118 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6119 && type_list_equal (TYPE_VALUES (a->type),
6120 TYPE_VALUES (b->type))))
6121 return 0;
6123 /* fall through */
6125 case INTEGER_TYPE:
6126 case REAL_TYPE:
6127 case BOOLEAN_TYPE:
6128 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6129 return false;
6130 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6131 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6132 TYPE_MAX_VALUE (b->type)))
6133 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6134 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6135 TYPE_MIN_VALUE (b->type))));
6137 case FIXED_POINT_TYPE:
6138 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6140 case OFFSET_TYPE:
6141 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6143 case METHOD_TYPE:
6144 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6145 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6146 || (TYPE_ARG_TYPES (a->type)
6147 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6148 && TYPE_ARG_TYPES (b->type)
6149 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6150 && type_list_equal (TYPE_ARG_TYPES (a->type),
6151 TYPE_ARG_TYPES (b->type)))))
6152 break;
6153 return 0;
6154 case ARRAY_TYPE:
6155 /* Don't compare the TYPE_TYPELESS_STORAGE flag on aggregates,
6156 where the flag should be inherited from the element type
6157 and can change after ARRAY_TYPEs are created; on non-aggregates
6158 compare it and hash it. Scalars will never have that flag set,
6159 and we need to differentiate between arrays created by different
6160 front ends and middle-end-created arrays. */
6161 return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
6162 && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
6163 || (TYPE_TYPELESS_STORAGE (a->type)
6164 == TYPE_TYPELESS_STORAGE (b->type))));
6166 case RECORD_TYPE:
6167 case UNION_TYPE:
6168 case QUAL_UNION_TYPE:
6169 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6170 || (TYPE_FIELDS (a->type)
6171 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6172 && TYPE_FIELDS (b->type)
6173 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6174 && type_list_equal (TYPE_FIELDS (a->type),
6175 TYPE_FIELDS (b->type))));
6177 case FUNCTION_TYPE:
6178 if ((TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6179 && (TYPE_NO_NAMED_ARGS_STDARG_P (a->type)
6180 == TYPE_NO_NAMED_ARGS_STDARG_P (b->type)))
6181 || (TYPE_ARG_TYPES (a->type)
6182 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6183 && TYPE_ARG_TYPES (b->type)
6184 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6185 && type_list_equal (TYPE_ARG_TYPES (a->type),
6186 TYPE_ARG_TYPES (b->type))))
6187 break;
6188 return 0;
6190 default:
6191 return 0;
6194 if (lang_hooks.types.type_hash_eq != NULL)
6195 return lang_hooks.types.type_hash_eq (a->type, b->type);
6197 return 1;
6200 /* Given TYPE, and HASHCODE its hash code, return the canonical
6201 object for an identical type if one already exists.
6202 Otherwise, return TYPE, and record it as the canonical object.
6204 To use this function, first create a type of the sort you want.
6205 Then compute its hash code from the fields of the type that
6206 make it different from other similar types.
6207 Then call this function and use the value. */
6209 tree
6210 type_hash_canon (unsigned int hashcode, tree type)
6212 type_hash in;
6213 type_hash **loc;
6215 /* The hash table only contains main variants, so ensure that's what we're
6216 being passed. */
6217 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6219 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6220 must call that routine before comparing TYPE_ALIGNs. */
6221 layout_type (type);
6223 in.hash = hashcode;
6224 in.type = type;
6226 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6227 if (*loc)
6229 tree t1 = ((type_hash *) *loc)->type;
6230 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
6231 && t1 != type);
6232 if (TYPE_UID (type) + 1 == next_type_uid)
6233 --next_type_uid;
6234 /* Also free the min/max values and the cached values for integer
6235 types. This can't be done in free_node, as LTO frees
6236 those on its own. */
6237 if (TREE_CODE (type) == INTEGER_TYPE)
6239 if (TYPE_MIN_VALUE (type)
6240 && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
6242 /* Zero is always in TYPE_CACHED_VALUES. */
6243 if (! TYPE_UNSIGNED (type))
6244 int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
6245 ggc_free (TYPE_MIN_VALUE (type));
6247 if (TYPE_MAX_VALUE (type)
6248 && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
6250 int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
6251 ggc_free (TYPE_MAX_VALUE (type));
6253 if (TYPE_CACHED_VALUES_P (type))
6254 ggc_free (TYPE_CACHED_VALUES (type));
6256 free_node (type);
6257 return t1;
6259 else
6261 struct type_hash *h;
6263 h = ggc_alloc<type_hash> ();
6264 h->hash = hashcode;
6265 h->type = type;
6266 *loc = h;
6268 return type;
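/* A minimal usage sketch, mirroring the constructors further down in this
   file (e.g. build_nonstandard_integer_type, build_offset_type):

     tree t = make_node (INTEGER_TYPE);
     TYPE_PRECISION (t) = 16;
     fixup_signed_type (t);
     hashval_t hash = type_hash_canon_hash (t);
     t = type_hash_canon (hash, t);

   If an equivalent main variant is already in the table, the node just
   built is freed and the cached node is returned, so callers must use
   the return value rather than the node they passed in.  */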
6272 static void
6273 print_type_hash_statistics (void)
6275 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6276 (long) type_hash_table->size (),
6277 (long) type_hash_table->elements (),
6278 type_hash_table->collisions ());
6281 /* Given two lists of types
6282 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6283 return 1 if the lists contain the same types in the same order.
6284 Also, the TREE_PURPOSEs must match. */
6286 bool
6287 type_list_equal (const_tree l1, const_tree l2)
6289 const_tree t1, t2;
6291 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6292 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6293 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6294 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6295 && (TREE_TYPE (TREE_PURPOSE (t1))
6296 == TREE_TYPE (TREE_PURPOSE (t2))))))
6297 return false;
6299 return t1 == t2;
6302 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6303 given by TYPE. If the argument list accepts variable arguments,
6304 then this function counts only the ordinary arguments. */
6307 type_num_arguments (const_tree fntype)
6309 int i = 0;
6311 for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
6312 /* If the function does not take a variable number of arguments,
6313 the last element in the list will have type `void'. */
6314 if (VOID_TYPE_P (TREE_VALUE (t)))
6315 break;
6316 else
6317 ++i;
6319 return i;
6322 /* Return the type of the function TYPE's argument ARGNO if known.
6323 For a vararg function where ARGNO refers to one of the variadic
6324 arguments, return null. Otherwise, return void_type_node for
6325 out-of-bounds ARGNO. */
6327 tree
6328 type_argument_type (const_tree fntype, unsigned argno)
6330 /* Treat zero the same as an out-of-bounds argument number. */
6331 if (!argno)
6332 return void_type_node;
6334 function_args_iterator iter;
6336 tree argtype;
6337 unsigned i = 1;
6338 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
6340 /* A vararg function's argument list ends in a null. Otherwise,
6341 an ordinary function's argument list ends with void. Return
6342 null if ARGNO refers to a vararg argument, void_type_node if
6343 it's out of bounds, and the formal argument type otherwise. */
6344 if (!argtype)
6345 break;
6347 if (i == argno || VOID_TYPE_P (argtype))
6348 return argtype;
6350 ++i;
6353 return NULL_TREE;
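/* An illustrative sketch of the two helpers above, assuming a varargs
   prototype along the lines of int f (char *, double, ...):

     tree fntype
       = build_varargs_function_type_list (integer_type_node,
                                           build_pointer_type (char_type_node),
                                           double_type_node,
                                           NULL_TREE);
     int n = type_num_arguments (fntype);       // 2: named arguments only
     tree a1 = type_argument_type (fntype, 1);  // char *
     tree a2 = type_argument_type (fntype, 2);  // double
     tree a3 = type_argument_type (fntype, 3);  // NULL_TREE: variadic part

   ARGNO is 1-based; 0 is treated as out of bounds and yields
   void_type_node.  */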
6356 /* Nonzero if integer constants T1 and T2
6357 represent the same constant value. */
6360 tree_int_cst_equal (const_tree t1, const_tree t2)
6362 if (t1 == t2)
6363 return 1;
6365 if (t1 == 0 || t2 == 0)
6366 return 0;
6368 STRIP_ANY_LOCATION_WRAPPER (t1);
6369 STRIP_ANY_LOCATION_WRAPPER (t2);
6371 if (TREE_CODE (t1) == INTEGER_CST
6372 && TREE_CODE (t2) == INTEGER_CST
6373 && wi::to_widest (t1) == wi::to_widest (t2))
6374 return 1;
6376 return 0;
6379 /* Return true if T is an INTEGER_CST whose numerical value (extended
6380 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6382 bool
6383 tree_fits_shwi_p (const_tree t)
6385 return (t != NULL_TREE
6386 && TREE_CODE (t) == INTEGER_CST
6387 && wi::fits_shwi_p (wi::to_widest (t)));
6390 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6391 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6393 bool
6394 tree_fits_poly_int64_p (const_tree t)
6396 if (t == NULL_TREE)
6397 return false;
6398 if (POLY_INT_CST_P (t))
6400 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6401 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
6402 return false;
6403 return true;
6405 return (TREE_CODE (t) == INTEGER_CST
6406 && wi::fits_shwi_p (wi::to_widest (t)));
6409 /* Return true if T is an INTEGER_CST whose numerical value (extended
6410 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6412 bool
6413 tree_fits_uhwi_p (const_tree t)
6415 return (t != NULL_TREE
6416 && TREE_CODE (t) == INTEGER_CST
6417 && wi::fits_uhwi_p (wi::to_widest (t)));
6420 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6421 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6423 bool
6424 tree_fits_poly_uint64_p (const_tree t)
6426 if (t == NULL_TREE)
6427 return false;
6428 if (POLY_INT_CST_P (t))
6430 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
6431 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
6432 return false;
6433 return true;
6435 return (TREE_CODE (t) == INTEGER_CST
6436 && wi::fits_uhwi_p (wi::to_widest (t)));
6439 /* T is an INTEGER_CST whose numerical value (extended according to
6440 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6441 HOST_WIDE_INT. */
6443 HOST_WIDE_INT
6444 tree_to_shwi (const_tree t)
6446 gcc_assert (tree_fits_shwi_p (t));
6447 return TREE_INT_CST_LOW (t);
6450 /* T is an INTEGER_CST whose numerical value (extended according to
6451 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6452 HOST_WIDE_INT. */
6454 unsigned HOST_WIDE_INT
6455 tree_to_uhwi (const_tree t)
6457 gcc_assert (tree_fits_uhwi_p (t));
6458 return TREE_INT_CST_LOW (t);
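/* The usual guarded pattern, e.g. when inspecting the byte size of some
   complete type TYPE (a sketch, not tied to a particular caller):

     tree size = TYPE_SIZE_UNIT (type);
     if (size && tree_fits_uhwi_p (size))
       {
         unsigned HOST_WIDE_INT bytes = tree_to_uhwi (size);
         ...
       }

   Calling tree_to_shwi or tree_to_uhwi without the corresponding
   tree_fits_*_p check trips the assertions above.  */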
6461 /* Return the most significant (sign) bit of T. */
6464 tree_int_cst_sign_bit (const_tree t)
6466 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
6468 return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
6471 /* Return an indication of the sign of the integer constant T.
6472 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6473 Note that -1 will never be returned if T's type is unsigned. */
6476 tree_int_cst_sgn (const_tree t)
6478 if (wi::to_wide (t) == 0)
6479 return 0;
6480 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
6481 return 1;
6482 else if (wi::neg_p (wi::to_wide (t)))
6483 return -1;
6484 else
6485 return 1;
6488 /* Return the minimum number of bits needed to represent VALUE in a
6489 signed or unsigned type; SGN says which. */
6491 unsigned int
6492 tree_int_cst_min_precision (tree value, signop sgn)
6494 /* If the value is negative, compute its negative minus 1. The latter
6495 adjustment is because the absolute value of the largest negative value
6496 is one larger than the largest positive value. This is equivalent to
6497 a bit-wise negation, so use that operation instead. */
6499 if (tree_int_cst_sgn (value) < 0)
6500 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
6502 /* Return the number of bits needed, taking into account the fact
6503 that we need one more bit for a signed than unsigned type.
6504 If value is 0 or -1, the minimum precision is 1 no matter
6505 whether SGN is SIGNED or UNSIGNED. */
6507 if (integer_zerop (value))
6508 return 1;
6509 else
6510 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
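/* Worked examples for the function above:

     value = 5,  sgn = UNSIGNED:  tree_floor_log2 (5) = 2, so 3 bits (0..7).
     value = 5,  sgn = SIGNED:    3 + 1 = 4 bits (-8..7).
     value = -3, sgn = SIGNED:    -3 is first replaced by ~(-3) = 2,
                                  tree_floor_log2 (2) = 1, so 3 bits (-4..3).
     value = 0 or -1:             1 bit in either case.  */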
6513 /* Return truthvalue of whether T1 is the same tree structure as T2.
6514 Return 1 if they are the same.
6515 Return 0 if they are understandably different.
6516 Return -1 if either contains tree structure not understood by
6517 this function. */
6520 simple_cst_equal (const_tree t1, const_tree t2)
6522 enum tree_code code1, code2;
6523 int cmp;
6524 int i;
6526 if (t1 == t2)
6527 return 1;
6528 if (t1 == 0 || t2 == 0)
6529 return 0;
6531 /* For location wrappers to be the same, they must be at the same
6532 source location (and wrap the same thing). */
6533 if (location_wrapper_p (t1) && location_wrapper_p (t2))
6535 if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
6536 return 0;
6537 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6540 code1 = TREE_CODE (t1);
6541 code2 = TREE_CODE (t2);
6543 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
6545 if (CONVERT_EXPR_CODE_P (code2)
6546 || code2 == NON_LVALUE_EXPR)
6547 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6548 else
6549 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
6552 else if (CONVERT_EXPR_CODE_P (code2)
6553 || code2 == NON_LVALUE_EXPR)
6554 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
6556 if (code1 != code2)
6557 return 0;
6559 switch (code1)
6561 case INTEGER_CST:
6562 return wi::to_widest (t1) == wi::to_widest (t2);
6564 case REAL_CST:
6565 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
6567 case FIXED_CST:
6568 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
6570 case STRING_CST:
6571 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
6572 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
6573 TREE_STRING_LENGTH (t1)));
6575 case CONSTRUCTOR:
6577 unsigned HOST_WIDE_INT idx;
6578 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
6579 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
6581 if (vec_safe_length (v1) != vec_safe_length (v2))
6582 return false;
6584 for (idx = 0; idx < vec_safe_length (v1); ++idx)
6585 /* ??? Should we handle also fields here? */
6586 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
6587 return false;
6588 return true;
6591 case SAVE_EXPR:
6592 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6594 case CALL_EXPR:
6595 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
6596 if (cmp <= 0)
6597 return cmp;
6598 if (call_expr_nargs (t1) != call_expr_nargs (t2))
6599 return 0;
6601 const_tree arg1, arg2;
6602 const_call_expr_arg_iterator iter1, iter2;
6603 for (arg1 = first_const_call_expr_arg (t1, &iter1),
6604 arg2 = first_const_call_expr_arg (t2, &iter2);
6605 arg1 && arg2;
6606 arg1 = next_const_call_expr_arg (&iter1),
6607 arg2 = next_const_call_expr_arg (&iter2))
6609 cmp = simple_cst_equal (arg1, arg2);
6610 if (cmp <= 0)
6611 return cmp;
6613 return arg1 == arg2;
6616 case TARGET_EXPR:
6617 /* Special case: if either target is an unallocated VAR_DECL,
6618 it means that it's going to be unified with whatever the
6619 TARGET_EXPR is really supposed to initialize, so treat it
6620 as being equivalent to anything. */
6621 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
6622 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
6623 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
6624 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
6625 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
6626 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
6627 cmp = 1;
6628 else
6629 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6631 if (cmp <= 0)
6632 return cmp;
6634 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6636 case WITH_CLEANUP_EXPR:
6637 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6638 if (cmp <= 0)
6639 return cmp;
6641 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
6643 case COMPONENT_REF:
6644 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
6645 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
6647 return 0;
6649 case VAR_DECL:
6650 case PARM_DECL:
6651 case CONST_DECL:
6652 case FUNCTION_DECL:
6653 return 0;
6655 default:
6656 if (POLY_INT_CST_P (t1))
6657 /* A false return means maybe_ne rather than known_ne. */
6658 return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
6659 TYPE_SIGN (TREE_TYPE (t1))),
6660 poly_widest_int::from (poly_int_cst_value (t2),
6661 TYPE_SIGN (TREE_TYPE (t2))));
6662 break;
6665 /* This general rule works for most tree codes. All exceptions should be
6666 handled above. If this is a language-specific tree code, we can't
6667 trust what might be in the operand, so say we don't know
6668 the situation. */
6669 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
6670 return -1;
6672 switch (TREE_CODE_CLASS (code1))
6674 case tcc_unary:
6675 case tcc_binary:
6676 case tcc_comparison:
6677 case tcc_expression:
6678 case tcc_reference:
6679 case tcc_statement:
6680 cmp = 1;
6681 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
6683 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
6684 if (cmp <= 0)
6685 return cmp;
6688 return cmp;
6690 default:
6691 return -1;
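/* A sketch of the typical caller pattern for the tri-state result,
   mirroring the CALL_EXPR handling inside the function itself:

     int cmp = simple_cst_equal (a, b);
     if (cmp <= 0)
       return cmp;   // 0: known different, -1: structure not understood
     // cmp == 1: equal so far, continue with the remaining operands

   Treating -1 ("don't know") the same as equality would be unsafe, hence
   the <= 0 test rather than a plain equality check.  */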
6695 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6696 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6697 than U, respectively. */
6700 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
6702 if (tree_int_cst_sgn (t) < 0)
6703 return -1;
6704 else if (!tree_fits_uhwi_p (t))
6705 return 1;
6706 else if (TREE_INT_CST_LOW (t) == u)
6707 return 0;
6708 else if (TREE_INT_CST_LOW (t) < u)
6709 return -1;
6710 else
6711 return 1;
6714 /* Return true if SIZE represents a constant size that is in bounds of
6715 what the middle-end and the backend accept (covering not more than
6716 half of the address-space).
6717 When PERR is non-null, set *PERR on failure to the description of
6718 why SIZE is not valid. */
6720 bool
6721 valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
6723 if (POLY_INT_CST_P (size))
6725 if (TREE_OVERFLOW (size))
6726 return false;
6727 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
6728 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
6729 return false;
6730 return true;
6733 cst_size_error error;
6734 if (!perr)
6735 perr = &error;
6737 if (TREE_CODE (size) != INTEGER_CST)
6739 *perr = cst_size_not_constant;
6740 return false;
6743 if (TREE_OVERFLOW_P (size))
6745 *perr = cst_size_overflow;
6746 return false;
6749 if (tree_int_cst_sgn (size) < 0)
6751 *perr = cst_size_negative;
6752 return false;
6754 if (!tree_fits_uhwi_p (size)
6755 || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
6756 < wi::to_widest (size) * 2))
6758 *perr = cst_size_too_big;
6759 return false;
6762 return true;
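/* A sketch of the intended use, where SIZE is some size tree a caller
   wants to validate before issuing a diagnostic; the messages below are
   placeholders, the real diagnostics live in the front ends:

     cst_size_error err;
     if (!valid_constant_size_p (size, &err))
       {
         if (err == cst_size_negative)
           error ("size of object is negative");
         else if (err == cst_size_too_big)
           error ("size of object exceeds maximum object size");
         else
           error ("size of object is not a constant");
       }
   */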
6765 /* Return the precision of the type, or for a complex or vector type the
6766 precision of the type of its elements. */
6768 unsigned int
6769 element_precision (const_tree type)
6771 if (!TYPE_P (type))
6772 type = TREE_TYPE (type);
6773 enum tree_code code = TREE_CODE (type);
6774 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
6775 type = TREE_TYPE (type);
6777 return TYPE_PRECISION (type);
6780 /* Return true if CODE represents an associative tree code. Otherwise
6781 return false. */
6782 bool
6783 associative_tree_code (enum tree_code code)
6785 switch (code)
6787 case BIT_IOR_EXPR:
6788 case BIT_AND_EXPR:
6789 case BIT_XOR_EXPR:
6790 case PLUS_EXPR:
6791 case MULT_EXPR:
6792 case MIN_EXPR:
6793 case MAX_EXPR:
6794 return true;
6796 default:
6797 break;
6799 return false;
6802 /* Return true if CODE represents a commutative tree code. Otherwise
6803 return false. */
6804 bool
6805 commutative_tree_code (enum tree_code code)
6807 switch (code)
6809 case PLUS_EXPR:
6810 case MULT_EXPR:
6811 case MULT_HIGHPART_EXPR:
6812 case MIN_EXPR:
6813 case MAX_EXPR:
6814 case BIT_IOR_EXPR:
6815 case BIT_XOR_EXPR:
6816 case BIT_AND_EXPR:
6817 case NE_EXPR:
6818 case EQ_EXPR:
6819 case UNORDERED_EXPR:
6820 case ORDERED_EXPR:
6821 case UNEQ_EXPR:
6822 case LTGT_EXPR:
6823 case TRUTH_AND_EXPR:
6824 case TRUTH_XOR_EXPR:
6825 case TRUTH_OR_EXPR:
6826 case WIDEN_MULT_EXPR:
6827 case VEC_WIDEN_MULT_HI_EXPR:
6828 case VEC_WIDEN_MULT_LO_EXPR:
6829 case VEC_WIDEN_MULT_EVEN_EXPR:
6830 case VEC_WIDEN_MULT_ODD_EXPR:
6831 return true;
6833 default:
6834 break;
6836 return false;
6839 /* Return true if CODE represents a ternary tree code for which the
6840 first two operands are commutative. Otherwise return false. */
6841 bool
6842 commutative_ternary_tree_code (enum tree_code code)
6844 switch (code)
6846 case WIDEN_MULT_PLUS_EXPR:
6847 case WIDEN_MULT_MINUS_EXPR:
6848 case DOT_PROD_EXPR:
6849 return true;
6851 default:
6852 break;
6854 return false;
6857 /* Returns true if CODE can overflow. */
6859 bool
6860 operation_can_overflow (enum tree_code code)
6862 switch (code)
6864 case PLUS_EXPR:
6865 case MINUS_EXPR:
6866 case MULT_EXPR:
6867 case LSHIFT_EXPR:
6868 /* Can overflow in various ways. */
6869 return true;
6870 case TRUNC_DIV_EXPR:
6871 case EXACT_DIV_EXPR:
6872 case FLOOR_DIV_EXPR:
6873 case CEIL_DIV_EXPR:
6874 /* For INT_MIN / -1. */
6875 return true;
6876 case NEGATE_EXPR:
6877 case ABS_EXPR:
6878 /* For -INT_MIN. */
6879 return true;
6880 default:
6881 /* These operators cannot overflow. */
6882 return false;
6886 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6887 if -ftrapv doesn't generate trapping insns for CODE. */
6889 bool
6890 operation_no_trapping_overflow (tree type, enum tree_code code)
6892 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
6894 /* We don't generate instructions that trap on overflow for complex or vector
6895 types. */
6896 if (!INTEGRAL_TYPE_P (type))
6897 return true;
6899 if (!TYPE_OVERFLOW_TRAPS (type))
6900 return true;
6902 switch (code)
6904 case PLUS_EXPR:
6905 case MINUS_EXPR:
6906 case MULT_EXPR:
6907 case NEGATE_EXPR:
6908 case ABS_EXPR:
6909 /* These operators can overflow, and -ftrapv generates trapping code for
6910 these. */
6911 return false;
6912 case TRUNC_DIV_EXPR:
6913 case EXACT_DIV_EXPR:
6914 case FLOOR_DIV_EXPR:
6915 case CEIL_DIV_EXPR:
6916 case LSHIFT_EXPR:
6917 /* These operators can overflow, but -ftrapv does not generate trapping
6918 code for these. */
6919 return true;
6920 default:
6921 /* These operators cannot overflow. */
6922 return true;
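/* For example, with -ftrapv and a signed integral TYPE,
   operation_can_overflow (PLUS_EXPR) is true and
   operation_no_trapping_overflow (type, PLUS_EXPR) is false, whereas for
   TRUNC_DIV_EXPR (which can overflow only for INT_MIN / -1) the latter
   is true because -ftrapv emits no trapping code for division.  A pass
   that must not introduce new traps therefore checks
   operation_no_trapping_overflow rather than operation_can_overflow.  */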
6926 /* Constructors for pointer, array and function types.
6927 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6928 constructed by language-dependent code, not here.) */
6930 /* Construct, lay out and return the type of pointers to TO_TYPE with
6931 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6932 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6933 indicate this type can reference all of memory. If such a type has
6934 already been constructed, reuse it. */
6936 tree
6937 build_pointer_type_for_mode (tree to_type, machine_mode mode,
6938 bool can_alias_all)
6940 tree t;
6941 bool could_alias = can_alias_all;
6943 if (to_type == error_mark_node)
6944 return error_mark_node;
6946 if (mode == VOIDmode)
6948 addr_space_t as = TYPE_ADDR_SPACE (to_type);
6949 mode = targetm.addr_space.pointer_mode (as);
6952 /* If the pointed-to type has the may_alias attribute set, force
6953 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6954 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
6955 can_alias_all = true;
6957 /* In some cases, languages will have things that aren't a POINTER_TYPE
6958 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6959 In that case, return that type without regard to the rest of our
6960 operands.
6962 ??? This is a kludge, but consistent with the way this function has
6963 always operated and there doesn't seem to be a good way to avoid this
6964 at the moment. */
6965 if (TYPE_POINTER_TO (to_type) != 0
6966 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
6967 return TYPE_POINTER_TO (to_type);
6969 /* First, if we already have a type for pointers to TO_TYPE and it's
6970 the proper mode, use it. */
6971 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
6972 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
6973 return t;
6975 t = make_node (POINTER_TYPE);
6977 TREE_TYPE (t) = to_type;
6978 SET_TYPE_MODE (t, mode);
6979 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
6980 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
6981 TYPE_POINTER_TO (to_type) = t;
6983 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6984 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
6985 SET_TYPE_STRUCTURAL_EQUALITY (t);
6986 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
6987 TYPE_CANONICAL (t)
6988 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
6989 mode, false);
6991 /* Lay out the type. This function has many callers that are concerned
6992 with expression-construction, and this simplifies them all. */
6993 layout_type (t);
6995 return t;
6998 /* By default build pointers in ptr_mode. */
7000 tree
7001 build_pointer_type (tree to_type)
7003 return build_pointer_type_for_mode (to_type, VOIDmode, false);
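/* A sketch of the sharing behaviour: repeated requests for a pointer to
   the same type return the same node, found on the TYPE_POINTER_TO /
   TYPE_NEXT_PTR_TO chain rather than rebuilt:

     tree p1 = build_pointer_type (integer_type_node);
     tree p2 = build_pointer_type (integer_type_node);
     gcc_assert (p1 == p2);

   A request with a different mode or a different TYPE_REF_CAN_ALIAS_ALL
   setting creates (and chains) a distinct POINTER_TYPE node.  */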
7006 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7008 tree
7009 build_reference_type_for_mode (tree to_type, machine_mode mode,
7010 bool can_alias_all)
7012 tree t;
7013 bool could_alias = can_alias_all;
7015 if (to_type == error_mark_node)
7016 return error_mark_node;
7018 if (mode == VOIDmode)
7020 addr_space_t as = TYPE_ADDR_SPACE (to_type);
7021 mode = targetm.addr_space.pointer_mode (as);
7024 /* If the pointed-to type has the may_alias attribute set, force
7025 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7026 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7027 can_alias_all = true;
7029 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7030 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7031 In that case, return that type without regard to the rest of our
7032 operands.
7034 ??? This is a kludge, but consistent with the way this function has
7035 always operated and there doesn't seem to be a good way to avoid this
7036 at the moment. */
7037 if (TYPE_REFERENCE_TO (to_type) != 0
7038 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7039 return TYPE_REFERENCE_TO (to_type);
7041 /* First, if we already have a type for pointers to TO_TYPE and it's
7042 the proper mode, use it. */
7043 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7044 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7045 return t;
7047 t = make_node (REFERENCE_TYPE);
7049 TREE_TYPE (t) = to_type;
7050 SET_TYPE_MODE (t, mode);
7051 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7052 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7053 TYPE_REFERENCE_TO (to_type) = t;
7055 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7056 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7057 SET_TYPE_STRUCTURAL_EQUALITY (t);
7058 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7059 TYPE_CANONICAL (t)
7060 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7061 mode, false);
7063 layout_type (t);
7065 return t;
7069 /* Build the node for the type of references-to-TO_TYPE by default
7070 in ptr_mode. */
7072 tree
7073 build_reference_type (tree to_type)
7075 return build_reference_type_for_mode (to_type, VOIDmode, false);
7078 #define MAX_INT_CACHED_PREC \
7079 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7080 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7082 static void
7083 clear_nonstandard_integer_type_cache (void)
7085 for (size_t i = 0 ; i < 2 * MAX_INT_CACHED_PREC + 2 ; i++)
7087 nonstandard_integer_type_cache[i] = NULL;
7091 /* Builds a signed or unsigned integer type of precision PRECISION.
7092 Used for C bitfields whose precision does not match that of
7093 built-in target types. */
7094 tree
7095 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7096 int unsignedp)
7098 tree itype, ret;
7100 if (unsignedp)
7101 unsignedp = MAX_INT_CACHED_PREC + 1;
7103 if (precision <= MAX_INT_CACHED_PREC)
7105 itype = nonstandard_integer_type_cache[precision + unsignedp];
7106 if (itype)
7107 return itype;
7110 itype = make_node (INTEGER_TYPE);
7111 TYPE_PRECISION (itype) = precision;
7113 if (unsignedp)
7114 fixup_unsigned_type (itype);
7115 else
7116 fixup_signed_type (itype);
7118 inchash::hash hstate;
7119 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7120 ret = type_hash_canon (hstate.end (), itype);
7121 if (precision <= MAX_INT_CACHED_PREC)
7122 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7124 return ret;
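/* For instance, the type of a C bit-field declared "unsigned int x : 3"
   can be obtained as (sketch):

     tree t = build_nonstandard_integer_type (3, 1);
     // TYPE_PRECISION (t) == 3 and TYPE_UNSIGNED (t) is set

   Results for precisions up to MAX_INT_CACHED_PREC are memoized in
   nonstandard_integer_type_cache above.  */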
7127 #define MAX_BOOL_CACHED_PREC \
7128 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7129 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
7131 /* Builds a boolean type of precision PRECISION.
7132 Used for boolean vectors to choose proper vector element size. */
7133 tree
7134 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
7136 tree type;
7138 if (precision <= MAX_BOOL_CACHED_PREC)
7140 type = nonstandard_boolean_type_cache[precision];
7141 if (type)
7142 return type;
7145 type = make_node (BOOLEAN_TYPE);
7146 TYPE_PRECISION (type) = precision;
7147 fixup_signed_type (type);
7149 if (precision <= MAX_INT_CACHED_PREC)
7150 nonstandard_boolean_type_cache[precision] = type;
7152 return type;
7155 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7156 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7157 is true, reuse such a type that has already been constructed. */
7159 static tree
7160 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7162 tree itype = make_node (INTEGER_TYPE);
7164 TREE_TYPE (itype) = type;
7166 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7167 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7169 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7170 SET_TYPE_MODE (itype, TYPE_MODE (type));
7171 TYPE_SIZE (itype) = TYPE_SIZE (type);
7172 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7173 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
7174 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7175 SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));
7177 if (!shared)
7178 return itype;
7180 if ((TYPE_MIN_VALUE (itype)
7181 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7182 || (TYPE_MAX_VALUE (itype)
7183 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7185 /* Since we cannot reliably merge this type, we need to compare it using
7186 structural equality checks. */
7187 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7188 return itype;
7191 hashval_t hash = type_hash_canon_hash (itype);
7192 itype = type_hash_canon (hash, itype);
7194 return itype;
7197 /* Wrapper around build_range_type_1 with SHARED set to true. */
7199 tree
7200 build_range_type (tree type, tree lowval, tree highval)
7202 return build_range_type_1 (type, lowval, highval, true);
7205 /* Wrapper around build_range_type_1 with SHARED set to false. */
7207 tree
7208 build_nonshared_range_type (tree type, tree lowval, tree highval)
7210 return build_range_type_1 (type, lowval, highval, false);
7213 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7214 MAXVAL should be the maximum value in the domain
7215 (one less than the length of the array).
7217 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7218 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7219 The limit exists because the result is a signed type and we don't handle
7220 sizes that use more than one HOST_WIDE_INT. */
7222 tree
7223 build_index_type (tree maxval)
7225 return build_range_type (sizetype, size_zero_node, maxval);
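/* Sketch: the domain of a C array "int a[10]" is built as

     tree domain = build_index_type (size_int (9));

   i.e. a sizetype range 0 .. 9, while a language with explicit lower
   bounds (Fortran or Ada, say) would call build_range_type directly:

     tree dom1 = build_range_type (sizetype, size_int (1), size_int (10));
   */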
7228 /* Return true if the debug information for TYPE, a subtype, should be emitted
7229 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7230 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7231 debug info and doesn't reflect the source code. */
7233 bool
7234 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7236 tree base_type = TREE_TYPE (type), low, high;
7238 /* Subrange types have a base type which is an integral type. */
7239 if (!INTEGRAL_TYPE_P (base_type))
7240 return false;
7242 /* Get the real bounds of the subtype. */
7243 if (lang_hooks.types.get_subrange_bounds)
7244 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7245 else
7247 low = TYPE_MIN_VALUE (type);
7248 high = TYPE_MAX_VALUE (type);
7251 /* If the type and its base type have the same representation and the same
7252 name, then the type is not a subrange but a copy of the base type. */
7253 if ((TREE_CODE (base_type) == INTEGER_TYPE
7254 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7255 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7256 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7257 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7258 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7259 return false;
7261 if (lowval)
7262 *lowval = low;
7263 if (highval)
7264 *highval = high;
7265 return true;
7268 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7269 and number of elements specified by the range of values of INDEX_TYPE.
7270 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7271 If SHARED is true, reuse such a type that has already been constructed.
7272 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7274 tree
7275 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
7276 bool shared, bool set_canonical)
7278 tree t;
7280 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7282 error ("arrays of functions are not meaningful");
7283 elt_type = integer_type_node;
7286 t = make_node (ARRAY_TYPE);
7287 TREE_TYPE (t) = elt_type;
7288 TYPE_DOMAIN (t) = index_type;
7289 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7290 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
7291 layout_type (t);
7293 if (shared)
7295 hashval_t hash = type_hash_canon_hash (t);
7296 t = type_hash_canon (hash, t);
7299 if (TYPE_CANONICAL (t) == t && set_canonical)
7301 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7302 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
7303 || in_lto_p)
7304 SET_TYPE_STRUCTURAL_EQUALITY (t);
7305 else if (TYPE_CANONICAL (elt_type) != elt_type
7306 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7307 TYPE_CANONICAL (t)
7308 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7309 index_type
7310 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7311 typeless_storage, shared, set_canonical);
7314 return t;
7317 /* Wrapper around build_array_type_1 with SHARED set to true. */
7319 tree
7320 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
7322 return
7323 build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
7326 /* Wrapper around build_array_type_1 with SHARED set to false. */
7328 tree
7329 build_nonshared_array_type (tree elt_type, tree index_type)
7331 return build_array_type_1 (elt_type, index_type, false, false, true);
7334 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7335 sizetype. */
7337 tree
7338 build_array_type_nelts (tree elt_type, poly_uint64 nelts)
7340 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
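/* So, assuming the default typeless_storage of false, the two calls
   below yield the same shared ARRAY_TYPE node via type_hash_canon:

     tree a1 = build_array_type (integer_type_node,
                                 build_index_type (size_int (9)));
     tree a2 = build_array_type_nelts (integer_type_node, 10);
     gcc_assert (a1 == a2);
   */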
7343 /* Recursively examines the array elements of TYPE until a non-array
7344 element type is found, and returns that element type. */
7346 tree
7347 strip_array_types (tree type)
7349 while (TREE_CODE (type) == ARRAY_TYPE)
7350 type = TREE_TYPE (type);
7352 return type;
7355 /* Computes the canonical argument types from the argument type list
7356 ARGTYPES.
7358 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7359 on entry to this function, or if any of the ARGTYPES are
7360 structural.
7362 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7363 true on entry to this function, or if any of the ARGTYPES are
7364 non-canonical.
7366 Returns a canonical argument list, which may be ARGTYPES when the
7367 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7368 true) or would not differ from ARGTYPES. */
7370 static tree
7371 maybe_canonicalize_argtypes (tree argtypes,
7372 bool *any_structural_p,
7373 bool *any_noncanonical_p)
7375 tree arg;
7376 bool any_noncanonical_argtypes_p = false;
7378 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7380 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7381 /* Fail gracefully by stating that the type is structural. */
7382 *any_structural_p = true;
7383 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7384 *any_structural_p = true;
7385 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7386 || TREE_PURPOSE (arg))
7387 /* If the argument has a default argument, we consider it
7388 non-canonical even though the type itself is canonical.
7389 That way, different variants of function and method types
7390 with default arguments will all point to the variant with
7391 no defaults as their canonical type. */
7392 any_noncanonical_argtypes_p = true;
7395 if (*any_structural_p)
7396 return argtypes;
7398 if (any_noncanonical_argtypes_p)
7400 /* Build the canonical list of argument types. */
7401 tree canon_argtypes = NULL_TREE;
7402 bool is_void = false;
7404 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7406 if (arg == void_list_node)
7407 is_void = true;
7408 else
7409 canon_argtypes = tree_cons (NULL_TREE,
7410 TYPE_CANONICAL (TREE_VALUE (arg)),
7411 canon_argtypes);
7414 canon_argtypes = nreverse (canon_argtypes);
7415 if (is_void)
7416 canon_argtypes = chainon (canon_argtypes, void_list_node);
7418 /* There is a non-canonical type. */
7419 *any_noncanonical_p = true;
7420 return canon_argtypes;
7423 /* The canonical argument types are the same as ARGTYPES. */
7424 return argtypes;
7427 /* Construct, lay out and return
7428 the type of functions returning type VALUE_TYPE
7429 given arguments of types ARG_TYPES.
7430 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7431 are data type nodes for the arguments of the function.
7432 NO_NAMED_ARGS_STDARG_P is true if this is a prototyped
7433 variable-arguments function with (...) prototype (no named arguments).
7434 If such a type has already been constructed, reuse it. */
7436 tree
7437 build_function_type (tree value_type, tree arg_types,
7438 bool no_named_args_stdarg_p)
7440 tree t;
7441 inchash::hash hstate;
7442 bool any_structural_p, any_noncanonical_p;
7443 tree canon_argtypes;
7445 gcc_assert (arg_types != error_mark_node);
7447 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7449 error ("function return type cannot be function");
7450 value_type = integer_type_node;
7453 /* Make a node of the sort we want. */
7454 t = make_node (FUNCTION_TYPE);
7455 TREE_TYPE (t) = value_type;
7456 TYPE_ARG_TYPES (t) = arg_types;
7457 if (no_named_args_stdarg_p)
7459 gcc_assert (arg_types == NULL_TREE);
7460 TYPE_NO_NAMED_ARGS_STDARG_P (t) = 1;
7463 /* If we already have such a type, use the old one. */
7464 hashval_t hash = type_hash_canon_hash (t);
7465 t = type_hash_canon (hash, t);
7467 /* Set up the canonical type. */
7468 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7469 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7470 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7471 &any_structural_p,
7472 &any_noncanonical_p);
7473 if (any_structural_p)
7474 SET_TYPE_STRUCTURAL_EQUALITY (t);
7475 else if (any_noncanonical_p)
7476 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7477 canon_argtypes);
7479 if (!COMPLETE_TYPE_P (t))
7480 layout_type (t);
7481 return t;
7484 /* Build a function type. The RETURN_TYPE is the type returned by the
7485 function. If VAARGS is set, no void_type_node is appended to the
7486 list. ARGP must always be terminated by a NULL_TREE. */
7488 static tree
7489 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
7491 tree t, args, last;
7493 t = va_arg (argp, tree);
7494 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
7495 args = tree_cons (NULL_TREE, t, args);
7497 if (vaargs)
7499 last = args;
7500 if (args != NULL_TREE)
7501 args = nreverse (args);
7502 gcc_assert (last != void_list_node);
7504 else if (args == NULL_TREE)
7505 args = void_list_node;
7506 else
7508 last = args;
7509 args = nreverse (args);
7510 TREE_CHAIN (last) = void_list_node;
7512 args = build_function_type (return_type, args, vaargs && args == NULL_TREE);
7514 return args;
7517 /* Build a function type. The RETURN_TYPE is the type returned by the
7518 function. If additional arguments are provided, they are
7519 additional argument types. The list of argument types must always
7520 be terminated by NULL_TREE. */
7522 tree
7523 build_function_type_list (tree return_type, ...)
7525 tree args;
7526 va_list p;
7528 va_start (p, return_type);
7529 args = build_function_type_list_1 (false, return_type, p);
7530 va_end (p);
7531 return args;
7534 /* Build a variable argument function type. The RETURN_TYPE is the
7535 type returned by the function. If additional arguments are provided,
7536 they are additional argument types. The list of argument types must
7537 always be terminated by NULL_TREE. */
7539 tree
7540 build_varargs_function_type_list (tree return_type, ...)
7542 tree args;
7543 va_list p;
7545 va_start (p, return_type);
7546 args = build_function_type_list_1 (true, return_type, p);
7547 va_end (p);
7549 return args;
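/* A sketch of both list builders, e.g. for "void exit (int)" and a
   printf-like "int f (char *, ...)":

     tree exit_type
       = build_function_type_list (void_type_node, integer_type_node,
                                   NULL_TREE);
     tree printf_like_type
       = build_varargs_function_type_list (integer_type_node,
                                           build_pointer_type (char_type_node),
                                           NULL_TREE);

   The trailing NULL_TREE sentinel is mandatory; the non-varargs builder
   appends void_list_node itself, so callers never pass a trailing
   void_type_node.  */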
7552 /* Build a function type. RETURN_TYPE is the type returned by the
7553 function; VAARGS indicates whether the function takes varargs. The
7554 function takes N named arguments, the types of which are provided in
7555 ARG_TYPES. */
7557 static tree
7558 build_function_type_array_1 (bool vaargs, tree return_type, int n,
7559 tree *arg_types)
7561 int i;
7562 tree t = vaargs ? NULL_TREE : void_list_node;
7564 for (i = n - 1; i >= 0; i--)
7565 t = tree_cons (NULL_TREE, arg_types[i], t);
7567 return build_function_type (return_type, t, vaargs && n == 0);
7570 /* Build a function type. RETURN_TYPE is the type returned by the
7571 function. The function takes N named arguments, the types of which
7572 are provided in ARG_TYPES. */
7574 tree
7575 build_function_type_array (tree return_type, int n, tree *arg_types)
7577 return build_function_type_array_1 (false, return_type, n, arg_types);
7580 /* Build a variable argument function type. RETURN_TYPE is the type
7581 returned by the function. The function takes N named arguments, the
7582 types of which are provided in ARG_TYPES. */
7584 tree
7585 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
7587 return build_function_type_array_1 (true, return_type, n, arg_types);
7590 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7591 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7592 for the method. An implicit additional parameter (of type
7593 pointer-to-BASETYPE) is added to the ARGTYPES. */
7595 tree
7596 build_method_type_directly (tree basetype,
7597 tree rettype,
7598 tree argtypes)
7600 tree t;
7601 tree ptype;
7602 bool any_structural_p, any_noncanonical_p;
7603 tree canon_argtypes;
7605 /* Make a node of the sort we want. */
7606 t = make_node (METHOD_TYPE);
7608 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7609 TREE_TYPE (t) = rettype;
7610 ptype = build_pointer_type (basetype);
7612 /* The actual arglist for this function includes a "hidden" argument
7613 which is "this". Put it into the list of argument types. */
7614 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
7615 TYPE_ARG_TYPES (t) = argtypes;
7617 /* If we already have such a type, use the old one. */
7618 hashval_t hash = type_hash_canon_hash (t);
7619 t = type_hash_canon (hash, t);
7621 /* Set up the canonical type. */
7622 any_structural_p
7623 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7624 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
7625 any_noncanonical_p
7626 = (TYPE_CANONICAL (basetype) != basetype
7627 || TYPE_CANONICAL (rettype) != rettype);
7628 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
7629 &any_structural_p,
7630 &any_noncanonical_p);
7631 if (any_structural_p)
7632 SET_TYPE_STRUCTURAL_EQUALITY (t);
7633 else if (any_noncanonical_p)
7634 TYPE_CANONICAL (t)
7635 = build_method_type_directly (TYPE_CANONICAL (basetype),
7636 TYPE_CANONICAL (rettype),
7637 canon_argtypes);
7638 if (!COMPLETE_TYPE_P (t))
7639 layout_type (t);
7641 return t;
7644 /* Construct, lay out and return the type of methods belonging to class
7645 BASETYPE and whose arguments and values are described by TYPE.
7646 If that type exists already, reuse it.
7647 TYPE must be a FUNCTION_TYPE node. */
7649 tree
7650 build_method_type (tree basetype, tree type)
7652 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
7654 return build_method_type_directly (basetype,
7655 TREE_TYPE (type),
7656 TYPE_ARG_TYPES (type));
7659 /* Construct, lay out and return the type of offsets to a value
7660 of type TYPE, within an object of type BASETYPE.
7661 If a suitable offset type exists already, reuse it. */
7663 tree
7664 build_offset_type (tree basetype, tree type)
7666 tree t;
7668 /* Make a node of the sort we want. */
7669 t = make_node (OFFSET_TYPE);
7671 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
7672 TREE_TYPE (t) = type;
7674 /* If we already have such a type, use the old one. */
7675 hashval_t hash = type_hash_canon_hash (t);
7676 t = type_hash_canon (hash, t);
7678 if (!COMPLETE_TYPE_P (t))
7679 layout_type (t);
7681 if (TYPE_CANONICAL (t) == t)
7683 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
7684 || TYPE_STRUCTURAL_EQUALITY_P (type))
7685 SET_TYPE_STRUCTURAL_EQUALITY (t);
7686 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
7687 || TYPE_CANONICAL (type) != type)
7688 TYPE_CANONICAL (t)
7689 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
7690 TYPE_CANONICAL (type));
7693 return t;
7696 /* Create a complex type whose components are COMPONENT_TYPE.
7698 If NAMED is true, the type is given a TYPE_NAME. We do not always
7699 do so because this creates a DECL node and thus makes the DECL_UIDs
7700 dependent on the type canonicalization hashtable, which is GC-ed,
7701 so the DECL_UIDs would not be stable wrt garbage collection. */
7703 tree
7704 build_complex_type (tree component_type, bool named)
7706 gcc_assert (INTEGRAL_TYPE_P (component_type)
7707 || SCALAR_FLOAT_TYPE_P (component_type)
7708 || FIXED_POINT_TYPE_P (component_type));
7710 /* Make a node of the sort we want. */
7711 tree probe = make_node (COMPLEX_TYPE);
7713 TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);
7715 /* If we already have such a type, use the old one. */
7716 hashval_t hash = type_hash_canon_hash (probe);
7717 tree t = type_hash_canon (hash, probe);
7719 if (t == probe)
7721 /* We created a new type. The hash insertion will have laid
7722 out the type. We need to check the canonicalization and
7723 maybe set the name. */
7724 gcc_checking_assert (COMPLETE_TYPE_P (t)
7725 && !TYPE_NAME (t)
7726 && TYPE_CANONICAL (t) == t);
7728 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
7729 SET_TYPE_STRUCTURAL_EQUALITY (t);
7730 else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
7731 TYPE_CANONICAL (t)
7732 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);
7734 /* We need to create a name, since complex is a fundamental type. */
7735 if (named)
7737 const char *name = NULL;
7739 if (TREE_TYPE (t) == char_type_node)
7740 name = "complex char";
7741 else if (TREE_TYPE (t) == signed_char_type_node)
7742 name = "complex signed char";
7743 else if (TREE_TYPE (t) == unsigned_char_type_node)
7744 name = "complex unsigned char";
7745 else if (TREE_TYPE (t) == short_integer_type_node)
7746 name = "complex short int";
7747 else if (TREE_TYPE (t) == short_unsigned_type_node)
7748 name = "complex short unsigned int";
7749 else if (TREE_TYPE (t) == integer_type_node)
7750 name = "complex int";
7751 else if (TREE_TYPE (t) == unsigned_type_node)
7752 name = "complex unsigned int";
7753 else if (TREE_TYPE (t) == long_integer_type_node)
7754 name = "complex long int";
7755 else if (TREE_TYPE (t) == long_unsigned_type_node)
7756 name = "complex long unsigned int";
7757 else if (TREE_TYPE (t) == long_long_integer_type_node)
7758 name = "complex long long int";
7759 else if (TREE_TYPE (t) == long_long_unsigned_type_node)
7760 name = "complex long long unsigned int";
7762 if (name != NULL)
7763 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
7764 get_identifier (name), t);
7768 return build_qualified_type (t, TYPE_QUALS (component_type));
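/* Sketch: the C type _Complex double is obtained as

     tree cd = build_complex_type (double_type_node);

   which, thanks to type_hash_canon and the COMPLEX_TYPE carve-out in
   type_cache_hasher::equal above (TYPE_NAME is ignored for complex
   types), yields the same node as the pre-built
   complex_double_type_node.  */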
7771 /* If TYPE is a real or complex floating-point type and the target
7772 does not directly support arithmetic on TYPE then return the wider
7773 type to be used for arithmetic on TYPE. Otherwise, return
7774 NULL_TREE. */
7776 tree
7777 excess_precision_type (tree type)
7779 /* The target can give two different responses to the question of
7780 which excess precision mode it would like depending on whether we
7781 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7783 enum excess_precision_type requested_type
7784 = (flag_excess_precision == EXCESS_PRECISION_FAST
7785 ? EXCESS_PRECISION_TYPE_FAST
7786 : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
7787 ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));
7789 enum flt_eval_method target_flt_eval_method
7790 = targetm.c.excess_precision (requested_type);
7792 /* The target should not ask for unpredictable float evaluation (though
7793 it might advertise that implicitly the evaluation is unpredictable,
7794 but we don't care about that here; it will have been reported
7795 elsewhere). If it does ask for unpredictable evaluation, we have
7796 nothing to do here. */
7797 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
7799 /* Nothing to do. The target has asked for all types we know about
7800 to be computed with their native precision and range. */
7801 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
7802 return NULL_TREE;
7804 /* The target will promote this type in a target-dependent way, so excess
7805 precision ought to leave it alone. */
7806 if (targetm.promoted_type (type) != NULL_TREE)
7807 return NULL_TREE;
7809 machine_mode float16_type_mode = (float16_type_node
7810 ? TYPE_MODE (float16_type_node)
7811 : VOIDmode);
7812 machine_mode bfloat16_type_mode = (bfloat16_type_node
7813 ? TYPE_MODE (bfloat16_type_node)
7814 : VOIDmode);
7815 machine_mode float_type_mode = TYPE_MODE (float_type_node);
7816 machine_mode double_type_mode = TYPE_MODE (double_type_node);
7818 switch (TREE_CODE (type))
7820 case REAL_TYPE:
7822 machine_mode type_mode = TYPE_MODE (type);
7823 switch (target_flt_eval_method)
7825 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7826 if (type_mode == float16_type_mode
7827 || type_mode == bfloat16_type_mode)
7828 return float_type_node;
7829 break;
7830 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7831 if (type_mode == float16_type_mode
7832 || type_mode == bfloat16_type_mode
7833 || type_mode == float_type_mode)
7834 return double_type_node;
7835 break;
7836 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7837 if (type_mode == float16_type_mode
7838 || type_mode == bfloat16_type_mode
7839 || type_mode == float_type_mode
7840 || type_mode == double_type_mode)
7841 return long_double_type_node;
7842 break;
7843 default:
7844 gcc_unreachable ();
7846 break;
7848 case COMPLEX_TYPE:
7850 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
7851 return NULL_TREE;
7852 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
7853 switch (target_flt_eval_method)
7855 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
7856 if (type_mode == float16_type_mode
7857 || type_mode == bfloat16_type_mode)
7858 return complex_float_type_node;
7859 break;
7860 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
7861 if (type_mode == float16_type_mode
7862 || type_mode == bfloat16_type_mode
7863 || type_mode == float_type_mode)
7864 return complex_double_type_node;
7865 break;
7866 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
7867 if (type_mode == float16_type_mode
7868 || type_mode == bfloat16_type_mode
7869 || type_mode == float_type_mode
7870 || type_mode == double_type_mode)
7871 return complex_long_double_type_node;
7872 break;
7873 default:
7874 gcc_unreachable ();
7876 break;
7878 default:
7879 break;
7882 return NULL_TREE;
7885 /* Return OP, stripped of any conversions to wider types as much as is safe.
7886 Converting the value back to OP's type makes a value equivalent to OP.
7888 If FOR_TYPE is nonzero, we return a value which, if converted to
7889 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7891 OP must have integer, real or enumeral type. Pointers are not allowed!
7893 There are some cases where the obvious value we could return
7894 would regenerate to OP if converted to OP's type,
7895 but would not extend like OP to wider types.
7896 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7897 For example, if OP is (unsigned short)(signed char)-1,
7898 we avoid returning (signed char)-1 if FOR_TYPE is int,
7899 even though extending that to an unsigned short would regenerate OP,
7900 since the result of extending (signed char)-1 to (int)
7901 is different from (int) OP. */
7903 tree
7904 get_unwidened (tree op, tree for_type)
7906 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7907 tree type = TREE_TYPE (op);
7908 unsigned final_prec
7909 = TYPE_PRECISION (for_type != 0 ? for_type : type);
7910 int uns
7911 = (for_type != 0 && for_type != type
7912 && final_prec > TYPE_PRECISION (type)
7913 && TYPE_UNSIGNED (type));
7914 tree win = op;
7916 while (CONVERT_EXPR_P (op))
7918 int bitschange;
7920 /* TYPE_PRECISION on vector types has different meaning
7921 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7922 so avoid them here. */
7923 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
7924 break;
7926 bitschange = TYPE_PRECISION (TREE_TYPE (op))
7927 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
7929 /* Truncations are many-one so cannot be removed.
7930 Unless we are later going to truncate down even farther. */
7931 if (bitschange < 0
7932 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
7933 break;
7935 /* See what's inside this conversion. If we decide to strip it,
7936 we will set WIN. */
7937 op = TREE_OPERAND (op, 0);
7939 /* If we have not stripped any zero-extensions (uns is 0),
7940 we can strip any kind of extension.
7941 If we have previously stripped a zero-extension,
7942 only zero-extensions can safely be stripped.
7943 Any extension can be stripped if the bits it would produce
7944 are all going to be discarded later by truncating to FOR_TYPE. */
7946 if (bitschange > 0)
7948 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
7949 win = op;
7950 /* TYPE_UNSIGNED says whether this is a zero-extension.
7951 Let's avoid computing it if it does not affect WIN
7952 and if UNS will not be needed again. */
7953 if ((uns
7954 || CONVERT_EXPR_P (op))
7955 && TYPE_UNSIGNED (TREE_TYPE (op)))
7957 uns = 1;
7958 win = op;
7963 /* If we finally reach a constant see if it fits in sth smaller and
7964 in that case convert it. */
7965 if (TREE_CODE (win) == INTEGER_CST)
7967 tree wtype = TREE_TYPE (win);
7968 unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
7969 if (for_type)
7970 prec = MAX (prec, final_prec);
7971 if (prec < TYPE_PRECISION (wtype))
7973 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
7974 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
7975 win = fold_convert (t, win);
7979 return win;
7982 /* Return OP or a simpler expression for a narrower value
7983 which can be sign-extended or zero-extended to give back OP.
7984 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
7985 or 0 if the value should be sign-extended. */
7987 tree
7988 get_narrower (tree op, int *unsignedp_ptr)
7990 int uns = 0;
7991 int first = 1;
7992 tree win = op;
7993 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
7995 if (TREE_CODE (op) == COMPOUND_EXPR)
7998 op = TREE_OPERAND (op, 1);
7999 while (TREE_CODE (op) == COMPOUND_EXPR);
8000 tree ret = get_narrower (op, unsignedp_ptr);
8001 if (ret == op)
8002 return win;
8003 auto_vec <tree, 16> v;
8004 unsigned int i;
8005 for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
8006 op = TREE_OPERAND (op, 1))
8007 v.safe_push (op);
8008 FOR_EACH_VEC_ELT_REVERSE (v, i, op)
8009 ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
8010 TREE_TYPE (ret), TREE_OPERAND (op, 0),
8011 ret);
8012 return ret;
8014 while (TREE_CODE (op) == NOP_EXPR)
8016 int bitschange
8017 = (TYPE_PRECISION (TREE_TYPE (op))
8018 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8020 /* Truncations are many-one so cannot be removed. */
8021 if (bitschange < 0)
8022 break;
8024 /* See what's inside this conversion. If we decide to strip it,
8025 we will set WIN. */
8027 if (bitschange > 0)
8029 op = TREE_OPERAND (op, 0);
8030 /* An extension: the outermost one can be stripped,
8031 but remember whether it is zero or sign extension. */
8032 if (first)
8033 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8034 /* Otherwise, if a sign extension has been stripped,
8035 only sign extensions can now be stripped;
8036 if a zero extension has been stripped, only zero-extensions. */
8037 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8038 break;
8039 first = 0;
8041 else /* bitschange == 0 */
8043 /* A change in nominal type can always be stripped, but we must
8044 preserve the unsignedness. */
8045 if (first)
8046 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8047 first = 0;
8048 op = TREE_OPERAND (op, 0);
8049 /* Keep trying to narrow, but don't assign op to win if it
8050 would turn an integral type into something else. */
8051 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8052 continue;
8055 win = op;
8058 if (TREE_CODE (op) == COMPONENT_REF
8059 /* Since type_for_size always gives an integer type. */
8060 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8061 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8062 /* Ensure field is laid out already. */
8063 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8064 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8066 unsigned HOST_WIDE_INT innerprec
8067 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8068 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8069 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8070 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8072 /* We can get this structure field in a narrower type that fits it,
8073 but the resulting extension to its nominal type (a fullword type)
8074 must satisfy the same conditions as for other extensions.
8076 Do this only for fields that are aligned (not bit-fields),
8077      because there is no advantage in doing this when bit-field
8078      insns will be used anyway.  */
8080 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8081 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8082 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8083 && type != 0)
8085 if (first)
8086 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8087 win = fold_convert (type, op);
8091 *unsignedp_ptr = uns;
8092 return win;
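/* Usage sketch (illustrative): for GENERIC like

     (int) some_unsigned_char_value

   get_narrower hands back the unsigned char operand and sets *UNSIGNEDP_PTR
   to 1, since the stripped conversion was a zero extension; a caller that
   wants to operate in the narrower type can then do so and re-extend the
   result itself.  */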
8095 /* Return true if integer constant C has a value that is permissible
8096 for TYPE, an integral type. */
8098 bool
8099 int_fits_type_p (const_tree c, const_tree type)
8101 tree type_low_bound, type_high_bound;
8102 bool ok_for_low_bound, ok_for_high_bound;
8103 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8105 /* Non-standard boolean types can have arbitrary precision but various
8106 transformations assume that they can only take values 0 and +/-1. */
8107 if (TREE_CODE (type) == BOOLEAN_TYPE)
8108 return wi::fits_to_boolean_p (wi::to_wide (c), type);
8110 retry:
8111 type_low_bound = TYPE_MIN_VALUE (type);
8112 type_high_bound = TYPE_MAX_VALUE (type);
8114 /* If at least one bound of the type is a constant integer, we can check
8115 ourselves and maybe make a decision. If no such decision is possible, but
8116 this type is a subtype, try checking against that. Otherwise, use
8117 fits_to_tree_p, which checks against the precision.
8119 Compute the status for each possibly constant bound, and return if we see
8120    one does not match.  Use ok_for_xxx_bound to record whether the corresponding
8121    bound is a known constant that C satisfies; if C violates a constant bound,
8122    return false immediately.  */
8124 /* Check if c >= type_low_bound. */
8125 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8127 if (tree_int_cst_lt (c, type_low_bound))
8128 return false;
8129 ok_for_low_bound = true;
8131 else
8132 ok_for_low_bound = false;
8134 /* Check if c <= type_high_bound. */
8135 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8137 if (tree_int_cst_lt (type_high_bound, c))
8138 return false;
8139 ok_for_high_bound = true;
8141 else
8142 ok_for_high_bound = false;
8144 /* If the constant fits both bounds, the result is known. */
8145 if (ok_for_low_bound && ok_for_high_bound)
8146 return true;
8148 /* Perform some generic filtering which may allow making a decision
8149 even if the bounds are not constant. First, negative integers
8150      never fit in unsigned types.  */
8151 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
8152 return false;
8154 /* Second, narrower types always fit in wider ones. */
8155 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8156 return true;
8158 /* Third, unsigned integers with top bit set never fit signed types. */
8159 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8161 int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
8162 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8164 /* When a tree_cst is converted to a wide-int, the precision
8165 is taken from the type. However, if the precision of the
8166 mode underneath the type is smaller than that, it is
8167 possible that the value will not fit. The test below
8168 fails if any bit is set between the sign bit of the
8169 underlying mode and the top bit of the type. */
8170 if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
8171 return false;
8173 else if (wi::neg_p (wi::to_wide (c)))
8174 return false;
8177   /* If we haven't been able to decide at this point, there is nothing more we
8178 can check ourselves here. Look at the base type if we have one and it
8179 has the same precision. */
8180 if (TREE_CODE (type) == INTEGER_TYPE
8181 && TREE_TYPE (type) != 0
8182 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8184 type = TREE_TYPE (type);
8185 goto retry;
8188 /* Or to fits_to_tree_p, if nothing else. */
8189 return wi::fits_to_tree_p (wi::to_wide (c), type);
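/* Usage sketch (illustrative, on a target with 8-bit signed char and
   16-bit short):

     tree c = build_int_cst (integer_type_node, 300);
     int_fits_type_p (c, signed_char_type_node);     // false: 300 > 127
     int_fits_type_p (c, short_integer_type_node);   // true

   The decision is made from C's value against TYPE's bounds and
   precision.  */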
8192 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8193 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8194 represented (assuming two's-complement arithmetic) within the bit
8195 precision of the type are returned instead. */
8197 void
8198 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8200 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8201 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8202 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
8203 else
8205 if (TYPE_UNSIGNED (type))
8206 mpz_set_ui (min, 0);
8207 else
8209 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8210 wi::to_mpz (mn, min, SIGNED);
8214 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8215 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8216 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
8217 else
8219 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8220 wi::to_mpz (mn, max, TYPE_SIGN (type));
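/* Usage sketch (illustrative):

     mpz_t lo, hi;
     mpz_init (lo);
     mpz_init (hi);
     get_type_static_bounds (unsigned_char_type_node, lo, hi);
     // lo == 0 and hi == 255 for an 8-bit unsigned char
     mpz_clear (lo);
     mpz_clear (hi);

   For pointer types or types with non-constant bounds, the extreme values
   representable in the type's precision are returned instead.  */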
8224 /* Return true if VAR is an automatic variable. */
8226 bool
8227 auto_var_p (const_tree var)
8229 return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
8230 || TREE_CODE (var) == PARM_DECL)
8231 && ! TREE_STATIC (var))
8232 || TREE_CODE (var) == RESULT_DECL);
8235 /* Return true if VAR is an automatic variable defined in function FN. */
8237 bool
8238 auto_var_in_fn_p (const_tree var, const_tree fn)
8240 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8241 && (auto_var_p (var)
8242 || TREE_CODE (var) == LABEL_DECL));
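/* Illustrative: given

     int f (int p) { int l; static int s; lab: ...; }

   auto_var_p is true for the PARM_DECL P and the local VAR_DECL L but false
   for the TREE_STATIC S; auto_var_in_fn_p additionally requires DECL_CONTEXT
   to be F and also accepts F's LABEL_DECL LAB.  */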
8245 /* Subprogram of the following function.  Called by walk_tree.
8247 Return *TP if it is an automatic variable or parameter of the
8248 function passed in as DATA. */
8250 static tree
8251 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8253 tree fn = (tree) data;
8255 if (TYPE_P (*tp))
8256 *walk_subtrees = 0;
8258 else if (DECL_P (*tp)
8259 && auto_var_in_fn_p (*tp, fn))
8260 return *tp;
8262 return NULL_TREE;
8265 /* Returns true if T is, contains, or refers to a type with variable
8266 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8267 arguments, but not the return type. If FN is nonzero, only return
8268 true if a modifier of the type or position of FN is a variable or
8269 parameter inside FN.
8271 This concept is more general than that of C99 'variably modified types':
8272 in C99, a struct type is never variably modified because a VLA may not
8273    appear as a structure member.  However, in GNU C, code like:
8275 struct S { int i[f()]; };
8277 is valid, and other languages may define similar constructs. */
8279 bool
8280 variably_modified_type_p (tree type, tree fn)
8282 tree t;
8284   /* Test if T is either a variable (if FN is zero) or an expression containing
8285 a variable in FN. If TYPE isn't gimplified, return true also if
8286 gimplify_one_sizepos would gimplify the expression into a local
8287 variable. */
8288 #define RETURN_TRUE_IF_VAR(T) \
8289 do { tree _t = (T); \
8290 if (_t != NULL_TREE \
8291 && _t != error_mark_node \
8292 && !CONSTANT_CLASS_P (_t) \
8293 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8294 && (!fn \
8295 || (!TYPE_SIZES_GIMPLIFIED (type) \
8296 && (TREE_CODE (_t) != VAR_DECL \
8297 && !CONTAINS_PLACEHOLDER_P (_t))) \
8298 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8299 return true; } while (0)
8301 if (type == error_mark_node)
8302 return false;
8304 /* If TYPE itself has variable size, it is variably modified. */
8305 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8306 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8308 switch (TREE_CODE (type))
8310 case POINTER_TYPE:
8311 case REFERENCE_TYPE:
8312 case VECTOR_TYPE:
8313       /* Ada can have pointer types referring to themselves indirectly.  */
8314 if (TREE_VISITED (type))
8315 return false;
8316 TREE_VISITED (type) = true;
8317 if (variably_modified_type_p (TREE_TYPE (type), fn))
8319 TREE_VISITED (type) = false;
8320 return true;
8322 TREE_VISITED (type) = false;
8323 break;
8325 case FUNCTION_TYPE:
8326 case METHOD_TYPE:
8327 /* If TYPE is a function type, it is variably modified if the
8328 return type is variably modified. */
8329 if (variably_modified_type_p (TREE_TYPE (type), fn))
8330 return true;
8331 break;
8333 case INTEGER_TYPE:
8334 case REAL_TYPE:
8335 case FIXED_POINT_TYPE:
8336 case ENUMERAL_TYPE:
8337 case BOOLEAN_TYPE:
8338 /* Scalar types are variably modified if their end points
8339 aren't constant. */
8340 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8341 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8342 break;
8344 case RECORD_TYPE:
8345 case UNION_TYPE:
8346 case QUAL_UNION_TYPE:
8347 /* We can't see if any of the fields are variably-modified by the
8348 definition we normally use, since that would produce infinite
8349 recursion via pointers. */
8350 /* This is variably modified if some field's type is. */
8351 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8352 if (TREE_CODE (t) == FIELD_DECL)
8354 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8355 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8356 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8358 /* If the type is a qualified union, then the DECL_QUALIFIER
8359 of fields can also be an expression containing a variable. */
8360 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8361 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8363 /* If the field is a qualified union, then it's only a container
8364 for what's inside so we look into it. That's necessary in LTO
8365 mode because the sizes of the field tested above have been set
8366 to PLACEHOLDER_EXPRs by free_lang_data. */
8367 if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
8368 && variably_modified_type_p (TREE_TYPE (t), fn))
8369 return true;
8371 break;
8373 case ARRAY_TYPE:
8374 /* Do not call ourselves to avoid infinite recursion. This is
8375 variably modified if the element type is. */
8376 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8377 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8378 break;
8380 default:
8381 break;
8384 /* The current language may have other cases to check, but in general,
8385 all other types are not variably modified. */
8386 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8388 #undef RETURN_TRUE_IF_VAR
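/* Illustrative C-level examples:

     void g (int n)
     {
       int a[n];               // int[n] is variably modified
       int (*p)[n];            // pointer to a VLA: also variably modified
       struct s { int i[n]; }; // GNU extension: a variably modified struct
     }

   whereas int[10], int *, and struct { int i[10]; } are not.  */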
8391 /* Given a DECL or TYPE, return the scope in which it was declared, or
8392 NULL_TREE if there is no containing scope. */
8394 tree
8395 get_containing_scope (const_tree t)
8397 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8400 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8402 const_tree
8403 get_ultimate_context (const_tree decl)
8405 while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
8407 if (TREE_CODE (decl) == BLOCK)
8408 decl = BLOCK_SUPERCONTEXT (decl);
8409 else
8410 decl = get_containing_scope (decl);
8412 return decl;
8415 /* Return the innermost context enclosing DECL that is
8416 a FUNCTION_DECL, or zero if none. */
8418 tree
8419 decl_function_context (const_tree decl)
8421 tree context;
8423 if (TREE_CODE (decl) == ERROR_MARK)
8424 return 0;
8426 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8427 where we look up the function at runtime. Such functions always take
8428 a first argument of type 'pointer to real context'.
8430 C++ should really be fixed to use DECL_CONTEXT for the real context,
8431 and use something else for the "virtual context". */
8432 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
8433 context
8434 = TYPE_MAIN_VARIANT
8435 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8436 else
8437 context = DECL_CONTEXT (decl);
8439 while (context && TREE_CODE (context) != FUNCTION_DECL)
8441 if (TREE_CODE (context) == BLOCK)
8442 context = BLOCK_SUPERCONTEXT (context);
8443 else
8444 context = get_containing_scope (context);
8447 return context;
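/* Illustrative: for a GNU C nested function

     int outer (void) { int inner (void) { return 1; } return inner (); }

   decl_function_context of INNER's FUNCTION_DECL is OUTER's FUNCTION_DECL,
   while for a file-scope function or a global variable it is NULL_TREE.  */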
8450 /* Return the innermost context enclosing DECL that is
8451 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8452 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8454 tree
8455 decl_type_context (const_tree decl)
8457 tree context = DECL_CONTEXT (decl);
8459 while (context)
8460 switch (TREE_CODE (context))
8462 case NAMESPACE_DECL:
8463 case TRANSLATION_UNIT_DECL:
8464 return NULL_TREE;
8466 case RECORD_TYPE:
8467 case UNION_TYPE:
8468 case QUAL_UNION_TYPE:
8469 return context;
8471 case TYPE_DECL:
8472 case FUNCTION_DECL:
8473 context = DECL_CONTEXT (context);
8474 break;
8476 case BLOCK:
8477 context = BLOCK_SUPERCONTEXT (context);
8478 break;
8480 default:
8481 gcc_unreachable ();
8484 return NULL_TREE;
8487 /* CALL is a CALL_EXPR. Return the declaration for the function
8488 called, or NULL_TREE if the called function cannot be
8489 determined. */
8491 tree
8492 get_callee_fndecl (const_tree call)
8494 tree addr;
8496 if (call == error_mark_node)
8497 return error_mark_node;
8499 /* It's invalid to call this function with anything but a
8500 CALL_EXPR. */
8501 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8503 /* The first operand to the CALL is the address of the function
8504 called. */
8505 addr = CALL_EXPR_FN (call);
8507 /* If there is no function, return early. */
8508 if (addr == NULL_TREE)
8509 return NULL_TREE;
8511 STRIP_NOPS (addr);
8513 /* If this is a readonly function pointer, extract its initial value. */
8514 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8515 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8516 && DECL_INITIAL (addr))
8517 addr = DECL_INITIAL (addr);
8519 /* If the address is just `&f' for some function `f', then we know
8520 that `f' is being called. */
8521 if (TREE_CODE (addr) == ADDR_EXPR
8522 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8523 return TREE_OPERAND (addr, 0);
8525 /* We couldn't figure out what was being called. */
8526 return NULL_TREE;
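/* Illustrative: for a direct call f (1), CALL_EXPR_FN is &f and F's
   FUNCTION_DECL is returned; for a call through an ordinary function-pointer
   variable the result is NULL_TREE, unless the pointer is a readonly,
   non-volatile DECL with a DECL_INITIAL of the form &f, in which case the
   initializer is looked through as above.  */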
8529 /* Return true when CALL's arguments and return value match those of FNDECL,
8530 a decl of a builtin function. */
8532 static bool
8533 tree_builtin_call_types_compatible_p (const_tree call, tree fndecl)
8535 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
8537 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8538 if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
8539 fndecl = decl;
8541 bool gimple_form = (cfun && (cfun->curr_properties & PROP_gimple)) != 0;
8542 if (gimple_form
8543 ? !useless_type_conversion_p (TREE_TYPE (call),
8544 TREE_TYPE (TREE_TYPE (fndecl)))
8545 : (TYPE_MAIN_VARIANT (TREE_TYPE (call))
8546 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl)))))
8547 return false;
8549 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8550 unsigned nargs = call_expr_nargs (call);
8551 for (unsigned i = 0; i < nargs; ++i, targs = TREE_CHAIN (targs))
8553 /* Variadic args follow. */
8554 if (!targs)
8555 return true;
8556 tree arg = CALL_EXPR_ARG (call, i);
8557 tree type = TREE_VALUE (targs);
8558 if (gimple_form
8559 ? !useless_type_conversion_p (type, TREE_TYPE (arg))
8560 : TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (arg)))
8562 /* For pointer arguments be more forgiving, e.g. due to
8563 FILE * vs. fileptr_type_node, or say char * vs. const char *
8564 differences etc. */
8565 if (!gimple_form
8566 && POINTER_TYPE_P (type)
8567 && POINTER_TYPE_P (TREE_TYPE (arg))
8568 && tree_nop_conversion_p (type, TREE_TYPE (arg)))
8569 continue;
8570 /* char/short integral arguments are promoted to int
8571 by several frontends if targetm.calls.promote_prototypes
8572 is true. Allow such promotion too. */
8573 if (INTEGRAL_TYPE_P (type)
8574 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
8575 && INTEGRAL_TYPE_P (TREE_TYPE (arg))
8576 && !TYPE_UNSIGNED (TREE_TYPE (arg))
8577 && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
8578 && (gimple_form
8579 ? useless_type_conversion_p (integer_type_node,
8580 TREE_TYPE (arg))
8581 : tree_nop_conversion_p (integer_type_node,
8582 TREE_TYPE (arg))))
8583 continue;
8584 return false;
8587 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
8588 return false;
8589 return true;
8592 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8593 return the associated function code, otherwise return CFN_LAST. */
8595 combined_fn
8596 get_call_combined_fn (const_tree call)
8598 /* It's invalid to call this function with anything but a CALL_EXPR. */
8599 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8601 if (!CALL_EXPR_FN (call))
8602 return as_combined_fn (CALL_EXPR_IFN (call));
8604 tree fndecl = get_callee_fndecl (call);
8605 if (fndecl
8606 && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
8607 && tree_builtin_call_types_compatible_p (call, fndecl))
8608 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
8610 return CFN_LAST;
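/* Usage sketch (illustrative): callers typically dispatch on the result:

     switch (get_call_combined_fn (expr))
       {
       case CFN_BUILT_IN_MEMCPY:
         ... the callee is the memcpy builtin with compatible types ...
         break;
       case CFN_LAST:
         ... not a recognized builtin or internal function ...
         break;
       default:
         break;
       }  */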
8613 /* Comparator of indices based on tree_node_counts. */
8615 static int
8616 tree_nodes_cmp (const void *p1, const void *p2)
8618 const unsigned *n1 = (const unsigned *)p1;
8619 const unsigned *n2 = (const unsigned *)p2;
8621 return tree_node_counts[*n1] - tree_node_counts[*n2];
8624 /* Comparator of indices based on tree_code_counts. */
8626 static int
8627 tree_codes_cmp (const void *p1, const void *p2)
8629 const unsigned *n1 = (const unsigned *)p1;
8630 const unsigned *n2 = (const unsigned *)p2;
8632 return tree_code_counts[*n1] - tree_code_counts[*n2];
8635 #define TREE_MEM_USAGE_SPACES 40
8637 /* Print debugging information about tree nodes generated during the compile,
8638 and any language-specific information. */
8640 void
8641 dump_tree_statistics (void)
8643 if (GATHER_STATISTICS)
8645 uint64_t total_nodes, total_bytes;
8646 fprintf (stderr, "\nKind Nodes Bytes\n");
8647 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8648 total_nodes = total_bytes = 0;
8651 auto_vec<unsigned> indices (all_kinds);
8652 for (unsigned i = 0; i < all_kinds; i++)
8653 indices.quick_push (i);
8654 indices.qsort (tree_nodes_cmp);
8656 for (unsigned i = 0; i < (int) all_kinds; i++)
8658 unsigned j = indices[i];
8659 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
8660 tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
8661 SIZE_AMOUNT (tree_node_sizes[j]));
8662 total_nodes += tree_node_counts[j];
8663 total_bytes += tree_node_sizes[j];
8665 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8666 fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
8667 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
8668 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8672 fprintf (stderr, "Code Nodes\n");
8673 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8675 auto_vec<unsigned> indices (MAX_TREE_CODES);
8676 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8677 indices.quick_push (i);
8678 indices.qsort (tree_codes_cmp);
8680 for (unsigned i = 0; i < MAX_TREE_CODES; i++)
8682 unsigned j = indices[i];
8683 fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
8684 get_tree_code_name ((enum tree_code) j),
8685 SIZE_AMOUNT (tree_code_counts[j]));
8687 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
8688 fprintf (stderr, "\n");
8689 ssanames_print_statistics ();
8690 fprintf (stderr, "\n");
8691 phinodes_print_statistics ();
8692 fprintf (stderr, "\n");
8695 else
8696 fprintf (stderr, "(No per-node statistics)\n");
8698 print_type_hash_statistics ();
8699 print_debug_expr_statistics ();
8700 print_value_expr_statistics ();
8701 lang_hooks.print_statistics ();
8704 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8706 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8708 unsigned
8709 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
8711 /* This relies on the raw feedback's top 4 bits being zero. */
8712 #define FEEDBACK(X) ((X) * 0x04c11db7)
8713 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8714 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8715 static const unsigned syndromes[16] =
8717 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8718 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8719 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8720 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8722 #undef FEEDBACK
8723 #undef SYNDROME
8725 value <<= (32 - bytes * 8);
8726 for (unsigned ix = bytes * 2; ix--; value <<= 4)
8728 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
8730 chksum = (chksum << 4) ^ feedback;
8733 return chksum;
8736 /* Generate a crc32 of a string. */
8738 unsigned
8739 crc32_string (unsigned chksum, const char *string)
8742 chksum = crc32_byte (chksum, *string);
8743 while (*string++);
8744 return chksum;
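/* Usage sketch (illustrative):

     unsigned h = crc32_string (0, "main");    // hash an identifier
     h = crc32_unsigned_n (h, 0xdeadbeef, 4);  // mix in a 4-byte value

   get_file_function_name below feeds a checksum like this into the
   "_%08X_" portion of the names it generates.  */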
8747 /* P is a string that will be used in a symbol. Mask out any characters
8748 that are not valid in that context. */
8750 void
8751 clean_symbol_name (char *p)
8753 for (; *p; p++)
8754 if (! (ISALNUM (*p)
8755 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8756 || *p == '$'
8757 #endif
8758 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8759 || *p == '.'
8760 #endif
8762 *p = '_';
8765 static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH. */
8767 /* Create a unique anonymous identifier. The identifier is still a
8768 valid assembly label. */
8770 tree
8771 make_anon_name ()
8773 const char *fmt =
8774 #if !defined (NO_DOT_IN_LABEL)
8776 #elif !defined (NO_DOLLAR_IN_LABEL)
8778 #else
8780 #endif
8781 "_anon_%d";
8783 char buf[24];
8784 int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
8785 gcc_checking_assert (len < int (sizeof (buf)));
8787 tree id = get_identifier_with_length (buf, len);
8788 IDENTIFIER_ANON_P (id) = true;
8790 return id;
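/* Illustrative: successive calls return identifiers "_anon_0", "_anon_1",
   ... behind a target-dependent prefix character chosen by the
   NO_DOT_IN_LABEL / NO_DOLLAR_IN_LABEL conditionals above, each with
   IDENTIFIER_ANON_P set.  */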
8793 /* Generate a name for a special-purpose function.
8794 The generated name may need to be unique across the whole link.
8795 Changes to this function may also require corresponding changes to
8796 xstrdup_mask_random.
8797 TYPE is some string to identify the purpose of this function to the
8798 linker or collect2; it must start with an uppercase letter,
8799 one of:
8800 I - for constructors
8801 D - for destructors
8802 N - for C++ anonymous namespaces
8803 F - for DWARF unwind frame information. */
8805 tree
8806 get_file_function_name (const char *type)
8808 char *buf;
8809 const char *p;
8810 char *q;
8812 /* If we already have a name we know to be unique, just use that. */
8813 if (first_global_object_name)
8814 p = q = ASTRDUP (first_global_object_name);
8815 /* If the target is handling the constructors/destructors, they
8816 will be local to this file and the name is only necessary for
8817 debugging purposes.
8818    We also assign sub_I and sub_D suffixes to constructors called from
8819 the global static constructors. These are always local. */
8820 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
8821 || (startswith (type, "sub_")
8822 && (type[4] == 'I' || type[4] == 'D')))
8824 const char *file = main_input_filename;
8825 if (! file)
8826 file = LOCATION_FILE (input_location);
8827 /* Just use the file's basename, because the full pathname
8828 might be quite long. */
8829 p = q = ASTRDUP (lbasename (file));
8831 else
8833 /* Otherwise, the name must be unique across the entire link.
8834 We don't have anything that we know to be unique to this translation
8835 unit, so use what we do have and throw in some randomness. */
8836 unsigned len;
8837 const char *name = weak_global_object_name;
8838 const char *file = main_input_filename;
8840 if (! name)
8841 name = "";
8842 if (! file)
8843 file = LOCATION_FILE (input_location);
8845 len = strlen (file);
8846 q = (char *) alloca (9 + 19 + len + 1);
8847 memcpy (q, file, len + 1);
8849 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
8850 crc32_string (0, name), get_random_seed (false));
8852 p = q;
8855 clean_symbol_name (q);
8856 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
8857 + strlen (type));
8859 /* Set up the name of the file-level functions we may need.
8860 Use a global object (which is already required to be unique over
8861 the program) rather than the file name (which imposes extra
8862 constraints). */
8863 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
8865 return get_identifier (buf);
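/* Illustrative: with TYPE "I" and a known-unique global object name "foo",
   the identifier returned is "_GLOBAL__I_foo" (see FILE_FUNCTION_FORMAT);
   otherwise the cleaned basename of the input file, possibly followed by a
   crc32/random suffix, takes foo's place.  */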
8868 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8870 /* Complain that the tree code of NODE does not match the expected 0
8871 terminated list of trailing codes. The trailing code list can be
8872 empty, for a more vague error message. FILE, LINE, and FUNCTION
8873 are of the caller. */
8875 void
8876 tree_check_failed (const_tree node, const char *file,
8877 int line, const char *function, ...)
8879 va_list args;
8880 const char *buffer;
8881 unsigned length = 0;
8882 enum tree_code code;
8884 va_start (args, function);
8885 while ((code = (enum tree_code) va_arg (args, int)))
8886 length += 4 + strlen (get_tree_code_name (code));
8887 va_end (args);
8888 if (length)
8890 char *tmp;
8891 va_start (args, function);
8892 length += strlen ("expected ");
8893 buffer = tmp = (char *) alloca (length);
8894 length = 0;
8895 while ((code = (enum tree_code) va_arg (args, int)))
8897 const char *prefix = length ? " or " : "expected ";
8899 strcpy (tmp + length, prefix);
8900 length += strlen (prefix);
8901 strcpy (tmp + length, get_tree_code_name (code));
8902 length += strlen (get_tree_code_name (code));
8904 va_end (args);
8906 else
8907 buffer = "unexpected node";
8909 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8910 buffer, get_tree_code_name (TREE_CODE (node)),
8911 function, trim_filename (file), line);
8914 /* Complain that the tree code of NODE does match the expected 0
8915 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8916 the caller. */
8918 void
8919 tree_not_check_failed (const_tree node, const char *file,
8920 int line, const char *function, ...)
8922 va_list args;
8923 char *buffer;
8924 unsigned length = 0;
8925 enum tree_code code;
8927 va_start (args, function);
8928 while ((code = (enum tree_code) va_arg (args, int)))
8929 length += 4 + strlen (get_tree_code_name (code));
8930 va_end (args);
8931 va_start (args, function);
8932 buffer = (char *) alloca (length);
8933 length = 0;
8934 while ((code = (enum tree_code) va_arg (args, int)))
8936 if (length)
8938 strcpy (buffer + length, " or ");
8939 length += 4;
8941 strcpy (buffer + length, get_tree_code_name (code));
8942 length += strlen (get_tree_code_name (code));
8944 va_end (args);
8946 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8947 buffer, get_tree_code_name (TREE_CODE (node)),
8948 function, trim_filename (file), line);
8951 /* Similar to tree_check_failed, except that we check for a class of tree
8952 code, given in CL. */
8954 void
8955 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
8956 const char *file, int line, const char *function)
8958 internal_error
8959 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8960 TREE_CODE_CLASS_STRING (cl),
8961 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
8962 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
8965 /* Similar to tree_check_failed, except that instead of specifying a
8966 dozen codes, use the knowledge that they're all sequential. */
8968 void
8969 tree_range_check_failed (const_tree node, const char *file, int line,
8970 const char *function, enum tree_code c1,
8971 enum tree_code c2)
8973 char *buffer;
8974 unsigned length = 0;
8975 unsigned int c;
8977 for (c = c1; c <= c2; ++c)
8978 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
8980 length += strlen ("expected ");
8981 buffer = (char *) alloca (length);
8982 length = 0;
8984 for (c = c1; c <= c2; ++c)
8986 const char *prefix = length ? " or " : "expected ";
8988 strcpy (buffer + length, prefix);
8989 length += strlen (prefix);
8990 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
8991 length += strlen (get_tree_code_name ((enum tree_code) c));
8994 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8995 buffer, get_tree_code_name (TREE_CODE (node)),
8996 function, trim_filename (file), line);
9000 /* Similar to tree_check_failed, except that we check that a tree does
9001 not have the specified code, given in CL. */
9003 void
9004 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9005 const char *file, int line, const char *function)
9007 internal_error
9008 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9009 TREE_CODE_CLASS_STRING (cl),
9010 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9011 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9015 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9017 void
9018 omp_clause_check_failed (const_tree node, const char *file, int line,
9019 const char *function, enum omp_clause_code code)
9021 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
9022 "in %s, at %s:%d",
9023 omp_clause_code_name[code],
9024 get_tree_code_name (TREE_CODE (node)),
9025 function, trim_filename (file), line);
9029 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9031 void
9032 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9033 const char *function, enum omp_clause_code c1,
9034 enum omp_clause_code c2)
9036 char *buffer;
9037 unsigned length = 0;
9038 unsigned int c;
9040 for (c = c1; c <= c2; ++c)
9041 length += 4 + strlen (omp_clause_code_name[c]);
9043 length += strlen ("expected ");
9044 buffer = (char *) alloca (length);
9045 length = 0;
9047 for (c = c1; c <= c2; ++c)
9049 const char *prefix = length ? " or " : "expected ";
9051 strcpy (buffer + length, prefix);
9052 length += strlen (prefix);
9053 strcpy (buffer + length, omp_clause_code_name[c]);
9054 length += strlen (omp_clause_code_name[c]);
9057 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9058 buffer, omp_clause_code_name[TREE_CODE (node)],
9059 function, trim_filename (file), line);
9063 #undef DEFTREESTRUCT
9064 #define DEFTREESTRUCT(VAL, NAME) NAME,
9066 static const char *ts_enum_names[] = {
9067 #include "treestruct.def"
9069 #undef DEFTREESTRUCT
9071 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9073 /* Similar to tree_class_check_failed, except that we check for
9074 whether CODE contains the tree structure identified by EN. */
9076 void
9077 tree_contains_struct_check_failed (const_tree node,
9078 const enum tree_node_structure_enum en,
9079 const char *file, int line,
9080 const char *function)
9082 internal_error
9083 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9084 TS_ENUM_NAME (en),
9085 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9089 /* Similar to above, except that the check is for the bounds of a TREE_INT_CST's
9090 (dynamically sized) vector. */
9092 void
9093 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9094 const char *function)
9096 internal_error
9097 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
9098 "at %s:%d",
9099 idx + 1, len, function, trim_filename (file), line);
9102 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9103 (dynamically sized) vector. */
9105 void
9106 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9107 const char *function)
9109 internal_error
9110 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
9111 idx + 1, len, function, trim_filename (file), line);
9114 /* Similar to above, except that the check is for the bounds of the operand
9115 vector of an expression node EXP. */
9117 void
9118 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9119 int line, const char *function)
9121 enum tree_code code = TREE_CODE (exp);
9122 internal_error
9123 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9124 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9125 function, trim_filename (file), line);
9128 /* Similar to above, except that the check is for the number of
9129 operands of an OMP_CLAUSE node. */
9131 void
9132 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9133 int line, const char *function)
9135 internal_error
9136 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9137 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9138 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9139 trim_filename (file), line);
9141 #endif /* ENABLE_TREE_CHECKING */
9143 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9144 and mapped to the machine mode MODE. Initialize its fields and build
9145 the information necessary for debugging output. */
9147 static tree
9148 make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
9150 tree t;
9151 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9153 t = make_node (VECTOR_TYPE);
9154 TREE_TYPE (t) = mv_innertype;
9155 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9156 SET_TYPE_MODE (t, mode);
9158 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9159 SET_TYPE_STRUCTURAL_EQUALITY (t);
9160 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9161 || mode != VOIDmode)
9162 && !VECTOR_BOOLEAN_TYPE_P (t))
9163 TYPE_CANONICAL (t)
9164 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9166 layout_type (t);
9168 hashval_t hash = type_hash_canon_hash (t);
9169 t = type_hash_canon (hash, t);
9171 /* We have built a main variant, based on the main variant of the
9172 inner type. Use it to build the variant we return. */
9173 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9174 && TREE_TYPE (t) != innertype)
9175 return build_type_attribute_qual_variant (t,
9176 TYPE_ATTRIBUTES (innertype),
9177 TYPE_QUALS (innertype));
9179 return t;
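/* Usage sketch (illustrative): on a target providing V4SImode,
   make_vector_type (intSI_type_node, 4, V4SImode) builds the vector type
   behind a 4 x 32-bit integer vector (e.g. a 16-byte
   __attribute__((vector_size)) int vector); the TYPE_CANONICAL recursion
   above makes variants built from qualified or attributed inner types share
   one canonical vector type.  */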
9182 static tree
9183 make_or_reuse_type (unsigned size, int unsignedp)
9185 int i;
9187 if (size == INT_TYPE_SIZE)
9188 return unsignedp ? unsigned_type_node : integer_type_node;
9189 if (size == CHAR_TYPE_SIZE)
9190 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9191 if (size == SHORT_TYPE_SIZE)
9192 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9193 if (size == LONG_TYPE_SIZE)
9194 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9195 if (size == LONG_LONG_TYPE_SIZE)
9196 return (unsignedp ? long_long_unsigned_type_node
9197 : long_long_integer_type_node);
9199 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9200 if (size == int_n_data[i].bitsize
9201 && int_n_enabled_p[i])
9202 return (unsignedp ? int_n_trees[i].unsigned_type
9203 : int_n_trees[i].signed_type);
9205 if (unsignedp)
9206 return make_unsigned_type (size);
9207 else
9208 return make_signed_type (size);
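/* Illustrative: make_or_reuse_type (INT_TYPE_SIZE, 1) simply returns
   unsigned_type_node, while an unusual width such as 24 falls through to
   make_signed_type / make_unsigned_type and creates a fresh node, unless an
   enabled __intN type of that size matches first.  */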
9211 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9213 static tree
9214 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9216 if (satp)
9218 if (size == SHORT_FRACT_TYPE_SIZE)
9219 return unsignedp ? sat_unsigned_short_fract_type_node
9220 : sat_short_fract_type_node;
9221 if (size == FRACT_TYPE_SIZE)
9222 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9223 if (size == LONG_FRACT_TYPE_SIZE)
9224 return unsignedp ? sat_unsigned_long_fract_type_node
9225 : sat_long_fract_type_node;
9226 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9227 return unsignedp ? sat_unsigned_long_long_fract_type_node
9228 : sat_long_long_fract_type_node;
9230 else
9232 if (size == SHORT_FRACT_TYPE_SIZE)
9233 return unsignedp ? unsigned_short_fract_type_node
9234 : short_fract_type_node;
9235 if (size == FRACT_TYPE_SIZE)
9236 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9237 if (size == LONG_FRACT_TYPE_SIZE)
9238 return unsignedp ? unsigned_long_fract_type_node
9239 : long_fract_type_node;
9240 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9241 return unsignedp ? unsigned_long_long_fract_type_node
9242 : long_long_fract_type_node;
9245 return make_fract_type (size, unsignedp, satp);
9248 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9250 static tree
9251 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9253 if (satp)
9255 if (size == SHORT_ACCUM_TYPE_SIZE)
9256 return unsignedp ? sat_unsigned_short_accum_type_node
9257 : sat_short_accum_type_node;
9258 if (size == ACCUM_TYPE_SIZE)
9259 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9260 if (size == LONG_ACCUM_TYPE_SIZE)
9261 return unsignedp ? sat_unsigned_long_accum_type_node
9262 : sat_long_accum_type_node;
9263 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9264 return unsignedp ? sat_unsigned_long_long_accum_type_node
9265 : sat_long_long_accum_type_node;
9267 else
9269 if (size == SHORT_ACCUM_TYPE_SIZE)
9270 return unsignedp ? unsigned_short_accum_type_node
9271 : short_accum_type_node;
9272 if (size == ACCUM_TYPE_SIZE)
9273 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9274 if (size == LONG_ACCUM_TYPE_SIZE)
9275 return unsignedp ? unsigned_long_accum_type_node
9276 : long_accum_type_node;
9277 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9278 return unsignedp ? unsigned_long_long_accum_type_node
9279 : long_long_accum_type_node;
9282 return make_accum_type (size, unsignedp, satp);
9286 /* Create an atomic variant node for TYPE. This routine is called
9287 during initialization of data types to create the 5 basic atomic
9288 types. The generic build_variant_type function requires these to
9289 already be set up in order to function properly, so cannot be
9290 called from there. If ALIGN is non-zero, then ensure alignment is
9291 overridden to this value. */
9293 static tree
9294 build_atomic_base (tree type, unsigned int align)
9296 tree t;
9298   /* Make sure it's not already registered.  */
9299 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9300 return t;
9302 t = build_variant_type_copy (type);
9303 set_type_quals (t, TYPE_QUAL_ATOMIC);
9305 if (align)
9306 SET_TYPE_ALIGN (t, align);
9308 return t;
9311 /* Information about the _FloatN and _FloatNx types. This must be in
9312 the same order as the corresponding TI_* enum values. */
9313 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
9315 { 16, false },
9316 { 32, false },
9317 { 64, false },
9318 { 128, false },
9319 { 32, true },
9320 { 64, true },
9321 { 128, true },
9325 /* Create nodes for all integer types (and error_mark_node) using the sizes
9326 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9328 void
9329 build_common_tree_nodes (bool signed_char)
9331 int i;
9333 error_mark_node = make_node (ERROR_MARK);
9334 TREE_TYPE (error_mark_node) = error_mark_node;
9336 initialize_sizetypes ();
9338 /* Define both `signed char' and `unsigned char'. */
9339 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9340 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9341 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9342 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9344 /* Define `char', which is like either `signed char' or `unsigned char'
9345 but not the same as either. */
9346 char_type_node
9347 = (signed_char
9348 ? make_signed_type (CHAR_TYPE_SIZE)
9349 : make_unsigned_type (CHAR_TYPE_SIZE));
9350 TYPE_STRING_FLAG (char_type_node) = 1;
9352 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9353 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9354 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9355 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9356 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9357 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9358 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9359 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9361 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9363 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9364 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9366 if (int_n_enabled_p[i])
9368 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9369 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9373 /* Define a boolean type. This type only represents boolean values but
9374 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9375 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9376 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9377 TYPE_PRECISION (boolean_type_node) = 1;
9378 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9380 /* Define what type to use for size_t. */
9381 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9382 size_type_node = unsigned_type_node;
9383 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9384 size_type_node = long_unsigned_type_node;
9385 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9386 size_type_node = long_long_unsigned_type_node;
9387 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9388 size_type_node = short_unsigned_type_node;
9389 else
9391 int i;
9393 size_type_node = NULL_TREE;
9394 for (i = 0; i < NUM_INT_N_ENTS; i++)
9395 if (int_n_enabled_p[i])
9397 char name[50], altname[50];
9398 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9399 sprintf (altname, "__int%d__ unsigned", int_n_data[i].bitsize);
9401 if (strcmp (name, SIZE_TYPE) == 0
9402 || strcmp (altname, SIZE_TYPE) == 0)
9404 size_type_node = int_n_trees[i].unsigned_type;
9407 if (size_type_node == NULL_TREE)
9408 gcc_unreachable ();
9411 /* Define what type to use for ptrdiff_t. */
9412 if (strcmp (PTRDIFF_TYPE, "int") == 0)
9413 ptrdiff_type_node = integer_type_node;
9414 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
9415 ptrdiff_type_node = long_integer_type_node;
9416 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
9417 ptrdiff_type_node = long_long_integer_type_node;
9418 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
9419 ptrdiff_type_node = short_integer_type_node;
9420 else
9422 ptrdiff_type_node = NULL_TREE;
9423 for (int i = 0; i < NUM_INT_N_ENTS; i++)
9424 if (int_n_enabled_p[i])
9426 char name[50], altname[50];
9427 sprintf (name, "__int%d", int_n_data[i].bitsize);
9428 sprintf (altname, "__int%d__", int_n_data[i].bitsize);
9430 if (strcmp (name, PTRDIFF_TYPE) == 0
9431 || strcmp (altname, PTRDIFF_TYPE) == 0)
9432 ptrdiff_type_node = int_n_trees[i].signed_type;
9434 if (ptrdiff_type_node == NULL_TREE)
9435 gcc_unreachable ();
9438 /* Fill in the rest of the sized types. Reuse existing type nodes
9439 when possible. */
9440 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9441 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9442 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9443 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9444 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9446 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9447 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9448 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9449 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9450 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9452   /* Don't call build_qualified_type for atomics.  That routine does
9453 special processing for atomics, and until they are initialized
9454 it's better not to make that call.
9456 Check to see if there is a target override for atomic types. */
9458 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9459 targetm.atomic_align_for_mode (QImode));
9460 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9461 targetm.atomic_align_for_mode (HImode));
9462 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9463 targetm.atomic_align_for_mode (SImode));
9464 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9465 targetm.atomic_align_for_mode (DImode));
9466 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9467 targetm.atomic_align_for_mode (TImode));
9469 access_public_node = get_identifier ("public");
9470 access_protected_node = get_identifier ("protected");
9471 access_private_node = get_identifier ("private");
9473   /* Define these next since types below may use them.  */
9474 integer_zero_node = build_int_cst (integer_type_node, 0);
9475 integer_one_node = build_int_cst (integer_type_node, 1);
9476 integer_three_node = build_int_cst (integer_type_node, 3);
9477 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9479 size_zero_node = size_int (0);
9480 size_one_node = size_int (1);
9481 bitsize_zero_node = bitsize_int (0);
9482 bitsize_one_node = bitsize_int (1);
9483 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9485 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9486 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9488 void_type_node = make_node (VOID_TYPE);
9489 layout_type (void_type_node);
9491 /* We are not going to have real types in C with less than byte alignment,
9492 so we might as well not have any types that claim to have it. */
9493 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
9494 TYPE_USER_ALIGN (void_type_node) = 0;
9496 void_node = make_node (VOID_CST);
9497 TREE_TYPE (void_node) = void_type_node;
9499 void_list_node = build_tree_list (NULL_TREE, void_type_node);
9501 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9502 layout_type (TREE_TYPE (null_pointer_node));
9504 ptr_type_node = build_pointer_type (void_type_node);
9505 const_ptr_type_node
9506 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9507 for (unsigned i = 0; i < ARRAY_SIZE (builtin_structptr_types); ++i)
9508 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
9510 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9512 float_type_node = make_node (REAL_TYPE);
9513 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9514 layout_type (float_type_node);
9516 double_type_node = make_node (REAL_TYPE);
9517 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9518 layout_type (double_type_node);
9520 long_double_type_node = make_node (REAL_TYPE);
9521 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9522 layout_type (long_double_type_node);
9524 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9526 int n = floatn_nx_types[i].n;
9527 bool extended = floatn_nx_types[i].extended;
9528 scalar_float_mode mode;
9529 if (!targetm.floatn_mode (n, extended).exists (&mode))
9530 continue;
9531 int precision = GET_MODE_PRECISION (mode);
9532 /* Work around the rs6000 KFmode having precision 113 not
9533 128. */
9534 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
9535 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
9536 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
9537 if (!extended)
9538 gcc_assert (min_precision == n);
9539 if (precision < min_precision)
9540 precision = min_precision;
9541 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
9542 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
9543 layout_type (FLOATN_NX_TYPE_NODE (i));
9544 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
9546 float128t_type_node = float128_type_node;
9547 #ifdef HAVE_BFmode
9548 if (REAL_MODE_FORMAT (BFmode) == &arm_bfloat_half_format
9549 && targetm.scalar_mode_supported_p (BFmode)
9550 && targetm.libgcc_floating_mode_supported_p (BFmode))
9552 bfloat16_type_node = make_node (REAL_TYPE);
9553 TYPE_PRECISION (bfloat16_type_node) = GET_MODE_PRECISION (BFmode);
9554 layout_type (bfloat16_type_node);
9555 SET_TYPE_MODE (bfloat16_type_node, BFmode);
9557 #endif
9559 float_ptr_type_node = build_pointer_type (float_type_node);
9560 double_ptr_type_node = build_pointer_type (double_type_node);
9561 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9562 integer_ptr_type_node = build_pointer_type (integer_type_node);
9564 /* Fixed size integer types. */
9565 uint16_type_node = make_or_reuse_type (16, 1);
9566 uint32_type_node = make_or_reuse_type (32, 1);
9567 uint64_type_node = make_or_reuse_type (64, 1);
9568 if (targetm.scalar_mode_supported_p (TImode))
9569 uint128_type_node = make_or_reuse_type (128, 1);
9571 /* Decimal float types. */
9572 if (targetm.decimal_float_supported_p ())
9574 dfloat32_type_node = make_node (REAL_TYPE);
9575 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9576 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9577 layout_type (dfloat32_type_node);
9579 dfloat64_type_node = make_node (REAL_TYPE);
9580 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9581 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9582 layout_type (dfloat64_type_node);
9584 dfloat128_type_node = make_node (REAL_TYPE);
9585 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9586 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9587 layout_type (dfloat128_type_node);
9590 complex_integer_type_node = build_complex_type (integer_type_node, true);
9591 complex_float_type_node = build_complex_type (float_type_node, true);
9592 complex_double_type_node = build_complex_type (double_type_node, true);
9593 complex_long_double_type_node = build_complex_type (long_double_type_node,
9594 true);
9596 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
9598 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
9599 COMPLEX_FLOATN_NX_TYPE_NODE (i)
9600 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
9603 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9604 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9605 sat_ ## KIND ## _type_node = \
9606 make_sat_signed_ ## KIND ## _type (SIZE); \
9607 sat_unsigned_ ## KIND ## _type_node = \
9608 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9609 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9610 unsigned_ ## KIND ## _type_node = \
9611 make_unsigned_ ## KIND ## _type (SIZE);
9613 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9614 sat_ ## WIDTH ## KIND ## _type_node = \
9615 make_sat_signed_ ## KIND ## _type (SIZE); \
9616 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9617 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9618 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9619 unsigned_ ## WIDTH ## KIND ## _type_node = \
9620 make_unsigned_ ## KIND ## _type (SIZE);
9622 /* Make fixed-point type nodes based on four different widths. */
9623 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9624 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9625 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9626 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9627 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9629 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9630 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9631 NAME ## _type_node = \
9632 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9633 u ## NAME ## _type_node = \
9634 make_or_reuse_unsigned_ ## KIND ## _type \
9635 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9636 sat_ ## NAME ## _type_node = \
9637 make_or_reuse_sat_signed_ ## KIND ## _type \
9638 (GET_MODE_BITSIZE (MODE ## mode)); \
9639 sat_u ## NAME ## _type_node = \
9640 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9641 (GET_MODE_BITSIZE (U ## MODE ## mode));
9643 /* Fixed-point type and mode nodes. */
9644 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9645 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9646 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9647 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9648 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9649 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9650 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9651 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9652 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9653 MAKE_FIXED_MODE_NODE (accum, da, DA)
9654 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9657 tree t = targetm.build_builtin_va_list ();
9659 /* Many back-ends define record types without setting TYPE_NAME.
9660 If we copied the record type here, we'd keep the original
9661 record type without a name. This breaks name mangling. So,
9662 don't copy record types and let c_common_nodes_and_builtins()
9663 declare the type to be __builtin_va_list. */
9664 if (TREE_CODE (t) != RECORD_TYPE)
9665 t = build_variant_type_copy (t);
9667 va_list_type_node = t;
9670 /* SCEV analyzer global shared trees. */
9671 chrec_dont_know = make_node (SCEV_NOT_KNOWN);
9672 TREE_TYPE (chrec_dont_know) = void_type_node;
9673 chrec_known = make_node (SCEV_KNOWN);
9674 TREE_TYPE (chrec_known) = void_type_node;
9677 /* Modify DECL for given flags.
9678 TM_PURE attribute is set only on types, so the function will modify
9679 DECL's type when ECF_TM_PURE is used. */
9681 void
9682 set_call_expr_flags (tree decl, int flags)
9684 if (flags & ECF_NOTHROW)
9685 TREE_NOTHROW (decl) = 1;
9686 if (flags & ECF_CONST)
9687 TREE_READONLY (decl) = 1;
9688 if (flags & ECF_PURE)
9689 DECL_PURE_P (decl) = 1;
9690 if (flags & ECF_LOOPING_CONST_OR_PURE)
9691 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9692 if (flags & ECF_NOVOPS)
9693 DECL_IS_NOVOPS (decl) = 1;
9694 if (flags & ECF_NORETURN)
9695 TREE_THIS_VOLATILE (decl) = 1;
9696 if (flags & ECF_MALLOC)
9697 DECL_IS_MALLOC (decl) = 1;
9698 if (flags & ECF_RETURNS_TWICE)
9699 DECL_IS_RETURNS_TWICE (decl) = 1;
9700 if (flags & ECF_LEAF)
9701 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9702 NULL, DECL_ATTRIBUTES (decl));
9703 if (flags & ECF_COLD)
9704 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
9705 NULL, DECL_ATTRIBUTES (decl));
9706 if (flags & ECF_RET1)
9707 DECL_ATTRIBUTES (decl)
9708 = tree_cons (get_identifier ("fn spec"),
9709 build_tree_list (NULL_TREE, build_string (2, "1 ")),
9710 DECL_ATTRIBUTES (decl));
9711 if ((flags & ECF_TM_PURE) && flag_tm)
9712 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9713 /* Looping const or pure is implied by noreturn.
9714 There is currently no way to declare looping const or looping pure alone. */
9715 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9716 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
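/* Usage sketch (illustrative): local_define_builtin below does exactly this,
   e.g.

     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF | ECF_NORETURN);

   marks DECL as non-throwing (TREE_NOTHROW), adds the "leaf" attribute and
   sets TREE_THIS_VOLATILE to record noreturn.  */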
9720 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9722 static void
9723 local_define_builtin (const char *name, tree type, enum built_in_function code,
9724 const char *library_name, int ecf_flags)
9726 tree decl;
9728 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9729 library_name, NULL_TREE);
9730 set_call_expr_flags (decl, ecf_flags);
9732 set_builtin_decl (code, decl, true);
9735 /* Call this function after instantiating all builtins that the language
9736 front end cares about. This will build the rest of the builtins
9737 and internal functions that are relied upon by the tree optimizers and
9738 the middle-end. */
9740 void
9741 build_common_builtin_nodes (void)
9743 tree tmp, ftype;
9744 int ecf_flags;
9746 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
9748 ftype = build_function_type_list (void_type_node,
9749 ptr_type_node,
9750 ptr_type_node,
9751 integer_type_node,
9752 NULL_TREE);
9753 local_define_builtin ("__builtin_clear_padding", ftype,
9754 BUILT_IN_CLEAR_PADDING,
9755 "__builtin_clear_padding",
9756 ECF_LEAF | ECF_NOTHROW);
9759 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
9760 || !builtin_decl_explicit_p (BUILT_IN_TRAP)
9761 || !builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP)
9762 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
9764 ftype = build_function_type (void_type_node, void_list_node);
9765 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9766 local_define_builtin ("__builtin_unreachable", ftype,
9767 BUILT_IN_UNREACHABLE,
9768 "__builtin_unreachable",
9769 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9770 | ECF_CONST | ECF_COLD);
9771 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP))
9772 local_define_builtin ("__builtin_unreachable trap", ftype,
9773 BUILT_IN_UNREACHABLE_TRAP,
9774 "__builtin_unreachable trap",
9775 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9776 | ECF_CONST | ECF_COLD);
9777 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
9778 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
9779 "abort",
9780 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
9781 if (!builtin_decl_explicit_p (BUILT_IN_TRAP))
9782 local_define_builtin ("__builtin_trap", ftype, BUILT_IN_TRAP,
9783 "__builtin_trap",
9784 ECF_NORETURN | ECF_NOTHROW | ECF_LEAF | ECF_COLD);
9787 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9788 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9790 ftype = build_function_type_list (ptr_type_node,
9791 ptr_type_node, const_ptr_type_node,
9792 size_type_node, NULL_TREE);
9794 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9795 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9796 "memcpy", ECF_NOTHROW | ECF_LEAF);
9797 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9798 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9799 "memmove", ECF_NOTHROW | ECF_LEAF);
9802 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9804 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9805 const_ptr_type_node, size_type_node,
9806 NULL_TREE);
9807 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9808 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9811 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9813 ftype = build_function_type_list (ptr_type_node,
9814 ptr_type_node, integer_type_node,
9815 size_type_node, NULL_TREE);
9816 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9817 "memset", ECF_NOTHROW | ECF_LEAF);
9820 /* If we're checking the stack, `alloca' can throw. */
9821 const int alloca_flags
9822 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
9824 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9826 ftype = build_function_type_list (ptr_type_node,
9827 size_type_node, NULL_TREE);
9828 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9829 "alloca", alloca_flags);
9832 ftype = build_function_type_list (ptr_type_node, size_type_node,
9833 size_type_node, NULL_TREE);
9834 local_define_builtin ("__builtin_alloca_with_align", ftype,
9835 BUILT_IN_ALLOCA_WITH_ALIGN,
9836 "__builtin_alloca_with_align",
9837 alloca_flags);
9839 ftype = build_function_type_list (ptr_type_node, size_type_node,
9840 size_type_node, size_type_node, NULL_TREE);
9841 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
9842 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
9843 "__builtin_alloca_with_align_and_max",
9844 alloca_flags);
9846 ftype = build_function_type_list (void_type_node,
9847 ptr_type_node, ptr_type_node,
9848 ptr_type_node, NULL_TREE);
9849 local_define_builtin ("__builtin_init_trampoline", ftype,
9850 BUILT_IN_INIT_TRAMPOLINE,
9851 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9852 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9853 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9854 "__builtin_init_heap_trampoline",
9855 ECF_NOTHROW | ECF_LEAF);
9856 local_define_builtin ("__builtin_init_descriptor", ftype,
9857 BUILT_IN_INIT_DESCRIPTOR,
9858 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
9860 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9861 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9862 BUILT_IN_ADJUST_TRAMPOLINE,
9863 "__builtin_adjust_trampoline",
9864 ECF_CONST | ECF_NOTHROW);
9865 local_define_builtin ("__builtin_adjust_descriptor", ftype,
9866 BUILT_IN_ADJUST_DESCRIPTOR,
9867 "__builtin_adjust_descriptor",
9868 ECF_CONST | ECF_NOTHROW);
9870 ftype = build_function_type_list (void_type_node,
9871 ptr_type_node, ptr_type_node, NULL_TREE);
9872 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
9873 local_define_builtin ("__builtin___clear_cache", ftype,
9874 BUILT_IN_CLEAR_CACHE,
9875 "__clear_cache",
9876 ECF_NOTHROW);
9878 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9879 BUILT_IN_NONLOCAL_GOTO,
9880 "__builtin_nonlocal_goto",
9881 ECF_NORETURN | ECF_NOTHROW);
9883 ftype = build_function_type_list (void_type_node,
9884 ptr_type_node, ptr_type_node, NULL_TREE);
9885 local_define_builtin ("__builtin_setjmp_setup", ftype,
9886 BUILT_IN_SETJMP_SETUP,
9887 "__builtin_setjmp_setup", ECF_NOTHROW);
9889 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9890 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9891 BUILT_IN_SETJMP_RECEIVER,
9892 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9894 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9895 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9896 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9898 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9899 local_define_builtin ("__builtin_stack_restore", ftype,
9900 BUILT_IN_STACK_RESTORE,
9901 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9903 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9904 const_ptr_type_node, size_type_node,
9905 NULL_TREE);
9906 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
9907 "__builtin_memcmp_eq",
9908 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9910 local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
9911 "__builtin_strncmp_eq",
9912 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9914 local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
9915 "__builtin_strcmp_eq",
9916 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9918 /* If there's a possibility that we might use the ARM EABI, build the
9919 alternate __cxa_end_cleanup node used to resume from C++. */
9920 if (targetm.arm_eabi_unwinder)
9922 ftype = build_function_type_list (void_type_node, NULL_TREE);
9923 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9924 BUILT_IN_CXA_END_CLEANUP,
9925 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9928 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9929 local_define_builtin ("__builtin_unwind_resume", ftype,
9930 BUILT_IN_UNWIND_RESUME,
9931 ((targetm_common.except_unwind_info (&global_options)
9932 == UI_SJLJ)
9933 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9934 ECF_NORETURN);
9936 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9938 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9939 NULL_TREE);
9940 local_define_builtin ("__builtin_return_address", ftype,
9941 BUILT_IN_RETURN_ADDRESS,
9942 "__builtin_return_address",
9943 ECF_NOTHROW);
9946 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9947 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9949 ftype = build_function_type_list (void_type_node, ptr_type_node,
9950 ptr_type_node, NULL_TREE);
9951 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
9952 local_define_builtin ("__cyg_profile_func_enter", ftype,
9953 BUILT_IN_PROFILE_FUNC_ENTER,
9954 "__cyg_profile_func_enter", 0);
9955 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9956 local_define_builtin ("__cyg_profile_func_exit", ftype,
9957 BUILT_IN_PROFILE_FUNC_EXIT,
9958 "__cyg_profile_func_exit", 0);
9961 /* The exception object and filter values from the runtime. The argument
9962 must be zero before exception lowering, i.e. from the front end. After
9963 exception lowering, it will be the region number for the exception
9964 landing pad. These functions are PURE instead of CONST to prevent
9965 them from being hoisted past the exception edge that will initialize
9966 their values in the landing pad. */
9967 ftype = build_function_type_list (ptr_type_node,
9968 integer_type_node, NULL_TREE);
9969 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
9970 /* Only use TM_PURE if we have TM language support. */
9971 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
9972 ecf_flags |= ECF_TM_PURE;
9973 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
9974 "__builtin_eh_pointer", ecf_flags);
9976 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
9977 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
9978 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
9979 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9981 ftype = build_function_type_list (void_type_node,
9982 integer_type_node, integer_type_node,
9983 NULL_TREE);
9984 local_define_builtin ("__builtin_eh_copy_values", ftype,
9985 BUILT_IN_EH_COPY_VALUES,
9986 "__builtin_eh_copy_values", ECF_NOTHROW);
9988 /* Complex multiplication and division. These are handled as builtins
9989 rather than optabs because emit_library_call_value doesn't support
9990 complex. Further, we can do slightly better with folding these
9991 beasties if the real and imaginary parts of the arguments are separate. */
9993 int mode;
9995 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
9997 char mode_name_buf[4], *q;
9998 const char *p;
9999 enum built_in_function mcode, dcode;
10000 tree type, inner_type;
10001 const char *prefix = "__";
10003 if (targetm.libfunc_gnu_prefix)
10004 prefix = "__gnu_";
10006 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10007 if (type == NULL)
10008 continue;
10009 inner_type = TREE_TYPE (type);
10011 ftype = build_function_type_list (type, inner_type, inner_type,
10012 inner_type, inner_type, NULL_TREE);
10014 mcode = ((enum built_in_function)
10015 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10016 dcode = ((enum built_in_function)
10017 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10019 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10020 *q = TOLOWER (*p);
10021 *q = '\0';
10023 /* For -ftrapping-math these should throw from a former
10024 -fnon-call-exceptions stmt. */
10025 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10026 NULL);
10027 local_define_builtin (built_in_names[mcode], ftype, mcode,
10028 built_in_names[mcode],
10029 ECF_CONST | ECF_LEAF);
10031 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10032 NULL);
10033 local_define_builtin (built_in_names[dcode], ftype, dcode,
10034 built_in_names[dcode],
10035 ECF_CONST | ECF_LEAF);
10039 init_internal_fns ();
10042 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10043 better way.
10045 If we requested a pointer to a vector, build up the pointers that
10046 we stripped off while looking for the inner type. Similarly for
10047 return values from functions.
10049 The argument TYPE is the top of the chain, and BOTTOM is the
10050 new type which we will point to. */
10052 tree
10053 reconstruct_complex_type (tree type, tree bottom)
10055 tree inner, outer;
10057 if (TREE_CODE (type) == POINTER_TYPE)
10059 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10060 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10061 TYPE_REF_CAN_ALIAS_ALL (type));
10063 else if (TREE_CODE (type) == REFERENCE_TYPE)
10065 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10066 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10067 TYPE_REF_CAN_ALIAS_ALL (type));
10069 else if (TREE_CODE (type) == ARRAY_TYPE)
10071 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10072 outer = build_array_type (inner, TYPE_DOMAIN (type));
10074 else if (TREE_CODE (type) == FUNCTION_TYPE)
10076 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10077 outer = build_function_type (inner, TYPE_ARG_TYPES (type),
10078 TYPE_NO_NAMED_ARGS_STDARG_P (type));
10080 else if (TREE_CODE (type) == METHOD_TYPE)
10082 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10083 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10084 so we must compensate by getting rid of it. */
10085 outer
10086 = build_method_type_directly
10087 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10088 inner,
10089 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10091 else if (TREE_CODE (type) == OFFSET_TYPE)
10093 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10094 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10096 else
10097 return bottom;
10099 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10100 TYPE_QUALS (type));
10103 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10104 the inner type. */
10105 tree
10106 build_vector_type_for_mode (tree innertype, machine_mode mode)
10108 poly_int64 nunits;
10109 unsigned int bitsize;
10111 switch (GET_MODE_CLASS (mode))
10113 case MODE_VECTOR_BOOL:
10114 case MODE_VECTOR_INT:
10115 case MODE_VECTOR_FLOAT:
10116 case MODE_VECTOR_FRACT:
10117 case MODE_VECTOR_UFRACT:
10118 case MODE_VECTOR_ACCUM:
10119 case MODE_VECTOR_UACCUM:
10120 nunits = GET_MODE_NUNITS (mode);
10121 break;
10123 case MODE_INT:
10124 /* Check that there are no leftover bits. */
10125 bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
10126 gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10127 nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10128 break;
10130 default:
10131 gcc_unreachable ();
10134 return make_vector_type (innertype, nunits, mode);
10137 /* Similarly, but takes the inner type and number of units, which must be
10138 a power of two. */
10140 tree
10141 build_vector_type (tree innertype, poly_int64 nunits)
10143 return make_vector_type (innertype, nunits, VOIDmode);
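/* A usage sketch: request a four-element vector of 32-bit integers; the
   machine mode is derived by make_vector_type from the element type and
   element count.  */
#if 0
  tree v4si_type = build_vector_type (intSI_type_node, 4);
#endif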
10146 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10148 tree
10149 build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
10151 gcc_assert (mask_mode != BLKmode);
10153 unsigned HOST_WIDE_INT esize;
10154 if (VECTOR_MODE_P (mask_mode))
10156 poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
10157 esize = vector_element_size (vsize, nunits);
10159 else
10160 esize = 1;
10162 tree bool_type = build_nonstandard_boolean_type (esize);
10164 return make_vector_type (bool_type, nunits, mask_mode);
10167 /* Build a vector type that holds one boolean result for each element of
10168 vector type VECTYPE. The public interface for this operation is
10169 truth_type_for. */
10171 static tree
10172 build_truth_vector_type_for (tree vectype)
10174 machine_mode vector_mode = TYPE_MODE (vectype);
10175 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
10177 machine_mode mask_mode;
10178 if (VECTOR_MODE_P (vector_mode)
10179 && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
10180 return build_truth_vector_type_for_mode (nunits, mask_mode);
10182 poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
10183 unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
10184 tree bool_type = build_nonstandard_boolean_type (esize);
10186 return make_vector_type (bool_type, nunits, VOIDmode);
10189 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10190 set. */
10192 tree
10193 build_opaque_vector_type (tree innertype, poly_int64 nunits)
10195 tree t = make_vector_type (innertype, nunits, VOIDmode);
10196 tree cand;
10197 /* We always build the non-opaque variant before the opaque one,
10198 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10199 cand = TYPE_NEXT_VARIANT (t);
10200 if (cand
10201 && TYPE_VECTOR_OPAQUE (cand)
10202 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10203 return cand;
10204 /* Otherwise build a variant type and make sure to queue it after
10205 the non-opaque type. */
10206 cand = build_distinct_type_copy (t);
10207 TYPE_VECTOR_OPAQUE (cand) = true;
10208 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10209 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10210 TYPE_NEXT_VARIANT (t) = cand;
10211 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10212 return cand;
10215 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10217 static poly_wide_int
10218 vector_cst_int_elt (const_tree t, unsigned int i)
10220 /* First handle elements that are directly encoded. */
10221 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10222 if (i < encoded_nelts)
10223 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));
10225 /* Identify the pattern that contains element I and work out the index of
10226 the last encoded element for that pattern. */
10227 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10228 unsigned int pattern = i % npatterns;
10229 unsigned int count = i / npatterns;
10230 unsigned int final_i = encoded_nelts - npatterns + pattern;
10232 /* If there are no steps, the final encoded value is the right one. */
10233 if (!VECTOR_CST_STEPPED_P (t))
10234 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));
10236 /* Otherwise work out the value from the last two encoded elements. */
10237 tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
10238 tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
10239 poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
10240 return wi::to_poly_wide (v2) + (count - 2) * diff;
10243 /* Return the value of element I of VECTOR_CST T. */
10245 tree
10246 vector_cst_elt (const_tree t, unsigned int i)
10248 /* First handle elements that are directly encoded. */
10249 unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
10250 if (i < encoded_nelts)
10251 return VECTOR_CST_ENCODED_ELT (t, i);
10253 /* If there are no steps, the final encoded value is the right one. */
10254 if (!VECTOR_CST_STEPPED_P (t))
10256 /* Identify the pattern that contains element I and work out the index of
10257 the last encoded element for that pattern. */
10258 unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
10259 unsigned int pattern = i % npatterns;
10260 unsigned int final_i = encoded_nelts - npatterns + pattern;
10261 return VECTOR_CST_ENCODED_ELT (t, final_i);
10264 /* Otherwise work out the value from the last two encoded elements. */
10265 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
10266 vector_cst_int_elt (t, i));
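/* A worked example of the encoding arithmetic above, assuming a stepped
   VECTOR_CST representing the series { 1, 4, 7, 10, 13, 16, ... } with
   NPATTERNS == 1 and encoded elements { 1, 4, 7 }: for I == 5 we get
   FINAL_I == 2, DIFF == 7 - 4 == 3 and COUNT == 5, so the result is
   7 + (5 - 2) * 3 == 16, which matches element 5 of the series.  */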
10269 /* Given an initializer INIT, return TRUE if INIT is zero or some
10270 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10271 null, set *NONZERO if and only if INIT is known not to be all
10272 zeros. A return value of false combined with *NONZERO being false
10273 implies that INIT may, but need not, be all zeros. Other
10274 combinations indicate definitive answers. */
10276 bool
10277 initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
10279 bool dummy;
10280 if (!nonzero)
10281 nonzero = &dummy;
10283 /* Conservatively clear NONZERO and set it only if INIT is definitely
10284 not all zero. */
10285 *nonzero = false;
10287 STRIP_NOPS (init);
10289 unsigned HOST_WIDE_INT off = 0;
10291 switch (TREE_CODE (init))
10293 case INTEGER_CST:
10294 if (integer_zerop (init))
10295 return true;
10297 *nonzero = true;
10298 return false;
10300 case REAL_CST:
10301 /* ??? Note that this is not correct for C4X float formats. There,
10302 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10303 negative exponent. */
10304 if (real_zerop (init)
10305 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
10306 return true;
10308 *nonzero = true;
10309 return false;
10311 case FIXED_CST:
10312 if (fixed_zerop (init))
10313 return true;
10315 *nonzero = true;
10316 return false;
10318 case COMPLEX_CST:
10319 if (integer_zerop (init)
10320 || (real_zerop (init)
10321 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10322 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
10323 return true;
10325 *nonzero = true;
10326 return false;
10328 case VECTOR_CST:
10329 if (VECTOR_CST_NPATTERNS (init) == 1
10330 && VECTOR_CST_DUPLICATE_P (init)
10331 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
10332 return true;
10334 *nonzero = true;
10335 return false;
10337 case CONSTRUCTOR:
10339 if (TREE_CLOBBER_P (init))
10340 return false;
10342 unsigned HOST_WIDE_INT idx;
10343 tree elt;
10345 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10346 if (!initializer_zerop (elt, nonzero))
10347 return false;
10349 return true;
10352 case MEM_REF:
10354 tree arg = TREE_OPERAND (init, 0);
10355 if (TREE_CODE (arg) != ADDR_EXPR)
10356 return false;
10357 tree offset = TREE_OPERAND (init, 1);
10358 if (TREE_CODE (offset) != INTEGER_CST
10359 || !tree_fits_uhwi_p (offset))
10360 return false;
10361 off = tree_to_uhwi (offset);
10362 if (INT_MAX < off)
10363 return false;
10364 arg = TREE_OPERAND (arg, 0);
10365 if (TREE_CODE (arg) != STRING_CST)
10366 return false;
10367 init = arg;
10369 /* Fall through. */
10371 case STRING_CST:
10373 gcc_assert (off <= INT_MAX);
10375 int i = off;
10376 int n = TREE_STRING_LENGTH (init);
10377 if (n <= i)
10378 return false;
10380 /* We need to loop through all elements to handle cases like
10381 "\0" and "\0foobar". */
10382 for (i = 0; i < n; ++i)
10383 if (TREE_STRING_POINTER (init)[i] != '\0')
10385 *nonzero = true;
10386 return false;
10389 return true;
10392 default:
10393 return false;
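/* A minimal usage sketch, assuming INIT is an initializer tree provided by
   a front end; NONZERO distinguishes "known nonzero" from "unknown".  */
#if 0
  bool known_nonzero;
  if (initializer_zerop (init, &known_nonzero))
    ;	/* INIT is zero or an aggregate of zeros.  */
  else if (known_nonzero)
    ;	/* INIT is known not to be all zeros.  */
  else
    ;	/* INIT may or may not be all zeros.  */
#endif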
10397 /* Return true if EXPR is an initializer expression in which every element
10398 is a constant that is numerically equal to 0 or 1. The elements do not
10399 need to be equal to each other. */
10401 bool
10402 initializer_each_zero_or_onep (const_tree expr)
10404 STRIP_ANY_LOCATION_WRAPPER (expr);
10406 switch (TREE_CODE (expr))
10408 case INTEGER_CST:
10409 return integer_zerop (expr) || integer_onep (expr);
10411 case REAL_CST:
10412 return real_zerop (expr) || real_onep (expr);
10414 case VECTOR_CST:
10416 unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
10417 if (VECTOR_CST_STEPPED_P (expr)
10418 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
10419 return false;
10421 for (unsigned int i = 0; i < nelts; ++i)
10423 tree elt = vector_cst_elt (expr, i);
10424 if (!initializer_each_zero_or_onep (elt))
10425 return false;
10428 return true;
10431 default:
10432 return false;
10436 /* Check whether vector VEC consists entirely of equal elements and that
10437 the number of elements corresponds to the type of VEC.
10438 The function returns the first element of the vector,
10439 or NULL_TREE if the vector is not uniform. */
10440 tree
10441 uniform_vector_p (const_tree vec)
10443 tree first, t;
10444 unsigned HOST_WIDE_INT i, nelts;
10446 if (vec == NULL_TREE)
10447 return NULL_TREE;
10449 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10451 if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
10452 return TREE_OPERAND (vec, 0);
10454 else if (TREE_CODE (vec) == VECTOR_CST)
10456 if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
10457 return VECTOR_CST_ENCODED_ELT (vec, 0);
10458 return NULL_TREE;
10461 else if (TREE_CODE (vec) == CONSTRUCTOR
10462 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
10464 first = error_mark_node;
10466 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10468 if (i == 0)
10470 first = t;
10471 continue;
10473 if (!operand_equal_p (first, t, 0))
10474 return NULL_TREE;
10476 if (i != nelts)
10477 return NULL_TREE;
10479 if (TREE_CODE (first) == CONSTRUCTOR || TREE_CODE (first) == VECTOR_CST)
10480 return uniform_vector_p (first);
10481 return first;
10484 return NULL_TREE;
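/* A usage sketch, assuming VEC is a vector-typed tree: for a VECTOR_CST
   such as { 3, 3, 3, 3 } the call returns the INTEGER_CST 3, whereas for
   { 3, 1, 3, 1 } it returns NULL_TREE.  */
#if 0
  if (tree elt = uniform_vector_p (vec))
    ;	/* Every element of VEC is known to equal ELT.  */
#endif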
10487 /* If the argument is an INTEGER_CST, return it. If the argument is a
10488 vector with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10489 return NULL_TREE.
10490 Look through location wrappers. */
10492 tree
10493 uniform_integer_cst_p (tree t)
10495 STRIP_ANY_LOCATION_WRAPPER (t);
10497 if (TREE_CODE (t) == INTEGER_CST)
10498 return t;
10500 if (VECTOR_TYPE_P (TREE_TYPE (t)))
10502 t = uniform_vector_p (t);
10503 if (t && TREE_CODE (t) == INTEGER_CST)
10504 return t;
10507 return NULL_TREE;
10510 /* Check whether T is an integer constant or an integer constant vector in
10511 which every element E satisfies ~E + 1 == pow2; if so return ~E (element-wise), otherwise NULL_TREE. */
10513 tree
10514 bitmask_inv_cst_vector_p (tree t)
10517 tree_code code = TREE_CODE (t);
10518 tree type = TREE_TYPE (t);
10520 if (!INTEGRAL_TYPE_P (type)
10521 && !VECTOR_INTEGER_TYPE_P (type))
10522 return NULL_TREE;
10524 unsigned HOST_WIDE_INT nelts = 1;
10525 tree cst;
10526 unsigned int idx = 0;
10527 bool uniform = uniform_integer_cst_p (t);
10528 tree newtype = unsigned_type_for (type);
10529 tree_vector_builder builder;
10530 if (code == INTEGER_CST)
10531 cst = t;
10532 else
10534 if (!VECTOR_CST_NELTS (t).is_constant (&nelts))
10535 return NULL_TREE;
10537 cst = vector_cst_elt (t, 0);
10538 builder.new_vector (newtype, nelts, 1);
10541 tree ty = unsigned_type_for (TREE_TYPE (cst));
10545 if (idx > 0)
10546 cst = vector_cst_elt (t, idx);
10547 wide_int icst = wi::to_wide (cst);
10548 wide_int inv = wi::bit_not (icst);
10549 icst = wi::add (1, inv);
10550 if (wi::popcount (icst) != 1)
10551 return NULL_TREE;
10553 tree newcst = wide_int_to_tree (ty, inv);
10555 if (uniform)
10556 return build_uniform_cst (newtype, newcst);
10558 builder.quick_push (newcst);
10560 while (++idx < nelts);
10562 return builder.build ();
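/* A worked example for 32-bit elements: E == 0xfffffff0 gives ~E == 0xf and
   ~E + 1 == 0x10, a power of two, so 0xf is returned in the corresponding
   unsigned type; E == 0xfffffff5 gives ~E + 1 == 0xb, which is not a power
   of two, so NULL_TREE is returned.  */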
10565 /* If VECTOR_CST T has a single nonzero element, return the index of that
10566 element, otherwise return -1. */
10568 int
10569 single_nonzero_element (const_tree t)
10571 unsigned HOST_WIDE_INT nelts;
10572 unsigned int repeat_nelts;
10573 if (VECTOR_CST_NELTS (t).is_constant (&nelts))
10574 repeat_nelts = nelts;
10575 else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
10577 nelts = vector_cst_encoded_nelts (t);
10578 repeat_nelts = VECTOR_CST_NPATTERNS (t);
10580 else
10581 return -1;
10583 int res = -1;
10584 for (unsigned int i = 0; i < nelts; ++i)
10586 tree elt = vector_cst_elt (t, i);
10587 if (!integer_zerop (elt) && !real_zerop (elt))
10589 if (res >= 0 || i >= repeat_nelts)
10590 return -1;
10591 res = i;
10594 return res;
10597 /* Build an empty statement at location LOC. */
10599 tree
10600 build_empty_stmt (location_t loc)
10602 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10603 SET_EXPR_LOCATION (t, loc);
10604 return t;
10608 /* Build an OMP clause with code CODE. LOC is the location of the
10609 clause. */
10611 tree
10612 build_omp_clause (location_t loc, enum omp_clause_code code)
10614 tree t;
10615 int size, length;
10617 length = omp_clause_num_ops[code];
10618 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10620 record_node_allocation_statistics (OMP_CLAUSE, size);
10622 t = (tree) ggc_internal_alloc (size);
10623 memset (t, 0, size);
10624 TREE_SET_CODE (t, OMP_CLAUSE);
10625 OMP_CLAUSE_SET_CODE (t, code);
10626 OMP_CLAUSE_LOCATION (t) = loc;
10628 return t;
10631 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10632 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10633 Except for the CODE and operand count field, other storage for the
10634 object is initialized to zeros. */
10636 tree
10637 build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
10639 tree t;
10640 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10642 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10643 gcc_assert (len >= 1);
10645 record_node_allocation_statistics (code, length);
10647 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10649 TREE_SET_CODE (t, code);
10651 /* Can't use TREE_OPERAND to store the length because if checking is
10652 enabled, it will try to check the length before we store it. :-P */
10653 t->exp.operands[0] = build_int_cst (sizetype, len);
10655 return t;
10658 /* Helper function for build_call_* functions; build a CALL_EXPR with
10659 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10660 the argument slots. */
10662 static tree
10663 build_call_1 (tree return_type, tree fn, int nargs)
10665 tree t;
10667 t = build_vl_exp (CALL_EXPR, nargs + 3);
10668 TREE_TYPE (t) = return_type;
10669 CALL_EXPR_FN (t) = fn;
10670 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10672 return t;
10675 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10676 FN and a null static chain slot. NARGS is the number of call arguments
10677 which are specified as "..." arguments. */
10679 tree
10680 build_call_nary (tree return_type, tree fn, int nargs, ...)
10682 tree ret;
10683 va_list args;
10684 va_start (args, nargs);
10685 ret = build_call_valist (return_type, fn, nargs, args);
10686 va_end (args);
10687 return ret;
10690 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10691 FN and a null static chain slot. NARGS is the number of call arguments
10692 which are specified as a va_list ARGS. */
10694 tree
10695 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10697 tree t;
10698 int i;
10700 t = build_call_1 (return_type, fn, nargs);
10701 for (i = 0; i < nargs; i++)
10702 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10703 process_call_operands (t);
10704 return t;
10707 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10708 FN and a null static chain slot. NARGS is the number of call arguments
10709 which are specified as a tree array ARGS. */
10711 tree
10712 build_call_array_loc (location_t loc, tree return_type, tree fn,
10713 int nargs, const tree *args)
10715 tree t;
10716 int i;
10718 t = build_call_1 (return_type, fn, nargs);
10719 for (i = 0; i < nargs; i++)
10720 CALL_EXPR_ARG (t, i) = args[i];
10721 process_call_operands (t);
10722 SET_EXPR_LOCATION (t, loc);
10723 return t;
10726 /* Like build_call_array, but takes a vec. */
10728 tree
10729 build_call_vec (tree return_type, tree fn, const vec<tree, va_gc> *args)
10731 tree ret, t;
10732 unsigned int ix;
10734 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10735 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10736 CALL_EXPR_ARG (ret, ix) = t;
10737 process_call_operands (ret);
10738 return ret;
10741 /* Conveniently construct a function call expression. FNDECL names the
10742 function to be called and N arguments are passed in the array
10743 ARGARRAY. */
10745 tree
10746 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10748 tree fntype = TREE_TYPE (fndecl);
10749 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10751 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10754 /* Conveniently construct a function call expression. FNDECL names the
10755 function to be called and the arguments are passed in the vector
10756 VEC. */
10758 tree
10759 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10761 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10762 vec_safe_address (vec));
10766 /* Conveniently construct a function call expression. FNDECL names the
10767 function to be called, N is the number of arguments, and the "..."
10768 parameters are the argument expressions. */
10770 tree
10771 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10773 va_list ap;
10774 tree *argarray = XALLOCAVEC (tree, n);
10775 int i;
10777 va_start (ap, n);
10778 for (i = 0; i < n; i++)
10779 argarray[i] = va_arg (ap, tree);
10780 va_end (ap);
10781 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10784 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10785 varargs macros aren't supported by all bootstrap compilers. */
10787 tree
10788 build_call_expr (tree fndecl, int n, ...)
10790 va_list ap;
10791 tree *argarray = XALLOCAVEC (tree, n);
10792 int i;
10794 va_start (ap, n);
10795 for (i = 0; i < n; i++)
10796 argarray[i] = va_arg (ap, tree);
10797 va_end (ap);
10798 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
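/* A usage sketch, assuming DST, SRC and LEN are trees of the types expected
   by memcpy: build a three-argument call to the explicit memcpy builtin.  */
#if 0
  tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
  tree call = build_call_expr (fndecl, 3, dst, src, len);
#endif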
10801 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10802 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10803 It will get gimplified later into an ordinary internal function. */
10805 tree
10806 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
10807 tree type, int n, const tree *args)
10809 tree t = build_call_1 (type, NULL_TREE, n);
10810 for (int i = 0; i < n; ++i)
10811 CALL_EXPR_ARG (t, i) = args[i];
10812 SET_EXPR_LOCATION (t, loc);
10813 CALL_EXPR_IFN (t) = ifn;
10814 process_call_operands (t);
10815 return t;
10818 /* Build an internal call expression. This is just like CALL_EXPR, except
10819 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10820 internal function. */
10822 tree
10823 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10824 tree type, int n, ...)
10826 va_list ap;
10827 tree *argarray = XALLOCAVEC (tree, n);
10828 int i;
10830 va_start (ap, n);
10831 for (i = 0; i < n; i++)
10832 argarray[i] = va_arg (ap, tree);
10833 va_end (ap);
10834 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10837 /* Return a function call to FN, if the target is guaranteed to support it,
10838 or null otherwise.
10840 N is the number of arguments, passed in the "...", and TYPE is the
10841 type of the return value. */
10843 tree
10844 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
10845 int n, ...)
10847 va_list ap;
10848 tree *argarray = XALLOCAVEC (tree, n);
10849 int i;
10851 va_start (ap, n);
10852 for (i = 0; i < n; i++)
10853 argarray[i] = va_arg (ap, tree);
10854 va_end (ap);
10855 if (internal_fn_p (fn))
10857 internal_fn ifn = as_internal_fn (fn);
10858 if (direct_internal_fn_p (ifn))
10860 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
10861 if (!direct_internal_fn_supported_p (ifn, types,
10862 OPTIMIZE_FOR_BOTH))
10863 return NULL_TREE;
10865 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
10867 else
10869 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
10870 if (!fndecl)
10871 return NULL_TREE;
10872 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10876 /* Return a function call to the appropriate builtin alloca variant.
10878 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10879 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10880 bound for SIZE in case it is not a fixed value. */
10882 tree
10883 build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
10885 if (max_size >= 0)
10887 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
10888 return
10889 build_call_expr (t, 3, size, size_int (align), size_int (max_size));
10891 else if (align > 0)
10893 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10894 return build_call_expr (t, 2, size, size_int (align));
10896 else
10898 tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
10899 return build_call_expr (t, 1, size);
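/* A usage sketch: allocate 64 bytes with no particular alignment and no
   known upper bound, which selects the plain __builtin_alloca variant.  */
#if 0
  tree call = build_alloca_call_expr (size_int (64), /*align=*/0,
				      /*max_size=*/-1);
#endif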
10903 /* The built-in decl to use to mark code points believed to be unreachable.
10904 Typically __builtin_unreachable, but __builtin_trap if
10905 -fsanitize=unreachable -fsanitize-trap=unreachable. If only
10906 -fsanitize=unreachable, we rely on sanopt to replace calls with the
10907 appropriate ubsan function. When building a call directly, use
10908 {gimple_,}build_builtin_unreachable instead. */
10910 tree
10911 builtin_decl_unreachable ()
10913 enum built_in_function fncode = BUILT_IN_UNREACHABLE;
10915 if (sanitize_flags_p (SANITIZE_UNREACHABLE)
10916 ? (flag_sanitize_trap & SANITIZE_UNREACHABLE)
10917 : flag_unreachable_traps)
10918 fncode = BUILT_IN_UNREACHABLE_TRAP;
10919 /* For non-trapping sanitize, we will rewrite __builtin_unreachable () later,
10920 in the sanopt pass. */
10922 return builtin_decl_explicit (fncode);
10925 /* Build a call to __builtin_unreachable, possibly rewritten by
10926 -fsanitize=unreachable. Use this rather than the above when practical. */
10928 tree
10929 build_builtin_unreachable (location_t loc)
10931 tree data = NULL_TREE;
10932 tree fn = sanitize_unreachable_fn (&data, loc);
10933 return build_call_expr_loc (loc, fn, data != NULL_TREE, data);
10936 /* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
10937 if SIZE == -1) and return a tree node representing a char* pointer to
10938 it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)). When STR is nonnull
10939 the STRING_CST value is the LEN bytes at STR (the representation
10940 of the string, which may be wide). Otherwise it's all zeros. */
10942 tree
10943 build_string_literal (unsigned len, const char *str /* = NULL */,
10944 tree eltype /* = char_type_node */,
10945 unsigned HOST_WIDE_INT size /* = -1 */)
10947 tree t = build_string (len, str);
10948 /* Set the maximum valid index based on the string length or SIZE. */
10949 unsigned HOST_WIDE_INT maxidx
10950 = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
10952 tree index = build_index_type (size_int (maxidx));
10953 eltype = build_type_variant (eltype, 1, 0);
10954 tree type = build_array_type (eltype, index);
10955 TREE_TYPE (t) = type;
10956 TREE_CONSTANT (t) = 1;
10957 TREE_READONLY (t) = 1;
10958 TREE_STATIC (t) = 1;
10960 type = build_pointer_type (eltype);
10961 t = build1 (ADDR_EXPR, type,
10962 build4 (ARRAY_REF, eltype,
10963 t, integer_zero_node, NULL_TREE, NULL_TREE));
10964 return t;
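/* A usage sketch: build a "const char *" pointing to the 3-byte constant
   "pi" (including the terminating NUL).  */
#if 0
  tree str_addr = build_string_literal (3, "pi");
#endif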
10969 /* Return true if T (assumed to be a DECL) must be assigned a memory
10970 location. */
10972 bool
10973 needs_to_live_in_memory (const_tree t)
10975 return (TREE_ADDRESSABLE (t)
10976 || is_global_var (t)
10977 || (TREE_CODE (t) == RESULT_DECL
10978 && !DECL_BY_REFERENCE (t)
10979 && aggregate_value_p (t, current_function_decl)));
10982 /* Return value of a constant X and sign-extend it. */
10984 HOST_WIDE_INT
10985 int_cst_value (const_tree x)
10987 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10988 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10990 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10991 gcc_assert (cst_and_fits_in_hwi (x));
10993 if (bits < HOST_BITS_PER_WIDE_INT)
10995 bool negative = ((val >> (bits - 1)) & 1) != 0;
10996 if (negative)
10997 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
10998 else
10999 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11002 return val;
11005 /* If TYPE is an integral or pointer type, return an integer type with
11006 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11007 if TYPE is already an integer type of signedness UNSIGNEDP.
11008 If TYPE is a floating-point type, return an integer type with the same
11009 bitsize and with the signedness given by UNSIGNEDP; this is useful
11010 when doing bit-level operations on a floating-point value. */
11012 tree
11013 signed_or_unsigned_type_for (int unsignedp, tree type)
11015 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
11016 return type;
11018 if (TREE_CODE (type) == VECTOR_TYPE)
11020 tree inner = TREE_TYPE (type);
11021 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11022 if (!inner2)
11023 return NULL_TREE;
11024 if (inner == inner2)
11025 return type;
11026 machine_mode new_mode;
11027 if (VECTOR_MODE_P (TYPE_MODE (type))
11028 && related_int_vector_mode (TYPE_MODE (type)).exists (&new_mode))
11029 return build_vector_type_for_mode (inner2, new_mode);
11030 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11033 if (TREE_CODE (type) == COMPLEX_TYPE)
11035 tree inner = TREE_TYPE (type);
11036 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11037 if (!inner2)
11038 return NULL_TREE;
11039 if (inner == inner2)
11040 return type;
11041 return build_complex_type (inner2);
11044 unsigned int bits;
11045 if (INTEGRAL_TYPE_P (type)
11046 || POINTER_TYPE_P (type)
11047 || TREE_CODE (type) == OFFSET_TYPE)
11048 bits = TYPE_PRECISION (type);
11049 else if (TREE_CODE (type) == REAL_TYPE)
11050 bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
11051 else
11052 return NULL_TREE;
11054 return build_nonstandard_integer_type (bits, unsignedp);
11057 /* If TYPE is an integral or pointer type, return an integer type with
11058 the same precision which is unsigned, or itself if TYPE is already an
11059 unsigned integer type. If TYPE is a floating-point type, return an
11060 unsigned integer type with the same bitsize as TYPE. */
11062 tree
11063 unsigned_type_for (tree type)
11065 return signed_or_unsigned_type_for (1, type);
11068 /* If TYPE is an integral or pointer type, return an integer type with
11069 the same precision which is signed, or itself if TYPE is already a
11070 signed integer type. If TYPE is a floating-point type, return a
11071 signed integer type with the same bitsize as TYPE. */
11073 tree
11074 signed_type_for (tree type)
11076 return signed_or_unsigned_type_for (0, type);
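/* A usage sketch: for a pointer type the result is an integer type of the
   same precision, so this yields an unsigned integer type as wide as a
   pointer.  */
#if 0
  tree uintptr_like = unsigned_type_for (ptr_type_node);
#endif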
11079 /* - For VECTOR_TYPEs:
11080 - The truth type must be a VECTOR_BOOLEAN_TYPE.
11081 - The number of elements must match (known_eq).
11082 - targetm.vectorize.get_mask_mode must exist and return exactly
11083 the same mode as the truth type.
11084 - Otherwise, the truth type must be a BOOLEAN_TYPE
11085 or useless_type_conversion_p to BOOLEAN_TYPE. */
11086 bool
11087 is_truth_type_for (tree type, tree truth_type)
11089 machine_mode mask_mode = TYPE_MODE (truth_type);
11090 machine_mode vmode = TYPE_MODE (type);
11091 machine_mode tmask_mode;
11093 if (TREE_CODE (type) == VECTOR_TYPE)
11095 if (VECTOR_BOOLEAN_TYPE_P (truth_type)
11096 && known_eq (TYPE_VECTOR_SUBPARTS (type),
11097 TYPE_VECTOR_SUBPARTS (truth_type))
11098 && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
11099 && tmask_mode == mask_mode)
11100 return true;
11102 return false;
11105 return useless_type_conversion_p (boolean_type_node, truth_type);
11108 /* If TYPE is a vector type, return the corresponding boolean (truth)
11109 vector type with the same number of subparts. Otherwise return boolean_type_node. */
11111 tree
11112 truth_type_for (tree type)
11114 if (TREE_CODE (type) == VECTOR_TYPE)
11116 if (VECTOR_BOOLEAN_TYPE_P (type))
11117 return type;
11118 return build_truth_vector_type_for (type);
11120 else
11121 return boolean_type_node;
11124 /* Returns the largest value obtainable by casting something in INNER type to
11125 OUTER type. */
11127 tree
11128 upper_bound_in_type (tree outer, tree inner)
11130 unsigned int det = 0;
11131 unsigned oprec = TYPE_PRECISION (outer);
11132 unsigned iprec = TYPE_PRECISION (inner);
11133 unsigned prec;
11135 /* Compute a unique number for every combination. */
11136 det |= (oprec > iprec) ? 4 : 0;
11137 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11138 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11140 /* Determine the exponent to use. */
11141 switch (det)
11143 case 0:
11144 case 1:
11145 /* oprec <= iprec, outer: signed, inner: don't care. */
11146 prec = oprec - 1;
11147 break;
11148 case 2:
11149 case 3:
11150 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11151 prec = oprec;
11152 break;
11153 case 4:
11154 /* oprec > iprec, outer: signed, inner: signed. */
11155 prec = iprec - 1;
11156 break;
11157 case 5:
11158 /* oprec > iprec, outer: signed, inner: unsigned. */
11159 prec = iprec;
11160 break;
11161 case 6:
11162 /* oprec > iprec, outer: unsigned, inner: signed. */
11163 prec = oprec;
11164 break;
11165 case 7:
11166 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11167 prec = iprec;
11168 break;
11169 default:
11170 gcc_unreachable ();
11173 return wide_int_to_tree (outer,
11174 wi::mask (prec, false, TYPE_PRECISION (outer)));
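/* A worked example: for OUTER == signed char and INNER == int we have
   OPREC == 8 <= IPREC == 32 with OUTER signed, so DET is 0 or 1 and
   PREC == 7, giving the value 0x7f == 127.  With an unsigned char OUTER
   the DET == 2/3 case applies, PREC == 8, and the result is 0xff.  */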
11177 /* Returns the smallest value obtainable by casting something in INNER type to
11178 OUTER type. */
11180 tree
11181 lower_bound_in_type (tree outer, tree inner)
11183 unsigned oprec = TYPE_PRECISION (outer);
11184 unsigned iprec = TYPE_PRECISION (inner);
11186 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11187 and obtain 0. */
11188 if (TYPE_UNSIGNED (outer)
11189 /* If we are widening something of an unsigned type, OUTER type
11190 contains all values of INNER type. In particular, both INNER
11191 and OUTER types have zero in common. */
11192 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11193 return build_int_cst (outer, 0);
11194 else
11196 /* If we are widening a signed type to another signed type, we
11197 want to obtain -2^^(iprec-1). If we are keeping the
11198 precision or narrowing to a signed type, we want to obtain
11199 -2^(oprec-1). */
11200 unsigned prec = oprec > iprec ? iprec : oprec;
11201 return wide_int_to_tree (outer,
11202 wi::mask (prec - 1, true,
11203 TYPE_PRECISION (outer)));
11207 /* Return nonzero if two operands that are suitable for PHI nodes are
11208 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11209 SSA_NAME or invariant. Note that this is strictly an optimization.
11210 That is, callers of this function can directly call operand_equal_p
11211 and get the same result, only slower. */
11213 int
11214 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11216 if (arg0 == arg1)
11217 return 1;
11218 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11219 return 0;
11220 return operand_equal_p (arg0, arg1, 0);
11223 /* Returns the number of zeros at the end of the binary representation of X. */
11225 tree
11226 num_ending_zeros (const_tree x)
11228 return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
11232 #define WALK_SUBTREE(NODE) \
11233 do \
11235 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11236 if (result) \
11237 return result; \
11239 while (0)
11241 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
11242 to be walked whenever a type is seen in the tree. The rest of the operands
11243 and return value are as for walk_tree. */
11245 static tree
11246 walk_type_fields (tree type, walk_tree_fn func, void *data,
11247 hash_set<tree> *pset, walk_tree_lh lh)
11249 tree result = NULL_TREE;
11251 switch (TREE_CODE (type))
11253 case POINTER_TYPE:
11254 case REFERENCE_TYPE:
11255 case VECTOR_TYPE:
11256 /* We have to worry about mutually recursive pointers. These can't
11257 be written in C. They can in Ada. It's pathological, but
11258 there's an ACATS test (c38102a) that checks it. Deal with this
11259 by checking if we're pointing to another pointer, that one
11260 points to another pointer, that one does too, and we have no htab.
11261 If so, get a hash table. We check three levels deep to avoid
11262 the cost of the hash table if we don't need one. */
11263 if (POINTER_TYPE_P (TREE_TYPE (type))
11264 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11265 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11266 && !pset)
11268 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11269 func, data);
11270 if (result)
11271 return result;
11273 break;
11276 /* fall through */
11278 case COMPLEX_TYPE:
11279 WALK_SUBTREE (TREE_TYPE (type));
11280 break;
11282 case METHOD_TYPE:
11283 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11285 /* Fall through. */
11287 case FUNCTION_TYPE:
11288 WALK_SUBTREE (TREE_TYPE (type));
11290 tree arg;
11292 /* We never want to walk into default arguments. */
11293 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11294 WALK_SUBTREE (TREE_VALUE (arg));
11296 break;
11298 case ARRAY_TYPE:
11299 /* Don't follow this node's type if it is a pointer, for fear that
11300 we'll have infinite recursion. If we have a PSET, then we
11301 need not fear. */
11302 if (pset
11303 || (!POINTER_TYPE_P (TREE_TYPE (type))
11304 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11305 WALK_SUBTREE (TREE_TYPE (type));
11306 WALK_SUBTREE (TYPE_DOMAIN (type));
11307 break;
11309 case OFFSET_TYPE:
11310 WALK_SUBTREE (TREE_TYPE (type));
11311 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11312 break;
11314 default:
11315 break;
11318 return NULL_TREE;
11321 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11322 called with the DATA and the address of each sub-tree. If FUNC returns a
11323 non-NULL value, the traversal is stopped, and the value returned by FUNC
11324 is returned. If PSET is non-NULL it is used to record the nodes visited,
11325 and to avoid visiting a node more than once. */
11327 tree
11328 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11329 hash_set<tree> *pset, walk_tree_lh lh)
11331 enum tree_code code;
11332 int walk_subtrees;
11333 tree result;
11335 #define WALK_SUBTREE_TAIL(NODE) \
11336 do \
11338 tp = & (NODE); \
11339 goto tail_recurse; \
11341 while (0)
11343 tail_recurse:
11344 /* Skip empty subtrees. */
11345 if (!*tp)
11346 return NULL_TREE;
11348 /* Don't walk the same tree twice, if the user has requested
11349 that we avoid doing so. */
11350 if (pset && pset->add (*tp))
11351 return NULL_TREE;
11353 /* Call the function. */
11354 walk_subtrees = 1;
11355 result = (*func) (tp, &walk_subtrees, data);
11357 /* If we found something, return it. */
11358 if (result)
11359 return result;
11361 code = TREE_CODE (*tp);
11363 /* Even if we didn't, FUNC may have decided that there was nothing
11364 interesting below this point in the tree. */
11365 if (!walk_subtrees)
11367 /* But we still need to check our siblings. */
11368 if (code == TREE_LIST)
11369 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11370 else if (code == OMP_CLAUSE)
11371 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11372 else
11373 return NULL_TREE;
11376 if (lh)
11378 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11379 if (result || !walk_subtrees)
11380 return result;
11383 switch (code)
11385 case ERROR_MARK:
11386 case IDENTIFIER_NODE:
11387 case INTEGER_CST:
11388 case REAL_CST:
11389 case FIXED_CST:
11390 case STRING_CST:
11391 case BLOCK:
11392 case PLACEHOLDER_EXPR:
11393 case SSA_NAME:
11394 case FIELD_DECL:
11395 case RESULT_DECL:
11396 /* None of these have subtrees other than those already walked
11397 above. */
11398 break;
11400 case TREE_LIST:
11401 WALK_SUBTREE (TREE_VALUE (*tp));
11402 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11404 case TREE_VEC:
11406 int len = TREE_VEC_LENGTH (*tp);
11408 if (len == 0)
11409 break;
11411 /* Walk all elements but the last. */
11412 for (int i = 0; i < len - 1; ++i)
11413 WALK_SUBTREE (TREE_VEC_ELT (*tp, i));
11415 /* Now walk the last one as a tail call. */
11416 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, len - 1));
11419 case VECTOR_CST:
11421 unsigned len = vector_cst_encoded_nelts (*tp);
11422 if (len == 0)
11423 break;
11424 /* Walk all elements but the last. */
11425 for (unsigned i = 0; i < len - 1; ++i)
11426 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, i));
11427 /* Now walk the last one as a tail call. */
11428 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, len - 1));
11431 case COMPLEX_CST:
11432 WALK_SUBTREE (TREE_REALPART (*tp));
11433 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11435 case CONSTRUCTOR:
11437 unsigned HOST_WIDE_INT idx;
11438 constructor_elt *ce;
11440 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11441 idx++)
11442 WALK_SUBTREE (ce->value);
11444 break;
11446 case SAVE_EXPR:
11447 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11449 case BIND_EXPR:
11451 tree decl;
11452 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11454 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11455 into declarations that are just mentioned, rather than
11456 declared; they don't really belong to this part of the tree.
11457 And, we can see cycles: the initializer for a declaration
11458 can refer to the declaration itself. */
11459 WALK_SUBTREE (DECL_INITIAL (decl));
11460 WALK_SUBTREE (DECL_SIZE (decl));
11461 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11463 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11466 case STATEMENT_LIST:
11468 tree_stmt_iterator i;
11469 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11470 WALK_SUBTREE (*tsi_stmt_ptr (i));
11472 break;
11474 case OMP_CLAUSE:
11476 int len = omp_clause_num_ops[OMP_CLAUSE_CODE (*tp)];
11477 for (int i = 0; i < len; i++)
11478 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11479 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11482 case TARGET_EXPR:
11484 int i, len;
11486 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11487 But, we only want to walk once. */
11488 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11489 for (i = 0; i < len; ++i)
11490 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11491 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11494 case DECL_EXPR:
11495 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11496 defining. We only want to walk into these fields of a type in this
11497 case and not in the general case of a mere reference to the type.
11499 The criterion is as follows: if the field can be an expression, it
11500 must be walked only here. This should be in keeping with the fields
11501 that are directly gimplified in gimplify_type_sizes in order for the
11502 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11503 variable-sized types.
11505 Note that DECLs get walked as part of processing the BIND_EXPR. */
11506 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11508 /* Call the function for the decl so e.g. copy_tree_body_r can
11509 replace it with the remapped one. */
11510 result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
11511 if (result || !walk_subtrees)
11512 return result;
11514 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11515 if (TREE_CODE (*type_p) == ERROR_MARK)
11516 return NULL_TREE;
11518 /* Call the function for the type. See if it returns anything or
11519 doesn't want us to continue. If we are to continue, walk both
11520 the normal fields and those for the declaration case. */
11521 result = (*func) (type_p, &walk_subtrees, data);
11522 if (result || !walk_subtrees)
11523 return result;
11525 /* But do not walk a pointed-to type since it may itself need to
11526 be walked in the declaration case if it isn't anonymous. */
11527 if (!POINTER_TYPE_P (*type_p))
11529 result = walk_type_fields (*type_p, func, data, pset, lh);
11530 if (result)
11531 return result;
11534 /* If this is a record type, also walk the fields. */
11535 if (RECORD_OR_UNION_TYPE_P (*type_p))
11537 tree field;
11539 for (field = TYPE_FIELDS (*type_p); field;
11540 field = DECL_CHAIN (field))
11542 /* We'd like to look at the type of the field, but we can
11543 easily get infinite recursion. So assume it's pointed
11544 to elsewhere in the tree. Also, ignore things that
11545 aren't fields. */
11546 if (TREE_CODE (field) != FIELD_DECL)
11547 continue;
11549 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11550 WALK_SUBTREE (DECL_SIZE (field));
11551 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11552 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11553 WALK_SUBTREE (DECL_QUALIFIER (field));
11557 /* Same for scalar types. */
11558 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11559 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11560 || TREE_CODE (*type_p) == INTEGER_TYPE
11561 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11562 || TREE_CODE (*type_p) == REAL_TYPE)
11564 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11565 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11568 WALK_SUBTREE (TYPE_SIZE (*type_p));
11569 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11571 /* FALLTHRU */
11573 default:
11574 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11576 int i, len;
11578 /* Walk over all the sub-trees of this operand. */
11579 len = TREE_OPERAND_LENGTH (*tp);
11581 /* Go through the subtrees. We need to do this in forward order so
11582 that the scope of a FOR_EXPR is handled properly. */
11583 if (len)
11585 for (i = 0; i < len - 1; ++i)
11586 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11587 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11590 /* If this is a type, walk the needed fields in the type. */
11591 else if (TYPE_P (*tp))
11592 return walk_type_fields (*tp, func, data, pset, lh);
11593 break;
11596 /* We didn't find what we were looking for. */
11597 return NULL_TREE;
11599 #undef WALK_SUBTREE_TAIL
11601 #undef WALK_SUBTREE
11603 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11605 tree
11606 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11607 walk_tree_lh lh)
11609 tree result;
11611 hash_set<tree> pset;
11612 result = walk_tree_1 (tp, func, data, &pset, lh);
11613 return result;
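/* A minimal walk_tree_fn sketch, assuming it is invoked through
   walk_tree_without_duplicates to count the SSA_NAMEs reachable from an
   expression; DATA points to an unsigned counter owned by the caller.  */
#if 0
static tree
count_ssa_names_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (TREE_CODE (*tp) == SSA_NAME)
    ++*(unsigned *) data;
  return NULL_TREE;	/* NULL keeps the walk going.  */
}

  /* In the caller:  */
  unsigned n_ssa_names = 0;
  walk_tree_without_duplicates (&expr, count_ssa_names_r, &n_ssa_names);
#endif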
11617 tree
11618 tree_block (tree t)
11620 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11622 if (IS_EXPR_CODE_CLASS (c))
11623 return LOCATION_BLOCK (t->exp.locus);
11624 gcc_unreachable ();
11625 return NULL;
11628 void
11629 tree_set_block (tree t, tree b)
11631 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11633 if (IS_EXPR_CODE_CLASS (c))
11635 t->exp.locus = set_block (t->exp.locus, b);
11637 else
11638 gcc_unreachable ();
11641 /* Create a nameless artificial label and put it in the current
11642 function context. The label has a location of LOC. Returns the
11643 newly created label. */
11645 tree
11646 create_artificial_label (location_t loc)
11648 tree lab = build_decl (loc,
11649 LABEL_DECL, NULL_TREE, void_type_node);
11651 DECL_ARTIFICIAL (lab) = 1;
11652 DECL_IGNORED_P (lab) = 1;
11653 DECL_CONTEXT (lab) = current_function_decl;
11654 return lab;
11657 /* Given a tree, try to return a useful variable name that we can use
11658 to prefix a temporary that is being assigned the value of the tree.
11659 I.E. given <temp> = &A, return A. */
11661 const char *
11662 get_name (tree t)
11664 tree stripped_decl;
11666 stripped_decl = t;
11667 STRIP_NOPS (stripped_decl);
11668 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11669 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11670 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11672 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11673 if (!name)
11674 return NULL;
11675 return IDENTIFIER_POINTER (name);
11677 else
11679 switch (TREE_CODE (stripped_decl))
11681 case ADDR_EXPR:
11682 return get_name (TREE_OPERAND (stripped_decl, 0));
11683 default:
11684 return NULL;
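/* A minimal usage sketch (hypothetical caller): derive a readable
   prefix for a compiler-generated temporary from the value it is
   initialized with, falling back to a generic name when get_name
   cannot recover one.  */

static ATTRIBUTE_UNUSED const char *
example_temp_prefix (tree val)
{
  const char *name = get_name (val);
  return name ? name : "tmp";
}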
11691 /* Return true if function type FNTYPE has a variable argument list. */
11691 bool
11692 stdarg_p (const_tree fntype)
11694 function_args_iterator args_iter;
11695 tree n = NULL_TREE, t;
11697 if (!fntype)
11698 return false;
11700 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11701 return true;
11703 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11705 n = t;
11708 return n != NULL_TREE && n != void_type_node;
11713 /* Return true if function type FNTYPE has a prototype. */
11713 bool
11714 prototype_p (const_tree fntype)
11716 tree t;
11718 gcc_assert (fntype != NULL_TREE);
11720 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
11721 return true;
11723 t = TYPE_ARG_TYPES (fntype);
11724 return (t != NULL_TREE);
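/* A minimal usage sketch (hypothetical predicate): call lowering code
   often needs to know whether the callee's parameter list is fully
   known and whether it ends in an ellipsis.  */

static ATTRIBUTE_UNUSED bool
example_needs_va_setup (tree fndecl)
{
  tree fntype = TREE_TYPE (fndecl);
  /* Treat unprototyped functions conservatively; otherwise only true
     stdarg functions need the varargs setup.  */
  if (!prototype_p (fntype))
    return true;
  return stdarg_p (fntype);
}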
11727 /* If BLOCK is inlined from an __attribute__((__artificial__))
11728 routine, return a pointer to the location from which it has been
11729 called. */
11730 location_t *
11731 block_nonartificial_location (tree block)
11733 location_t *ret = NULL;
11735 while (block && TREE_CODE (block) == BLOCK
11736 && BLOCK_ABSTRACT_ORIGIN (block))
11738 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11739 if (TREE_CODE (ao) == FUNCTION_DECL)
11741 /* If AO is an artificial inline, point RET to the
11742 call site locus at which it has been inlined and continue
11743 the loop, in case AO's caller is also an artificial
11744 inline. */
11745 if (DECL_DECLARED_INLINE_P (ao)
11746 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11747 ret = &BLOCK_SOURCE_LOCATION (block);
11748 else
11749 break;
11751 else if (TREE_CODE (ao) != BLOCK)
11752 break;
11754 block = BLOCK_SUPERCONTEXT (block);
11756 return ret;
11760 /* If EXP is inlined from an __attribute__((__artificial__))
11761 function, return the location of the original call expression. */
11763 location_t
11764 tree_nonartificial_location (tree exp)
11766 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11768 if (loc)
11769 return *loc;
11770 else
11771 return EXPR_LOCATION (exp);
11774 /* Return the location into which EXP has been inlined. Analogous
11775 to tree_nonartificial_location() above but not limited to artificial
11776 functions declared inline. If SYSTEM_HEADER is true, return
11777 the macro expansion point of the location if it's in a system header. */
11779 location_t
11780 tree_inlined_location (tree exp, bool system_header /* = true */)
11782 location_t loc = UNKNOWN_LOCATION;
11784 tree block = TREE_BLOCK (exp);
11786 while (block && TREE_CODE (block) == BLOCK
11787 && BLOCK_ABSTRACT_ORIGIN (block))
11789 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11790 if (TREE_CODE (ao) == FUNCTION_DECL)
11791 loc = BLOCK_SOURCE_LOCATION (block);
11792 else if (TREE_CODE (ao) != BLOCK)
11793 break;
11795 block = BLOCK_SUPERCONTEXT (block);
11798 if (loc == UNKNOWN_LOCATION)
11800 loc = EXPR_LOCATION (exp);
11801 if (system_header)
11802 /* Only consider macro expansion when the block traversal failed
11803 to find a location. Otherwise it's not relevant. */
11804 return expansion_point_location_if_in_system_header (loc);
11807 return loc;
11810 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11811 nodes. */
11813 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11815 hashval_t
11816 cl_option_hasher::hash (tree x)
11818 const_tree const t = x;
11820 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11821 return cl_optimization_hash (TREE_OPTIMIZATION (t));
11822 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11823 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11824 else
11825 gcc_unreachable ();
11828 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11829 TARGET_OPTION tree node) is the same as that given by *Y, which is
11830 a tree node of the same kind. */
11832 bool
11833 cl_option_hasher::equal (tree x, tree y)
11835 const_tree const xt = x;
11836 const_tree const yt = y;
11838 if (TREE_CODE (xt) != TREE_CODE (yt))
11839 return 0;
11841 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11842 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
11843 TREE_OPTIMIZATION (yt));
11844 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11845 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11846 TREE_TARGET_OPTION (yt));
11847 else
11848 gcc_unreachable ();
11851 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11853 tree
11854 build_optimization_node (struct gcc_options *opts,
11855 struct gcc_options *opts_set)
11857 tree t;
11859 /* Use the cache of optimization nodes. */
11861 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11862 opts, opts_set);
11864 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11865 t = *slot;
11866 if (!t)
11868 /* Insert this one into the hash table. */
11869 t = cl_optimization_node;
11870 *slot = t;
11872 /* Make a new node for next time round. */
11873 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11876 return t;
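/* A minimal usage sketch: because the scratch node above is only
   committed to the hash table on a cache miss, building a node twice
   from identical options yields pointer-identical trees, so callers
   (and the hypothetical check below) may simply compare with '=='.  */

static ATTRIBUTE_UNUSED bool
example_same_optimization_options (struct gcc_options *opts,
				   struct gcc_options *opts_set,
				   tree cached_node)
{
  return build_optimization_node (opts, opts_set) == cached_node;
}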
11879 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11881 tree
11882 build_target_option_node (struct gcc_options *opts,
11883 struct gcc_options *opts_set)
11885 tree t;
11887 /* Use the cache of target option nodes. */
11889 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11890 opts, opts_set);
11892 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11893 t = *slot;
11894 if (!t)
11896 /* Insert this one into the hash table. */
11897 t = cl_target_option_node;
11898 *slot = t;
11900 /* Make a new node for next time round. */
11901 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11904 return t;
11907 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11908 so that they aren't saved during PCH writing. */
11910 void
11911 prepare_target_option_nodes_for_pch (void)
11913 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11914 for (; iter != cl_option_hash_table->end (); ++iter)
11915 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11916 TREE_TARGET_GLOBALS (*iter) = NULL;
11919 /* Determine the "ultimate origin" of a block. */
11921 tree
11922 block_ultimate_origin (const_tree block)
11924 tree origin = BLOCK_ABSTRACT_ORIGIN (block);
11926 if (origin == NULL_TREE)
11927 return NULL_TREE;
11928 else
11930 gcc_checking_assert ((DECL_P (origin)
11931 && DECL_ORIGIN (origin) == origin)
11932 || BLOCK_ORIGIN (origin) == origin);
11933 return origin;
11937 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11938 no instruction. */
11940 bool
11941 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11943 /* Do not strip casts into or out of differing address spaces. */
11944 if (POINTER_TYPE_P (outer_type)
11945 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
11947 if (!POINTER_TYPE_P (inner_type)
11948 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
11949 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
11950 return false;
11952 else if (POINTER_TYPE_P (inner_type)
11953 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
11955 /* We already know that outer_type is not a pointer with
11956 a non-generic address space. */
11957 return false;
11960 /* Use precision rather than machine mode when we can, which gives
11961 the correct answer even for submode (bit-field) types. */
11962 if ((INTEGRAL_TYPE_P (outer_type)
11963 || POINTER_TYPE_P (outer_type)
11964 || TREE_CODE (outer_type) == OFFSET_TYPE)
11965 && (INTEGRAL_TYPE_P (inner_type)
11966 || POINTER_TYPE_P (inner_type)
11967 || TREE_CODE (inner_type) == OFFSET_TYPE))
11968 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11970 /* Otherwise fall back on comparing machine modes (e.g. for
11971 aggregate types, floats). */
11972 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
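/* A minimal sanity sketch: 'int' and 'unsigned int' have the same
   precision, so converting between them is a no-op by the test above.  */

static ATTRIBUTE_UNUSED void
example_nop_conversion_checks (void)
{
  gcc_checking_assert (tree_nop_conversion_p (unsigned_type_node,
					      integer_type_node));
  gcc_checking_assert (tree_nop_conversion_p (integer_type_node,
					      unsigned_type_node));
}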
11975 /* Return true iff conversion in EXP generates no instruction. Mark
11976 it inline so that we fully inline into the stripping functions even
11977 though we have two uses of this function. */
11979 static inline bool
11980 tree_nop_conversion (const_tree exp)
11982 tree outer_type, inner_type;
11984 if (location_wrapper_p (exp))
11985 return true;
11986 if (!CONVERT_EXPR_P (exp)
11987 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11988 return false;
11990 outer_type = TREE_TYPE (exp);
11991 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11992 if (!inner_type || inner_type == error_mark_node)
11993 return false;
11995 return tree_nop_conversion_p (outer_type, inner_type);
11998 /* Return true iff conversion in EXP generates no instruction. Don't
11999 consider conversions changing the signedness. */
12001 static bool
12002 tree_sign_nop_conversion (const_tree exp)
12004 tree outer_type, inner_type;
12006 if (!tree_nop_conversion (exp))
12007 return false;
12009 outer_type = TREE_TYPE (exp);
12010 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12012 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12013 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12016 /* Strip conversions from EXP according to tree_nop_conversion and
12017 return the resulting expression. */
12019 tree
12020 tree_strip_nop_conversions (tree exp)
12022 while (tree_nop_conversion (exp))
12023 exp = TREE_OPERAND (exp, 0);
12024 return exp;
12027 /* Strip conversions from EXP according to tree_sign_nop_conversion
12028 and return the resulting expression. */
12030 tree
12031 tree_strip_sign_nop_conversions (tree exp)
12033 while (tree_sign_nop_conversion (exp))
12034 exp = TREE_OPERAND (exp, 0);
12035 return exp;
12038 /* Avoid any floating point extensions from EXP. */
12039 tree
12040 strip_float_extensions (tree exp)
12042 tree sub, expt, subt;
12044 /* For a floating point constant, look up the narrowest type that can
12045 hold it properly and handle it like (type)(narrowest_type)constant.
12046 This way we can optimize for instance a = a * 2.0 where "a" is float
12047 but 2.0 is a double constant. */
12048 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12050 REAL_VALUE_TYPE orig;
12051 tree type = NULL;
12053 orig = TREE_REAL_CST (exp);
12054 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12055 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12056 type = float_type_node;
12057 else if (TYPE_PRECISION (TREE_TYPE (exp))
12058 > TYPE_PRECISION (double_type_node)
12059 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12060 type = double_type_node;
12061 if (type)
12062 return build_real_truncate (type, orig);
12065 if (!CONVERT_EXPR_P (exp))
12066 return exp;
12068 sub = TREE_OPERAND (exp, 0);
12069 subt = TREE_TYPE (sub);
12070 expt = TREE_TYPE (exp);
12072 if (!FLOAT_TYPE_P (subt))
12073 return exp;
12075 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12076 return exp;
12078 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12079 return exp;
12081 return strip_float_extensions (sub);
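/* A minimal usage sketch (hypothetical folder): for an operand of
   'float_var * 2.0' the double constant or the widening cast can be
   narrowed through strip_float_extensions, letting the arithmetic stay
   in float.  Only the narrower form is kept when it really is
   narrower.  */

static ATTRIBUTE_UNUSED tree
example_narrow_float_operand (tree op)
{
  tree stripped = strip_float_extensions (op);
  if (stripped != op
      && TYPE_PRECISION (TREE_TYPE (stripped))
	 < TYPE_PRECISION (TREE_TYPE (op)))
    return stripped;
  return op;
}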
12084 /* Strip out all handled components that produce invariant
12085 offsets. */
12087 const_tree
12088 strip_invariant_refs (const_tree op)
12090 while (handled_component_p (op))
12092 switch (TREE_CODE (op))
12094 case ARRAY_REF:
12095 case ARRAY_RANGE_REF:
12096 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12097 || TREE_OPERAND (op, 2) != NULL_TREE
12098 || TREE_OPERAND (op, 3) != NULL_TREE)
12099 return NULL;
12100 break;
12102 case COMPONENT_REF:
12103 if (TREE_OPERAND (op, 2) != NULL_TREE)
12104 return NULL;
12105 break;
12107 default:;
12109 op = TREE_OPERAND (op, 0);
12112 return op;
12115 /* Strip handled components with zero offset from OP. */
12117 tree
12118 strip_zero_offset_components (tree op)
12120 while (TREE_CODE (op) == COMPONENT_REF
12121 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (op, 1)))
12122 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (op, 1))))
12123 op = TREE_OPERAND (op, 0);
12124 return op;
12127 static GTY(()) tree gcc_eh_personality_decl;
12129 /* Return the GCC personality function decl. */
12131 tree
12132 lhd_gcc_personality (void)
12134 if (!gcc_eh_personality_decl)
12135 gcc_eh_personality_decl = build_personality_function ("gcc");
12136 return gcc_eh_personality_decl;
12139 /* TARGET is a call target of GIMPLE call statement
12140 (obtained by gimple_call_fn). Return true if it is
12141 an OBJ_TYPE_REF representing a virtual call to a C++ method.
12142 (As opposed to an OBJ_TYPE_REF representing objc calls
12143 through a cast, where the middle-end devirtualization machinery
12144 can't apply.) FOR_DUMP_P is true when being called from
12145 the dump routines. */
12147 bool
12148 virtual_method_call_p (const_tree target, bool for_dump_p)
12150 if (TREE_CODE (target) != OBJ_TYPE_REF)
12151 return false;
12152 tree t = TREE_TYPE (target);
12153 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12154 t = TREE_TYPE (t);
12155 if (TREE_CODE (t) == FUNCTION_TYPE)
12156 return false;
12157 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12158 /* If we do not have BINFO associated, it means that type was built
12159 without devirtualization enabled. Do not consider this a virtual
12160 call. */
12161 if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
12162 return false;
12163 return true;
12166 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12168 static tree
12169 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12171 unsigned int i;
12172 tree base_binfo, b;
12174 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12175 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12176 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12177 return base_binfo;
12178 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12179 return b;
12180 return NULL;
12183 /* Try to find a base info of BINFO that would have its field decl at offset
12184 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12185 found, return it, otherwise return NULL_TREE. */
12187 tree
12188 get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
12190 tree type = BINFO_TYPE (binfo);
12192 while (true)
12194 HOST_WIDE_INT pos, size;
12195 tree fld;
12196 int i;
12198 if (types_same_for_odr (type, expected_type))
12199 return binfo;
12200 if (maybe_lt (offset, 0))
12201 return NULL_TREE;
12203 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12205 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12206 continue;
12208 pos = int_bit_position (fld);
12209 size = tree_to_uhwi (DECL_SIZE (fld));
12210 if (known_in_range_p (offset, pos, size))
12211 break;
12213 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12214 return NULL_TREE;
12216 /* Offset 0 indicates the primary base, whose vtable contents are
12217 represented in the binfo for the derived class. */
12218 else if (maybe_ne (offset, 0))
12220 tree found_binfo = NULL, base_binfo;
12221 /* Offsets in BINFO are in bytes relative to the whole structure
12222 while POS is in bits relative to the containing field. */
12223 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12224 / BITS_PER_UNIT);
12226 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12227 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12228 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12230 found_binfo = base_binfo;
12231 break;
12233 if (found_binfo)
12234 binfo = found_binfo;
12235 else
12236 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12237 binfo_offset);
12240 type = TREE_TYPE (fld);
12241 offset -= pos;
12245 /* Returns true if X is a typedef decl. */
12247 bool
12248 is_typedef_decl (const_tree x)
12250 return (x && TREE_CODE (x) == TYPE_DECL
12251 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12254 /* Returns true iff TYPE is a type variant created for a typedef. */
12256 bool
12257 typedef_variant_p (const_tree type)
12259 return is_typedef_decl (TYPE_NAME (type));
12262 /* PR 84195: Replace control characters in "unescaped" with their
12263 escaped equivalents. Allow newlines if -fmessage-length has
12264 been set to a non-zero value. This is done here, rather than
12265 where the attribute is recorded, as the message length can
12266 change between these two locations. */
12268 void
12269 escaped_string::escape (const char *unescaped)
12271 char *escaped;
12272 size_t i, new_i, len;
12274 if (m_owned)
12275 free (m_str);
12277 m_str = const_cast<char *> (unescaped);
12278 m_owned = false;
12280 if (unescaped == NULL || *unescaped == 0)
12281 return;
12283 len = strlen (unescaped);
12284 escaped = NULL;
12285 new_i = 0;
12287 for (i = 0; i < len; i++)
12289 char c = unescaped[i];
12291 if (!ISCNTRL (c))
12293 if (escaped)
12294 escaped[new_i++] = c;
12295 continue;
12298 if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
12300 if (escaped == NULL)
12302 /* We only allocate space for a new string if we
12303 actually encounter a control character that
12304 needs replacing. */
12305 escaped = (char *) xmalloc (len * 2 + 1);
12306 strncpy (escaped, unescaped, i);
12307 new_i = i;
12310 escaped[new_i++] = '\\';
12312 switch (c)
12314 case '\a': escaped[new_i++] = 'a'; break;
12315 case '\b': escaped[new_i++] = 'b'; break;
12316 case '\f': escaped[new_i++] = 'f'; break;
12317 case '\n': escaped[new_i++] = 'n'; break;
12318 case '\r': escaped[new_i++] = 'r'; break;
12319 case '\t': escaped[new_i++] = 't'; break;
12320 case '\v': escaped[new_i++] = 'v'; break;
12321 default: escaped[new_i++] = '?'; break;
12324 else if (escaped)
12325 escaped[new_i++] = c;
12328 if (escaped)
12330 escaped[new_i] = 0;
12331 m_str = escaped;
12332 m_owned = true;
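/* A minimal usage sketch (hypothetical diagnostic): escaped_string owns
   the escaped copy only when one had to be allocated, and releases it
   automatically when MSG goes out of scope.  */

static ATTRIBUTE_UNUSED void
example_note_escaped_message (tree decl, const char *raw)
{
  escaped_string msg;
  msg.escape (raw);
  if (msg)
    inform (DECL_SOURCE_LOCATION (decl), "message: %s",
	    (const char *) msg);
}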
12336 /* Warn about a use of an identifier which was marked deprecated. Returns
12337 whether a warning was given. */
12339 bool
12340 warn_deprecated_use (tree node, tree attr)
12342 escaped_string msg;
12344 if (node == 0 || !warn_deprecated_decl)
12345 return false;
12347 if (!attr)
12349 if (DECL_P (node))
12350 attr = DECL_ATTRIBUTES (node);
12351 else if (TYPE_P (node))
12353 tree decl = TYPE_STUB_DECL (node);
12354 if (decl)
12355 attr = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12356 else if ((decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node)))
12357 != NULL_TREE)
12359 node = TREE_TYPE (decl);
12360 attr = TYPE_ATTRIBUTES (node);
12365 if (attr)
12366 attr = lookup_attribute ("deprecated", attr);
12368 if (attr)
12369 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12371 bool w = false;
12372 if (DECL_P (node))
12374 auto_diagnostic_group d;
12375 if (msg)
12376 w = warning (OPT_Wdeprecated_declarations,
12377 "%qD is deprecated: %s", node, (const char *) msg);
12378 else
12379 w = warning (OPT_Wdeprecated_declarations,
12380 "%qD is deprecated", node);
12381 if (w)
12382 inform (DECL_SOURCE_LOCATION (node), "declared here");
12384 else if (TYPE_P (node))
12386 tree what = NULL_TREE;
12387 tree decl = TYPE_STUB_DECL (node);
12389 if (TYPE_NAME (node))
12391 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12392 what = TYPE_NAME (node);
12393 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12394 && DECL_NAME (TYPE_NAME (node)))
12395 what = DECL_NAME (TYPE_NAME (node));
12398 auto_diagnostic_group d;
12399 if (what)
12401 if (msg)
12402 w = warning (OPT_Wdeprecated_declarations,
12403 "%qE is deprecated: %s", what, (const char *) msg);
12404 else
12405 w = warning (OPT_Wdeprecated_declarations,
12406 "%qE is deprecated", what);
12408 else
12410 if (msg)
12411 w = warning (OPT_Wdeprecated_declarations,
12412 "type is deprecated: %s", (const char *) msg);
12413 else
12414 w = warning (OPT_Wdeprecated_declarations,
12415 "type is deprecated");
12418 if (w && decl)
12419 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12422 return w;
12425 /* Error out with an identifier which was marked 'unavailable'. */
12426 void
12427 error_unavailable_use (tree node, tree attr)
12429 escaped_string msg;
12431 if (node == 0)
12432 return;
12434 if (!attr)
12436 if (DECL_P (node))
12437 attr = DECL_ATTRIBUTES (node);
12438 else if (TYPE_P (node))
12440 tree decl = TYPE_STUB_DECL (node);
12441 if (decl)
12442 attr = lookup_attribute ("unavailable",
12443 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12447 if (attr)
12448 attr = lookup_attribute ("unavailable", attr);
12450 if (attr)
12451 msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
12453 if (DECL_P (node))
12455 auto_diagnostic_group d;
12456 if (msg)
12457 error ("%qD is unavailable: %s", node, (const char *) msg);
12458 else
12459 error ("%qD is unavailable", node);
12460 inform (DECL_SOURCE_LOCATION (node), "declared here");
12462 else if (TYPE_P (node))
12464 tree what = NULL_TREE;
12465 tree decl = TYPE_STUB_DECL (node);
12467 if (TYPE_NAME (node))
12469 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12470 what = TYPE_NAME (node);
12471 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12472 && DECL_NAME (TYPE_NAME (node)))
12473 what = DECL_NAME (TYPE_NAME (node));
12476 auto_diagnostic_group d;
12477 if (what)
12479 if (msg)
12480 error ("%qE is unavailable: %s", what, (const char *) msg);
12481 else
12482 error ("%qE is unavailable", what);
12484 else
12486 if (msg)
12487 error ("type is unavailable: %s", (const char *) msg);
12488 else
12489 error ("type is unavailable");
12492 if (decl)
12493 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12497 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12498 somewhere in it. */
12500 bool
12501 contains_bitfld_component_ref_p (const_tree ref)
12503 while (handled_component_p (ref))
12505 if (TREE_CODE (ref) == COMPONENT_REF
12506 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12507 return true;
12508 ref = TREE_OPERAND (ref, 0);
12511 return false;
12514 /* Try to determine whether a TRY_CATCH expression can fall through.
12515 This is a subroutine of block_may_fallthru. */
12517 static bool
12518 try_catch_may_fallthru (const_tree stmt)
12520 tree_stmt_iterator i;
12522 /* If the TRY block can fall through, the whole TRY_CATCH can
12523 fall through. */
12524 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12525 return true;
12527 i = tsi_start (TREE_OPERAND (stmt, 1));
12528 switch (TREE_CODE (tsi_stmt (i)))
12530 case CATCH_EXPR:
12531 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12532 catch expression and a body. The whole TRY_CATCH may fall
12533 through iff any of the catch bodies falls through. */
12534 for (; !tsi_end_p (i); tsi_next (&i))
12536 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12537 return true;
12539 return false;
12541 case EH_FILTER_EXPR:
12542 /* The exception filter expression only matters if there is an
12543 exception. If the exception does not match EH_FILTER_TYPES,
12544 we will execute EH_FILTER_FAILURE, and we will fall through
12545 if that falls through. If the exception does match
12546 EH_FILTER_TYPES, the stack unwinder will continue up the
12547 stack, so we will not fall through. We don't know whether we
12548 will throw an exception which matches EH_FILTER_TYPES or not,
12549 so we just ignore EH_FILTER_TYPES and assume that we might
12550 throw an exception which doesn't match. */
12551 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12553 default:
12554 /* This case represents statements to be executed when an
12555 exception occurs. Those statements are implicitly followed
12556 by a RESX statement to resume execution after the exception.
12557 So in this case the TRY_CATCH never falls through. */
12558 return false;
12562 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12563 need not be 100% accurate; simply be conservative and return true if we
12564 don't know. This is used only to avoid stupidly generating extra code.
12565 If we're wrong, we'll just delete the extra code later. */
12567 bool
12568 block_may_fallthru (const_tree block)
12570 /* This CONST_CAST is okay because expr_last returns its argument
12571 unmodified and we assign it to a const_tree. */
12572 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12574 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12576 case GOTO_EXPR:
12577 case RETURN_EXPR:
12578 /* Easy cases. If the last statement of the block implies
12579 control transfer, then we can't fall through. */
12580 return false;
12582 case SWITCH_EXPR:
12583 /* If there is a default: label or case labels cover all possible
12584 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12585 to some case label in all cases and all we care is whether the
12586 SWITCH_BODY falls through. */
12587 if (SWITCH_ALL_CASES_P (stmt))
12588 return block_may_fallthru (SWITCH_BODY (stmt));
12589 return true;
12591 case COND_EXPR:
12592 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12593 return true;
12594 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12596 case BIND_EXPR:
12597 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12599 case TRY_CATCH_EXPR:
12600 return try_catch_may_fallthru (stmt);
12602 case TRY_FINALLY_EXPR:
12603 /* The finally clause is always executed after the try clause,
12604 so if it does not fall through, then the try-finally will not
12605 fall through. Otherwise, if the try clause does not fall
12606 through, then when the finally clause falls through it will
12607 resume execution wherever the try clause was going. So the
12608 whole try-finally will only fall through if both the try
12609 clause and the finally clause fall through. */
12610 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12611 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12613 case EH_ELSE_EXPR:
12614 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12616 case MODIFY_EXPR:
12617 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12618 stmt = TREE_OPERAND (stmt, 1);
12619 else
12620 return true;
12621 /* FALLTHRU */
12623 case CALL_EXPR:
12624 /* Functions that do not return do not fall through. */
12625 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12627 case CLEANUP_POINT_EXPR:
12628 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12630 case TARGET_EXPR:
12631 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12633 case ERROR_MARK:
12634 return true;
12636 default:
12637 return lang_hooks.block_may_fallthru (stmt);
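/* A minimal usage sketch (hypothetical front-end helper): an implicit
   return (or a "control reaches end of non-void function" warning) is
   only needed when the body can actually fall off its end.  */

static ATTRIBUTE_UNUSED bool
example_needs_implicit_return (tree fnbody)
{
  /* A trailing RETURN_EXPR, GOTO_EXPR or noreturn call makes
     block_may_fallthru return false.  */
  return block_may_fallthru (fnbody);
}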
12641 /* True if we are using EH to handle cleanups. */
12642 static bool using_eh_for_cleanups_flag = false;
12644 /* This routine is called from front ends to indicate that EH should be
12645 used for cleanups. */
12646 void
12647 using_eh_for_cleanups (void)
12649 using_eh_for_cleanups_flag = true;
12652 /* Query whether EH is used for cleanups. */
12653 bool
12654 using_eh_for_cleanups_p (void)
12656 return using_eh_for_cleanups_flag;
12659 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12660 const char *
12661 get_tree_code_name (enum tree_code code)
12663 const char *invalid = "<invalid tree code>";
12665 /* The tree_code enum promotes to signed, but we could be getting
12666 invalid values, so force an unsigned comparison. */
12667 if (unsigned (code) >= MAX_TREE_CODES)
12669 if ((unsigned)code == 0xa5a5)
12670 return "ggc_freed";
12671 return invalid;
12674 return tree_code_name[code];
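/* A minimal usage sketch (hypothetical debug aid): print a node's code
   defensively; get_tree_code_name copes with freed or out-of-range
   codes instead of indexing past the name table.  */

static ATTRIBUTE_UNUSED void
example_print_code (FILE *file, const_tree t)
{
  fprintf (file, "%s\n", get_tree_code_name (TREE_CODE (t)));
}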
12677 /* Drops the TREE_OVERFLOW flag from T. */
12679 tree
12680 drop_tree_overflow (tree t)
12682 gcc_checking_assert (TREE_OVERFLOW (t));
12684 /* For tree codes with a sharing machinery re-build the result. */
12685 if (poly_int_tree_p (t))
12686 return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
12688 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12689 and canonicalize the result. */
12690 if (TREE_CODE (t) == VECTOR_CST)
12692 tree_vector_builder builder;
12693 builder.new_unary_operation (TREE_TYPE (t), t, true);
12694 unsigned int count = builder.encoded_nelts ();
12695 for (unsigned int i = 0; i < count; ++i)
12697 tree elt = VECTOR_CST_ELT (t, i);
12698 if (TREE_OVERFLOW (elt))
12699 elt = drop_tree_overflow (elt);
12700 builder.quick_push (elt);
12702 return builder.build ();
12705 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12706 and drop the flag. */
12707 t = copy_node (t);
12708 TREE_OVERFLOW (t) = 0;
12710 /* For constants that contain nested constants, drop the flag
12711 from those as well. */
12712 if (TREE_CODE (t) == COMPLEX_CST)
12714 if (TREE_OVERFLOW (TREE_REALPART (t)))
12715 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
12716 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
12717 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
12720 return t;
12723 /* Given a memory reference expression T, return its base address.
12724 The base address of a memory reference expression is the main
12725 object being referenced. For instance, the base address for
12726 'array[i].fld[j]' is 'array'. You can think of this as stripping
12727 away the offset part from a memory address.
12729 This function calls handled_component_p to strip away all the inner
12730 parts of the memory reference until it reaches the base object. */
12732 tree
12733 get_base_address (tree t)
12735 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12736 t = TREE_OPERAND (t, 0);
12737 while (handled_component_p (t))
12738 t = TREE_OPERAND (t, 0);
12740 if ((TREE_CODE (t) == MEM_REF
12741 || TREE_CODE (t) == TARGET_MEM_REF)
12742 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12743 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12745 return t;
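/* A minimal usage sketch: for a reference such as array[i].fld[j] the
   loop above strips the ARRAY_REFs and COMPONENT_REF and returns the
   VAR_DECL 'array', which is what a simple clobber or aliasing check
   would key on.  */

static ATTRIBUTE_UNUSED bool
example_ref_based_on_decl_p (tree ref, tree decl)
{
  return get_base_address (ref) == decl;
}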
12748 /* Return a tree of sizetype representing the size, in bytes, of the element
12749 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12751 tree
12752 array_ref_element_size (tree exp)
12754 tree aligned_size = TREE_OPERAND (exp, 3);
12755 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12756 location_t loc = EXPR_LOCATION (exp);
12758 /* If a size was specified in the ARRAY_REF, it's the size measured
12759 in alignment units of the element type. So multiply by that value. */
12760 if (aligned_size)
12762 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12763 sizetype from another type of the same width and signedness. */
12764 if (TREE_TYPE (aligned_size) != sizetype)
12765 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12766 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12767 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12770 /* Otherwise, take the size from that of the element type. Substitute
12771 any PLACEHOLDER_EXPR that we have. */
12772 else
12773 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12776 /* Return a tree representing the lower bound of the array mentioned in
12777 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12779 tree
12780 array_ref_low_bound (tree exp)
12782 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12784 /* If a lower bound is specified in EXP, use it. */
12785 if (TREE_OPERAND (exp, 2))
12786 return TREE_OPERAND (exp, 2);
12788 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12789 substituting for a PLACEHOLDER_EXPR as needed. */
12790 if (domain_type && TYPE_MIN_VALUE (domain_type))
12791 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12793 /* Otherwise, return a zero of the appropriate type. */
12794 tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
12795 return (idxtype == error_mark_node
12796 ? integer_zero_node : build_int_cst (idxtype, 0));
12799 /* Return a tree representing the upper bound of the array mentioned in
12800 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12802 tree
12803 array_ref_up_bound (tree exp)
12805 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12807 /* If there is a domain type and it has an upper bound, use it, substituting
12808 for a PLACEHOLDER_EXPR as needed. */
12809 if (domain_type && TYPE_MAX_VALUE (domain_type))
12810 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12812 /* Otherwise fail. */
12813 return NULL_TREE;
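/* A minimal usage sketch (hypothetical bounds check): with constant
   bounds, an ARRAY_REF index can be compared against the domain
   recovered by the two functions above; when anything is non-constant
   the check conservatively accepts the access.  */

static ATTRIBUTE_UNUSED bool
example_index_in_bounds_p (tree array_ref)
{
  tree idx = TREE_OPERAND (array_ref, 1);
  tree low = array_ref_low_bound (array_ref);
  tree up = array_ref_up_bound (array_ref);
  if (!up
      || TREE_CODE (idx) != INTEGER_CST
      || TREE_CODE (low) != INTEGER_CST
      || TREE_CODE (up) != INTEGER_CST)
    return true;
  return tree_int_cst_le (low, idx) && tree_int_cst_le (idx, up);
}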
12816 /* Returns true if REF is an array reference, a component reference,
12817 or a memory reference to an array whose actual size might be larger
12818 than its upper bound implies; there are multiple cases:
12819 A. a ref to a flexible array member at the end of a structure;
12820 B. a ref to an array with a type different from that of the original decl;
12821 for example:
12823 short a[16] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 };
12824 (*((char(*)[16])&a[0]))[i+8]
12826 C. a ref to an array that was passed as a parameter;
12827 for example:
12829 int test (uint8_t *p, uint32_t t[1][1], int n) {
12830 for (int i = 0; i < 4; i++, p++)
12831 t[i][0] = ...;
12833 If non-null, set IS_TRAILING_ARRAY to true if the ref is the above case A.
12836 bool
12837 array_ref_flexible_size_p (tree ref, bool *is_trailing_array /* = NULL */)
12839 /* The TYPE for this array reference. */
12840 tree atype = NULL_TREE;
12841 /* The FIELD_DECL for the array field in the containing structure. */
12842 tree afield_decl = NULL_TREE;
12843 /* Whether this array is the trailing array of a structure. */
12844 bool is_trailing_array_tmp = false;
12845 if (!is_trailing_array)
12846 is_trailing_array = &is_trailing_array_tmp;
12848 if (TREE_CODE (ref) == ARRAY_REF
12849 || TREE_CODE (ref) == ARRAY_RANGE_REF)
12851 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
12852 ref = TREE_OPERAND (ref, 0);
12854 else if (TREE_CODE (ref) == COMPONENT_REF
12855 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
12857 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
12858 afield_decl = TREE_OPERAND (ref, 1);
12860 else if (TREE_CODE (ref) == MEM_REF)
12862 tree arg = TREE_OPERAND (ref, 0);
12863 if (TREE_CODE (arg) == ADDR_EXPR)
12864 arg = TREE_OPERAND (arg, 0);
12865 tree argtype = TREE_TYPE (arg);
12866 if (TREE_CODE (argtype) == RECORD_TYPE)
12868 if (tree fld = last_field (argtype))
12870 atype = TREE_TYPE (fld);
12871 afield_decl = fld;
12872 if (TREE_CODE (atype) != ARRAY_TYPE)
12873 return false;
12874 if (VAR_P (arg) && DECL_SIZE (fld))
12875 return false;
12877 else
12878 return false;
12880 else
12881 return false;
12883 else
12884 return false;
12886 if (TREE_CODE (ref) == STRING_CST)
12887 return false;
12889 tree ref_to_array = ref;
12890 while (handled_component_p (ref))
12892 /* If the reference chain contains a component reference to a
12893 non-union type and another field follows, the reference
12894 is not at the end of a structure. */
12895 if (TREE_CODE (ref) == COMPONENT_REF)
12897 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12899 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12900 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12901 nextf = DECL_CHAIN (nextf);
12902 if (nextf)
12903 return false;
12906 /* If we have a multi-dimensional array we do not consider
12907 a non-innermost dimension as flex array if the whole
12908 multi-dimensional array is at struct end.
12909 Same for an array of aggregates with a trailing array
12910 member. */
12911 else if (TREE_CODE (ref) == ARRAY_REF)
12912 return false;
12913 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
12915 /* If we view an underlying object as something else, then what we
12916 gathered up to now is what we have to rely on. */
12917 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
12918 break;
12919 else
12920 gcc_unreachable ();
12922 ref = TREE_OPERAND (ref, 0);
12925 gcc_assert (!afield_decl
12926 || (afield_decl && TREE_CODE (afield_decl) == FIELD_DECL));
12928 /* The array now is at struct end. Treat a flexible array member as
12929 always subject to extension, even into just the padding constrained
12930 by an underlying decl. */
12931 if (! TYPE_SIZE (atype)
12932 || ! TYPE_DOMAIN (atype)
12933 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12935 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12936 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12939 /* If the reference is based on a declared entity, the size of the array
12940 is constrained by its given domain. (Do not trust commons; see PR 69368.) */
12941 ref = get_base_address (ref);
12942 if (ref
12943 && DECL_P (ref)
12944 && !(flag_unconstrained_commons
12945 && VAR_P (ref) && DECL_COMMON (ref))
12946 && DECL_SIZE_UNIT (ref)
12947 && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
12949 /* If the object itself is the array it is not at struct end. */
12950 if (DECL_P (ref_to_array))
12951 return false;
12953 /* Check whether the array domain covers all of the available
12954 padding. */
12955 poly_int64 offset;
12956 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
12957 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
12958 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
12960 *is_trailing_array
12961 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12962 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12964 if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
12966 *is_trailing_array
12967 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12968 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12971 /* If at least one extra element fits it is a flexarray. */
12972 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
12973 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
12974 + 2)
12975 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
12976 wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
12978 *is_trailing_array
12979 = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12980 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
12983 return false;
12986 *is_trailing_array = afield_decl && TREE_CODE (afield_decl) == FIELD_DECL;
12987 return afield_decl ? !DECL_NOT_FLEXARRAY (afield_decl) : true;
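/* A minimal usage sketch (hypothetical caller): an out-of-bounds
   warning should only trust the declared array bound when the
   reference cannot be one of the flexible cases A-C described above.  */

static ATTRIBUTE_UNUSED bool
example_can_trust_upper_bound_p (tree ref)
{
  return !array_ref_flexible_size_p (ref);
}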
12991 /* Return a tree representing the offset, in bytes, of the field referenced
12992 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12994 tree
12995 component_ref_field_offset (tree exp)
12997 tree aligned_offset = TREE_OPERAND (exp, 2);
12998 tree field = TREE_OPERAND (exp, 1);
12999 location_t loc = EXPR_LOCATION (exp);
13001 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13002 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13003 value. */
13004 if (aligned_offset)
13006 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13007 sizetype from another type of the same width and signedness. */
13008 if (TREE_TYPE (aligned_offset) != sizetype)
13009 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13010 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13011 size_int (DECL_OFFSET_ALIGN (field)
13012 / BITS_PER_UNIT));
13015 /* Otherwise, take the offset from that of the field. Substitute
13016 any PLACEHOLDER_EXPR that we have. */
13017 else
13018 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13021 /* Given the initializer INIT, return the initializer for the field
13022 DECL if it exists, otherwise null. Used to obtain the initializer
13023 for a flexible array member and determine its size. */
13025 static tree
13026 get_initializer_for (tree init, tree decl)
13028 STRIP_NOPS (init);
13030 tree fld, fld_init;
13031 unsigned HOST_WIDE_INT i;
13032 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
13034 if (decl == fld)
13035 return fld_init;
13037 if (TREE_CODE (fld) == CONSTRUCTOR)
13039 fld_init = get_initializer_for (fld_init, decl);
13040 if (fld_init)
13041 return fld_init;
13045 return NULL_TREE;
13048 /* Determines the special array member type for the array reference REF. */
13049 special_array_member
13050 component_ref_sam_type (tree ref)
13052 special_array_member sam_type = special_array_member::none;
13054 tree member = TREE_OPERAND (ref, 1);
13055 tree memsize = DECL_SIZE_UNIT (member);
13056 if (memsize)
13058 tree memtype = TREE_TYPE (member);
13059 if (TREE_CODE (memtype) != ARRAY_TYPE)
13060 return sam_type;
13062 bool trailing = false;
13063 (void)array_ref_flexible_size_p (ref, &trailing);
13064 bool zero_length = integer_zerop (memsize);
13065 if (!trailing && !zero_length)
13066 /* MEMBER is an interior array with
13067 more than one element. */
13068 return special_array_member::int_n;
13070 if (zero_length)
13072 if (trailing)
13073 return special_array_member::trail_0;
13074 else
13075 return special_array_member::int_0;
13078 if (!zero_length)
13079 if (tree dom = TYPE_DOMAIN (memtype))
13080 if (tree min = TYPE_MIN_VALUE (dom))
13081 if (tree max = TYPE_MAX_VALUE (dom))
13082 if (TREE_CODE (min) == INTEGER_CST
13083 && TREE_CODE (max) == INTEGER_CST)
13085 offset_int minidx = wi::to_offset (min);
13086 offset_int maxidx = wi::to_offset (max);
13087 offset_int neltsm1 = maxidx - minidx;
13088 if (neltsm1 > 0)
13089 /* MEMBER is a trailing array with more than
13090 one element. */
13091 return special_array_member::trail_n;
13093 if (neltsm1 == 0)
13094 return special_array_member::trail_1;
13098 return sam_type;
13101 /* Determines the size of the member referenced by the COMPONENT_REF
13102 REF, using its initializer expression if necessary in order to
13103 determine the size of an initialized flexible array member.
13104 If non-null, set *SAM to the type of special array member.
13105 Returns the size as sizetype (which might be zero for an object
13106 with an uninitialized flexible array member) or null if the size
13107 cannot be determined. */
13109 tree
13110 component_ref_size (tree ref, special_array_member *sam /* = NULL */)
13112 gcc_assert (TREE_CODE (ref) == COMPONENT_REF);
13114 special_array_member sambuf;
13115 if (!sam)
13116 sam = &sambuf;
13117 *sam = component_ref_sam_type (ref);
13119 /* The object/argument referenced by the COMPONENT_REF and its type. */
13120 tree arg = TREE_OPERAND (ref, 0);
13121 tree argtype = TREE_TYPE (arg);
13122 /* The referenced member. */
13123 tree member = TREE_OPERAND (ref, 1);
13125 tree memsize = DECL_SIZE_UNIT (member);
13126 if (memsize)
13128 tree memtype = TREE_TYPE (member);
13129 if (TREE_CODE (memtype) != ARRAY_TYPE)
13130 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
13131 to the type of a class with a virtual base which doesn't
13132 reflect the size of the virtual's members (see pr97595).
13133 If that's the case fail for now and implement something
13134 more robust in the future. */
13135 return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
13136 ? memsize : NULL_TREE);
13138 /* Arrays of two or more elements are treated as normal arrays by default. */
13139 if (*sam == special_array_member::int_n
13140 || *sam == special_array_member::trail_n)
13141 return memsize;
13143 tree afield_decl = TREE_OPERAND (ref, 1);
13144 gcc_assert (TREE_CODE (afield_decl) == FIELD_DECL);
13145 /* If the trailing array is not a flexible array member, treat it as
13146 a normal array. */
13147 if (DECL_NOT_FLEXARRAY (afield_decl)
13148 && *sam != special_array_member::int_0)
13149 return memsize;
13151 if (*sam == special_array_member::int_0)
13152 memsize = NULL_TREE;
13154 /* For a reference to a flexible array member of a union
13155 use the size of the union instead of the size of the member. */
13156 if (TREE_CODE (argtype) == UNION_TYPE)
13157 memsize = TYPE_SIZE_UNIT (argtype);
13160 /* MEMBER is either a bona fide flexible array member, or a zero-length
13161 array member, or an array of length one treated as such. */
13163 /* If the reference is to a declared object and the member a true
13164 flexible array, try to determine its size from its initializer. */
13165 poly_int64 baseoff = 0;
13166 tree base = get_addr_base_and_unit_offset (ref, &baseoff);
13167 if (!base || !VAR_P (base))
13169 if (*sam != special_array_member::int_0)
13170 return NULL_TREE;
13172 if (TREE_CODE (arg) != COMPONENT_REF)
13173 return NULL_TREE;
13175 base = arg;
13176 while (TREE_CODE (base) == COMPONENT_REF)
13177 base = TREE_OPERAND (base, 0);
13178 baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
13181 /* BASE is the declared object of which MEMBER is either a member
13182 or that is cast to ARGTYPE (e.g., a char buffer used to store
13183 an ARGTYPE object). */
13184 tree basetype = TREE_TYPE (base);
13186 /* Determine the base type of the referenced object. If it's
13187 the same as ARGTYPE and MEMBER has a known size, return it. */
13188 tree bt = basetype;
13189 if (*sam != special_array_member::int_0)
13190 while (TREE_CODE (bt) == ARRAY_TYPE)
13191 bt = TREE_TYPE (bt);
13192 bool typematch = useless_type_conversion_p (argtype, bt);
13193 if (memsize && typematch)
13194 return memsize;
13196 memsize = NULL_TREE;
13198 if (typematch)
13199 /* MEMBER is a true flexible array member. Compute its size from
13200 the initializer of the BASE object if it has one. */
13201 if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
13202 if (init != error_mark_node)
13204 init = get_initializer_for (init, member);
13205 if (init)
13207 memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
13208 if (tree refsize = TYPE_SIZE_UNIT (argtype))
13210 /* Use the larger of the initializer size and the tail
13211 padding in the enclosing struct. */
13212 poly_int64 rsz = tree_to_poly_int64 (refsize);
13213 rsz -= baseoff;
13214 if (known_lt (tree_to_poly_int64 (memsize), rsz))
13215 memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
13218 baseoff = 0;
13222 if (!memsize)
13224 if (typematch)
13226 if (DECL_P (base)
13227 && DECL_EXTERNAL (base)
13228 && bt == basetype
13229 && *sam != special_array_member::int_0)
13230 /* The size of a flexible array member of an extern struct
13231 with no initializer cannot be determined (it's defined
13232 in another translation unit and can have an initializer
13233 with an arbitrary number of elements). */
13234 return NULL_TREE;
13236 /* Use the size of the base struct or, for interior zero-length
13237 arrays, the size of the enclosing type. */
13238 memsize = TYPE_SIZE_UNIT (bt);
13240 else if (DECL_P (base))
13241 /* Use the size of the BASE object (possibly an array of some
13242 other type such as char used to store the struct). */
13243 memsize = DECL_SIZE_UNIT (base);
13244 else
13245 return NULL_TREE;
13248 /* If the flexible array member has a known size use the greater
13249 of it and the tail padding in the enclosing struct.
13250 Otherwise, when the size of the flexible array member is unknown
13251 and the referenced object is not a struct, use the size of its
13252 type when known. This detects sizes of array buffers when cast
13253 to struct types with flexible array members. */
13254 if (memsize)
13256 if (!tree_fits_poly_int64_p (memsize))
13257 return NULL_TREE;
13258 poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
13259 if (known_lt (baseoff, memsz64))
13261 memsz64 -= baseoff;
13262 return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
13264 return size_zero_node;
13267 /* Return "don't know" for an external non-array object since its
13268 flexible array member can be initialized to have any number of
13269 elements. Otherwise, return zero because the flexible array
13270 member has no elements. */
13271 return (DECL_P (base)
13272 && DECL_EXTERNAL (base)
13273 && (!typematch
13274 || TREE_CODE (basetype) != ARRAY_TYPE)
13275 ? NULL_TREE : size_zero_node);
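/* A minimal usage sketch (hypothetical access check): callers usually
   compare the member size against the size of the access; a null
   result means the flexible member's extent is unknown and no
   diagnostic should be issued.  */

static ATTRIBUTE_UNUSED bool
example_access_fits_member_p (tree comp_ref, poly_int64 access_size)
{
  tree size = component_ref_size (comp_ref);
  if (!size || !tree_fits_poly_int64_p (size))
    return true;
  return known_le (access_size, tree_to_poly_int64 (size));
}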
13278 /* Return the machine mode of T. For vectors, returns the mode of the
13279 inner type. The main use case is to feed the result to HONOR_NANS,
13280 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13282 machine_mode
13283 element_mode (const_tree t)
13285 if (!TYPE_P (t))
13286 t = TREE_TYPE (t);
13287 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13288 t = TREE_TYPE (t);
13289 return TYPE_MODE (t);
13292 /* Vector types need to re-check the target flags each time we report
13293 the machine mode. We need to do this because attribute target can
13294 change the result of vector_mode_supported_p and have_regs_of_mode
13295 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
13296 change on a per-function basis. */
13297 /* ??? Possibly a better solution is to run through all the types
13298 referenced by a function and re-compute the TYPE_MODE once, rather
13299 than make the TYPE_MODE macro call a function. */
13301 machine_mode
13302 vector_type_mode (const_tree t)
13304 machine_mode mode;
13306 gcc_assert (TREE_CODE (t) == VECTOR_TYPE);
13308 mode = t->type_common.mode;
13309 if (VECTOR_MODE_P (mode)
13310 && (!targetm.vector_mode_supported_p (mode)
13311 || !have_regs_of_mode[mode]))
13313 scalar_int_mode innermode;
13315 /* For integers, try mapping it to a same-sized scalar mode. */
13316 if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
13318 poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
13319 * GET_MODE_BITSIZE (innermode));
13320 scalar_int_mode mode;
13321 if (int_mode_for_size (size, 0).exists (&mode)
13322 && have_regs_of_mode[mode])
13323 return mode;
13326 return BLKmode;
13329 return mode;
13332 /* Return the size in bits of each element of vector type TYPE. */
13334 unsigned int
13335 vector_element_bits (const_tree type)
13337 gcc_checking_assert (VECTOR_TYPE_P (type));
13338 if (VECTOR_BOOLEAN_TYPE_P (type))
13339 return TYPE_PRECISION (TREE_TYPE (type));
13340 return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
13343 /* Calculate the size in bits of each element of vector type TYPE
13344 and return the result as a tree of type bitsizetype. */
13346 tree
13347 vector_element_bits_tree (const_tree type)
13349 gcc_checking_assert (VECTOR_TYPE_P (type));
13350 if (VECTOR_BOOLEAN_TYPE_P (type))
13351 return bitsize_int (vector_element_bits (type));
13352 return TYPE_SIZE (TREE_TYPE (type));
13355 /* Verify that basic properties of T match TV and thus T can be a variant of
13356 TV. TV should be the more specified variant (i.e. the main variant). */
13358 static bool
13359 verify_type_variant (const_tree t, tree tv)
13361 /* Type variant can differ by:
13363 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13364 ENCODE_QUAL_ADDR_SPACE.
13365 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13366 in this case some values may not be set in the variant types
13367 (see TYPE_COMPLETE_P checks).
13368 - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial type
13369 - by TYPE_NAME and attributes (i.e. when the variant originates from a typedef)
13370 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13371 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13372 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13373 this is necessary to make it possible to merge types from different TUs
13374 - arrays, pointers and references may have TREE_TYPE that is a variant
13375 of TREE_TYPE of their main variants.
13376 - aggregates may have a new TYPE_FIELDS list that lists variants of
13377 the main variant TYPE_FIELDS.
13378 - vector types may differ by TYPE_VECTOR_OPAQUE
13381 /* Convenience macro for matching individual fields. */
13382 #define verify_variant_match(flag) \
13383 do { \
13384 if (flag (tv) != flag (t)) \
13386 error ("type variant differs by %s", #flag); \
13387 debug_tree (tv); \
13388 return false; \
13390 } while (false)
13392 /* tree_base checks. */
13394 verify_variant_match (TREE_CODE);
13395 /* FIXME: Ada builds non-artificial variants of artificial types. */
13396 #if 0
13397 if (TYPE_ARTIFICIAL (tv))
13398 verify_variant_match (TYPE_ARTIFICIAL);
13399 #endif
13400 if (POINTER_TYPE_P (tv))
13401 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13402 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada builds. */
13403 verify_variant_match (TYPE_UNSIGNED);
13404 verify_variant_match (TYPE_PACKED);
13405 if (TREE_CODE (t) == REFERENCE_TYPE)
13406 verify_variant_match (TYPE_REF_IS_RVALUE);
13407 if (AGGREGATE_TYPE_P (t))
13408 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13409 else
13410 verify_variant_match (TYPE_SATURATING);
13411 /* FIXME: This check triggers during the libstdc++ build. */
13412 #if 0
13413 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
13414 verify_variant_match (TYPE_FINAL_P);
13415 #endif
13417 /* tree_type_common checks. */
13419 if (COMPLETE_TYPE_P (t))
13421 verify_variant_match (TYPE_MODE);
13422 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13423 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13424 verify_variant_match (TYPE_SIZE);
13425 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13426 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13427 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13429 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13430 TYPE_SIZE_UNIT (tv), 0));
13431 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13432 debug_tree (tv);
13433 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13434 debug_tree (TYPE_SIZE_UNIT (tv));
13435 error ("type%'s %<TYPE_SIZE_UNIT%>");
13436 debug_tree (TYPE_SIZE_UNIT (t));
13437 return false;
13439 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13441 verify_variant_match (TYPE_PRECISION);
13442 if (RECORD_OR_UNION_TYPE_P (t))
13443 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13444 else if (TREE_CODE (t) == ARRAY_TYPE)
13445 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13446 /* During LTO we merge variant lists from different translation units
13447 that may differ by TYPE_CONTEXT, which in turn may point
13448 to TRANSLATION_UNIT_DECL.
13449 Ada also builds variants of types with different TYPE_CONTEXT. */
13450 #if 0
13451 if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
13452 verify_variant_match (TYPE_CONTEXT);
13453 #endif
13454 if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
13455 verify_variant_match (TYPE_STRING_FLAG);
13456 if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
13457 verify_variant_match (TYPE_CXX_ODR_P);
13458 if (TYPE_ALIAS_SET_KNOWN_P (t))
13460 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13461 debug_tree (tv);
13462 return false;
13465 /* tree_type_non_common checks. */
13467 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13468 and dangles the pointer from time to time. */
13469 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13470 && (in_lto_p || !TYPE_VFIELD (tv)
13471 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13473 error ("type variant has different %<TYPE_VFIELD%>");
13474 debug_tree (tv);
13475 return false;
13477 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13478 || TREE_CODE (t) == INTEGER_TYPE
13479 || TREE_CODE (t) == BOOLEAN_TYPE
13480 || TREE_CODE (t) == REAL_TYPE
13481 || TREE_CODE (t) == FIXED_POINT_TYPE)
13483 verify_variant_match (TYPE_MAX_VALUE);
13484 verify_variant_match (TYPE_MIN_VALUE);
13486 if (TREE_CODE (t) == METHOD_TYPE)
13487 verify_variant_match (TYPE_METHOD_BASETYPE);
13488 if (TREE_CODE (t) == OFFSET_TYPE)
13489 verify_variant_match (TYPE_OFFSET_BASETYPE);
13490 if (TREE_CODE (t) == ARRAY_TYPE)
13491 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13492 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13493 or even the type's main variant. This is needed to make the bootstrap
13494 pass; the bug seems new in GCC 5.
13495 C++ FE should be updated to make this consistent and we should check
13496 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13497 is a match with main variant.
13499 Also disable the check for Java for now because of a parser hack that
13500 builds first a dummy BINFO and then sometimes replaces it by the real
13501 BINFO in some of the copies. */
13502 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13503 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13504 /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
13505 Since there is no cheap way to tell a C++ type from a Java type without
13506 LTO, do the checking at LTO time only. */
13507 && (in_lto_p && odr_type_p (t)))
13509 error ("type variant has different %<TYPE_BINFO%>");
13510 debug_tree (tv);
13511 error ("type variant%'s %<TYPE_BINFO%>");
13512 debug_tree (TYPE_BINFO (tv));
13513 error ("type%'s %<TYPE_BINFO%>");
13514 debug_tree (TYPE_BINFO (t));
13515 return false;
13518 /* Check various uses of TYPE_VALUES_RAW. */
13519 if (TREE_CODE (t) == ENUMERAL_TYPE
13520 && TYPE_VALUES (t))
13521 verify_variant_match (TYPE_VALUES);
13522 else if (TREE_CODE (t) == ARRAY_TYPE)
13523 verify_variant_match (TYPE_DOMAIN);
13524 /* Permit incomplete variants of a complete type. While FEs may complete
13525 all variants, this does not happen for C++ templates in all cases. */
13526 else if (RECORD_OR_UNION_TYPE_P (t)
13527 && COMPLETE_TYPE_P (t)
13528 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13530 tree f1, f2;
13532 /* Fortran builds qualified variants as new records with items of
13533 qualified type. Verify that they look the same. */
13534 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13535 f1 && f2;
13536 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13537 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13538 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13539 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13540 /* FIXME: gfc_nonrestricted_type builds all types as variants
13541 with the exception of pointer types. It deeply copies the type,
13542 which means that we may end up with a variant type
13543 referring to a non-variant pointer. We may change it to
13544 produce types as variants, too, like
13545 objc_get_protocol_qualified_type does. */
13546 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13547 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13548 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13549 break;
13550 if (f1 || f2)
13552 error ("type variant has different %<TYPE_FIELDS%>");
13553 debug_tree (tv);
13554 error ("first mismatch is field");
13555 debug_tree (f1);
13556 error ("and field");
13557 debug_tree (f2);
13558 return false;
13561 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13562 verify_variant_match (TYPE_ARG_TYPES);
13563 /* For C++ the qualified variant of an array type is really an array type
13564 of the qualified TREE_TYPE.
13565 ObjC builds variants of pointer types where the pointed-to type is a variant,
13566 too, in objc_get_protocol_qualified_type. */
13567 if (TREE_TYPE (t) != TREE_TYPE (tv)
13568 && ((TREE_CODE (t) != ARRAY_TYPE
13569 && !POINTER_TYPE_P (t))
13570 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13571 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13573 error ("type variant has different %<TREE_TYPE%>");
13574 debug_tree (tv);
13575 error ("type variant%'s %<TREE_TYPE%>");
13576 debug_tree (TREE_TYPE (tv));
13577 error ("type%'s %<TREE_TYPE%>");
13578 debug_tree (TREE_TYPE (t));
13579 return false;
13581 if (type_with_alias_set_p (t)
13582 && !gimple_canonical_types_compatible_p (t, tv, false))
13584 error ("type is not compatible with its variant");
13585 debug_tree (tv);
13586 error ("type variant%'s %<TREE_TYPE%>");
13587 debug_tree (TREE_TYPE (tv));
13588 error ("type%'s %<TREE_TYPE%>");
13589 debug_tree (TREE_TYPE (t));
13590 return false;
13592 return true;
13593 #undef verify_variant_match
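/* As an illustrative sketch (not taken from this file): for

     struct S { int i; };
     typedef const struct S CS;

   the front end builds the type of CS as a qualified variant whose
   TYPE_MAIN_VARIANT is `struct S'; verify_type_variant checks that the
   variant agrees with its main variant on TYPE_SIZE_UNIT, TYPE_FIELDS,
   TREE_TYPE and the other properties verified above.  */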
13597 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13598 the middle-end types_compatible_p function. It needs to avoid
13599 claiming types are different for types that should be treated
13600 the same with respect to TBAA. Canonical types are also used
13601 for IL consistency checks via the useless_type_conversion_p
13602 predicate which does not handle all type kinds itself but falls
13603 back to pointer-comparison of TYPE_CANONICAL for aggregates
13604 for example. */
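/* For example (an illustrative sketch, not part of this file): two
   translation units may each declare

     struct point { int x; int y; };

   Under LTO the two RECORD_TYPEs are distinct trees, but
   gimple_canonical_types_compatible_p considers them structurally
   identical, so they receive the same TYPE_CANONICAL and therefore the
   same alias set for TBAA purposes.  */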
13606 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13607 type calculation because we need to allow inter-operability between signed
13608 and unsigned variants. */
13610 bool
13611 type_with_interoperable_signedness (const_tree type)
13613 /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13614 signed char and unsigned char. Similarly, the Fortran FE builds
13615 C_SIZE_T as a signed type, while C defines it as unsigned. */
13617 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13618 == INTEGER_TYPE
13619 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13620 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13623 /* Return true iff T1 and T2 are structurally identical as far as
13624 TBAA is concerned.
13625 This function is used both by lto.cc canonical type merging and by the
13626 verifier. If TRUST_TYPE_CANONICAL, we do not look into the structure of
13627 types that have TYPE_CANONICAL defined and assume them equivalent. This is
13628 useful only for LTO because only in that case TYPE_CANONICAL equivalence
13629 corresponds to the one defined by gimple_canonical_types_compatible_p. */
13631 bool
13632 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13633 bool trust_type_canonical)
13635 /* Type variants should be same as the main variant. When not doing sanity
13636 checking to verify this fact, go to main variants and save some work. */
13637 if (trust_type_canonical)
13639 t1 = TYPE_MAIN_VARIANT (t1);
13640 t2 = TYPE_MAIN_VARIANT (t2);
13643 /* Check first for the obvious case of pointer identity. */
13644 if (t1 == t2)
13645 return true;
13647 /* Check that we have two types to compare. */
13648 if (t1 == NULL_TREE || t2 == NULL_TREE)
13649 return false;
13651 /* We consider complete types always compatible with incomplete types.
13652 This does not make sense for canonical type calculation and thus we
13653 need to ensure that we are never called in that case.
13655 FIXME: For more correctness the function probably should have three modes:
13656 1) a mode assuming that types are complete and matching their structure,
13657 2) a mode allowing incomplete types but producing equivalence classes
13658 and thus ignoring all info from complete types,
13659 3) a mode allowing incomplete types to match complete ones but checking
13660 compatibility between complete types.
13662 Modes 1 and 2 can be used for canonical type calculation. Mode 3 is the real
13663 definition of type compatibility that can be used e.g. for warnings during
13664 declaration merging. */
13666 gcc_assert (!trust_type_canonical
13667 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13669 /* If the types have been previously registered and found equal
13670 they still are. */
13672 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13673 && trust_type_canonical)
13675 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13676 they are always NULL, but they are set to non-NULL for types
13677 constructed by build_pointer_type and variants. In this case the
13678 TYPE_CANONICAL is more fine-grained than the equivalence we test (where
13679 all pointers are considered equal). Be sure not to return false
13680 negatives. */
13681 gcc_checking_assert (canonical_type_used_p (t1)
13682 && canonical_type_used_p (t2));
13683 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13686 /* For types where we do ODR-based TBAA the canonical type is always
13687 set correctly, so we know that types are different if their
13688 canonical types do not match. */
13689 if (trust_type_canonical
13690 && (odr_type_p (t1) && odr_based_tbaa_p (t1))
13691 != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
13692 return false;
13694 /* Can't be the same type if the types don't have the same code. */
13695 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13696 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13697 return false;
13699 /* Qualifiers do not matter for canonical type comparison purposes. */
13701 /* Void types and nullptr types are always the same. */
13702 if (TREE_CODE (t1) == VOID_TYPE
13703 || TREE_CODE (t1) == NULLPTR_TYPE)
13704 return true;
13706 /* Can't be the same type if they have different modes. */
13707 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13708 return false;
13710 /* Non-aggregate types can be handled cheaply. */
13711 if (INTEGRAL_TYPE_P (t1)
13712 || SCALAR_FLOAT_TYPE_P (t1)
13713 || FIXED_POINT_TYPE_P (t1)
13714 || TREE_CODE (t1) == VECTOR_TYPE
13715 || TREE_CODE (t1) == COMPLEX_TYPE
13716 || TREE_CODE (t1) == OFFSET_TYPE
13717 || POINTER_TYPE_P (t1))
13719 /* Can't be the same type if they have different precision. */
13720 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13721 return false;
13723 /* In some cases the signed and unsigned types are required to be
13724 inter-operable. */
13725 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13726 && !type_with_interoperable_signedness (t1))
13727 return false;
13729 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13730 interoperable with "signed char". Unless all frontends are revisited
13731 to agree on these types, we must ignore the flag completely. */
13733 /* The Fortran standard defines the C_PTR type that is compatible with every
13734 C pointer. For this reason we need to glob all pointers into one.
13735 Still, pointers in different address spaces are not compatible. */
13736 if (POINTER_TYPE_P (t1))
13738 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13739 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13740 return false;
13743 /* Tail-recurse to components. */
13744 if (TREE_CODE (t1) == VECTOR_TYPE
13745 || TREE_CODE (t1) == COMPLEX_TYPE)
13746 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13747 TREE_TYPE (t2),
13748 trust_type_canonical);
13750 return true;
13753 /* Do type-specific comparisons. */
13754 switch (TREE_CODE (t1))
13756 case ARRAY_TYPE:
13757 /* Array types are the same if the element types are the same and
13758 the number of elements is the same. */
13759 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13760 trust_type_canonical)
13761 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13762 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13763 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13764 return false;
13765 else
13767 tree i1 = TYPE_DOMAIN (t1);
13768 tree i2 = TYPE_DOMAIN (t2);
13770 /* For an incomplete external array, the type domain can be
13771 NULL_TREE. Check this condition also. */
13772 if (i1 == NULL_TREE && i2 == NULL_TREE)
13773 return true;
13774 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13775 return false;
13776 else
13778 tree min1 = TYPE_MIN_VALUE (i1);
13779 tree min2 = TYPE_MIN_VALUE (i2);
13780 tree max1 = TYPE_MAX_VALUE (i1);
13781 tree max2 = TYPE_MAX_VALUE (i2);
13783 /* The minimum/maximum values have to be the same. */
13784 if ((min1 == min2
13785 || (min1 && min2
13786 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13787 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13788 || operand_equal_p (min1, min2, 0))))
13789 && (max1 == max2
13790 || (max1 && max2
13791 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13792 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13793 || operand_equal_p (max1, max2, 0)))))
13794 return true;
13795 else
13796 return false;
13800 case METHOD_TYPE:
13801 case FUNCTION_TYPE:
13802 /* Function types are the same if the return type and argument types
13803 are the same. */
13804 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13805 trust_type_canonical))
13806 return false;
13808 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2)
13809 && (TYPE_NO_NAMED_ARGS_STDARG_P (t1)
13810 == TYPE_NO_NAMED_ARGS_STDARG_P (t2)))
13811 return true;
13812 else
13814 tree parms1, parms2;
13816 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13817 parms1 && parms2;
13818 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13820 if (!gimple_canonical_types_compatible_p
13821 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13822 trust_type_canonical))
13823 return false;
13826 if (parms1 || parms2)
13827 return false;
13829 return true;
13832 case RECORD_TYPE:
13833 case UNION_TYPE:
13834 case QUAL_UNION_TYPE:
13836 tree f1, f2;
13838 /* Don't try to compare variants of an incomplete type, before
13839 TYPE_FIELDS has been copied around. */
13840 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13841 return true;
13844 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13845 return false;
13847 /* For aggregate types, all the fields must be the same. */
13848 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13849 f1 || f2;
13850 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13852 /* Skip non-fields and zero-sized fields. */
13853 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13854 || (DECL_SIZE (f1)
13855 && integer_zerop (DECL_SIZE (f1)))))
13856 f1 = TREE_CHAIN (f1);
13857 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13858 || (DECL_SIZE (f2)
13859 && integer_zerop (DECL_SIZE (f2)))))
13860 f2 = TREE_CHAIN (f2);
13861 if (!f1 || !f2)
13862 break;
13863 /* The fields must have the same name, offset and type. */
13864 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13865 || !gimple_compare_field_offset (f1, f2)
13866 || !gimple_canonical_types_compatible_p
13867 (TREE_TYPE (f1), TREE_TYPE (f2),
13868 trust_type_canonical))
13869 return false;
13872 /* If one aggregate has more fields than the other, they
13873 are not the same. */
13874 if (f1 || f2)
13875 return false;
13877 return true;
13880 default:
13881 /* Consider all types with language specific trees in them mutually
13882 compatible. This is executed only from verify_type and false
13883 positives can be tolerated. */
13884 gcc_assert (!in_lto_p);
13885 return true;
13889 /* For OPAQUE_TYPE T, it should have only size and alignment information
13890 and its mode should be of class MODE_OPAQUE. This function verifies
13891 that these properties of T match those of TV, which is the main variant of T,
13892 and of TC, which is the canonical type of T. */
13894 static void
13895 verify_opaque_type (const_tree t, tree tv, tree tc)
13897 gcc_assert (OPAQUE_TYPE_P (t));
13898 gcc_assert (tv && tv == TYPE_MAIN_VARIANT (tv));
13899 gcc_assert (tc && tc == TYPE_CANONICAL (tc));
13901 /* For an opaque type T1, check if some of its properties match
13902 the corresponding ones of the other opaque type T2, and emit
13903 error messages for the inconsistent ones. */
13904 auto check_properties_for_opaque_type = [](const_tree t1, tree t2,
13905 const char *kind_msg)
13907 if (!OPAQUE_TYPE_P (t2))
13909 error ("type %s is not an opaque type", kind_msg);
13910 debug_tree (t2);
13911 return;
13913 if (!OPAQUE_MODE_P (TYPE_MODE (t2)))
13915 error ("type %s does not have opaque mode", kind_msg);
13916 debug_tree (t2);
13917 return;
13919 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13921 error ("type %s differs by %<TYPE_MODE%>", kind_msg);
13922 debug_tree (t2);
13923 return;
13925 poly_uint64 t1_size = tree_to_poly_uint64 (TYPE_SIZE (t1));
13926 poly_uint64 t2_size = tree_to_poly_uint64 (TYPE_SIZE (t2));
13927 if (maybe_ne (t1_size, t2_size))
13929 error ("type %s differs by %<TYPE_SIZE%>", kind_msg);
13930 debug_tree (t2);
13931 return;
13933 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2))
13935 error ("type %s differs by %<TYPE_ALIGN%>", kind_msg);
13936 debug_tree (t2);
13937 return;
13939 if (TYPE_USER_ALIGN (t1) != TYPE_USER_ALIGN (t2))
13941 error ("type %s differs by %<TYPE_USER_ALIGN%>", kind_msg);
13942 debug_tree (t2);
13943 return;
13947 if (t != tv)
13948 check_properties_for_opaque_type (t, tv, "variant");
13950 if (t != tc)
13951 check_properties_for_opaque_type (t, tc, "canonical");
13954 /* Verify type T. */
13956 void
13957 verify_type (const_tree t)
13959 bool error_found = false;
13960 tree mv = TYPE_MAIN_VARIANT (t);
13961 tree ct = TYPE_CANONICAL (t);
13963 if (OPAQUE_TYPE_P (t))
13965 verify_opaque_type (t, mv, ct);
13966 return;
13969 if (!mv)
13971 error ("main variant is not defined");
13972 error_found = true;
13974 else if (mv != TYPE_MAIN_VARIANT (mv))
13976 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13977 debug_tree (mv);
13978 error_found = true;
13980 else if (t != mv && !verify_type_variant (t, mv))
13981 error_found = true;
13983 if (!ct)
13985 else if (TYPE_CANONICAL (ct) != ct)
13987 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13988 debug_tree (ct);
13989 error_found = true;
13991 /* Method and function types cannot be used to address memory and thus
13992 TYPE_CANONICAL really matters only for determining useless conversions.
13994 FIXME: The C++ FE produces declarations of builtin functions that are not
13995 compatible with their main variants. */
13996 else if (TREE_CODE (t) == FUNCTION_TYPE)
13998 else if (t != ct
13999 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14000 with variably sized arrays because their sizes were possibly
14001 gimplified into different variables. */
14002 && !variably_modified_type_p (ct, NULL)
14003 && !gimple_canonical_types_compatible_p (t, ct, false)
14004 && COMPLETE_TYPE_P (t))
14006 error ("%<TYPE_CANONICAL%> is not compatible");
14007 debug_tree (ct);
14008 error_found = true;
14011 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
14012 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
14014 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14015 debug_tree (ct);
14016 error_found = true;
14018 if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
14020 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14021 debug_tree (ct);
14022 debug_tree (TYPE_MAIN_VARIANT (ct));
14023 error_found = true;
14027 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14028 if (RECORD_OR_UNION_TYPE_P (t))
14030 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
14031 and dangles the pointer from time to time. */
14032 if (TYPE_VFIELD (t)
14033 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
14034 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
14036 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14037 debug_tree (TYPE_VFIELD (t));
14038 error_found = true;
14041 else if (TREE_CODE (t) == POINTER_TYPE)
14043 if (TYPE_NEXT_PTR_TO (t)
14044 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
14046 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14047 debug_tree (TYPE_NEXT_PTR_TO (t));
14048 error_found = true;
14051 else if (TREE_CODE (t) == REFERENCE_TYPE)
14053 if (TYPE_NEXT_REF_TO (t)
14054 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14056 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14057 debug_tree (TYPE_NEXT_REF_TO (t));
14058 error_found = true;
14061 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14062 || TREE_CODE (t) == FIXED_POINT_TYPE)
14064 /* FIXME: The following check should pass:
14065 useless_type_conversion_p (const_cast <tree> (t),
14066 TREE_TYPE (TYPE_MIN_VALUE (t)))
14067 but does not for C sizetypes in LTO. */
14070 /* Check various uses of TYPE_MAX_VALUE_RAW. */
14071 if (RECORD_OR_UNION_TYPE_P (t))
14073 if (!TYPE_BINFO (t))
14075 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14077 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14078 debug_tree (TYPE_BINFO (t));
14079 error_found = true;
14081 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t))
14083 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14084 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
14085 error_found = true;
14088 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14090 if (TYPE_METHOD_BASETYPE (t)
14091 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14092 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14094 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14095 debug_tree (TYPE_METHOD_BASETYPE (t));
14096 error_found = true;
14099 else if (TREE_CODE (t) == OFFSET_TYPE)
14101 if (TYPE_OFFSET_BASETYPE (t)
14102 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14103 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14105 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14106 debug_tree (TYPE_OFFSET_BASETYPE (t));
14107 error_found = true;
14110 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14111 || TREE_CODE (t) == FIXED_POINT_TYPE)
14113 /* FIXME: The following check should pass:
14114 useless_type_conversion_p (const_cast <tree> (t),
14115 TREE_TYPE (TYPE_MAX_VALUE (t)))
14116 but does not for C sizetypes in LTO. */
14118 else if (TREE_CODE (t) == ARRAY_TYPE)
14120 if (TYPE_ARRAY_MAX_SIZE (t)
14121 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14123 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14124 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14125 error_found = true;
14128 else if (TYPE_MAX_VALUE_RAW (t))
14130 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14131 debug_tree (TYPE_MAX_VALUE_RAW (t));
14132 error_found = true;
14135 if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14137 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14138 debug_tree (TYPE_LANG_SLOT_1 (t));
14139 error_found = true;
14142 /* Check various uses of TYPE_VALUES_RAW. */
14143 if (TREE_CODE (t) == ENUMERAL_TYPE)
14144 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14146 tree value = TREE_VALUE (l);
14147 tree name = TREE_PURPOSE (l);
14149 /* The C FE produces INTEGER_CSTs of INTEGER_TYPE, while the C++ FE uses
14150 CONST_DECLs of ENUMERAL_TYPE. */
14151 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14153 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14154 debug_tree (value);
14155 debug_tree (name);
14156 error_found = true;
14158 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14159 && TREE_CODE (TREE_TYPE (value)) != BOOLEAN_TYPE
14160 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14162 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14163 "to the enum");
14164 debug_tree (value);
14165 debug_tree (name);
14166 error_found = true;
14168 if (TREE_CODE (name) != IDENTIFIER_NODE)
14170 error ("enum value name is not %<IDENTIFIER_NODE%>");
14171 debug_tree (value);
14172 debug_tree (name);
14173 error_found = true;
14176 else if (TREE_CODE (t) == ARRAY_TYPE)
14178 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14180 error ("array %<TYPE_DOMAIN%> is not integer type");
14181 debug_tree (TYPE_DOMAIN (t));
14182 error_found = true;
14185 else if (RECORD_OR_UNION_TYPE_P (t))
14187 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14189 error ("%<TYPE_FIELDS%> defined in incomplete type");
14190 error_found = true;
14192 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14194 /* TODO: verify properties of decls. */
14195 if (TREE_CODE (fld) == FIELD_DECL)
14197 else if (TREE_CODE (fld) == TYPE_DECL)
14199 else if (TREE_CODE (fld) == CONST_DECL)
14201 else if (VAR_P (fld))
14203 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14205 else if (TREE_CODE (fld) == USING_DECL)
14207 else if (TREE_CODE (fld) == FUNCTION_DECL)
14209 else
14211 error ("wrong tree in %<TYPE_FIELDS%> list");
14212 debug_tree (fld);
14213 error_found = true;
14217 else if (TREE_CODE (t) == INTEGER_TYPE
14218 || TREE_CODE (t) == BOOLEAN_TYPE
14219 || TREE_CODE (t) == OFFSET_TYPE
14220 || TREE_CODE (t) == REFERENCE_TYPE
14221 || TREE_CODE (t) == NULLPTR_TYPE
14222 || TREE_CODE (t) == POINTER_TYPE)
14224 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14226 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14227 "is %p",
14228 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14229 error_found = true;
14231 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14233 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14234 debug_tree (TYPE_CACHED_VALUES (t));
14235 error_found = true;
14237 /* Verify just enough of the cache to ensure that no one copied it to a new
14238 type. All copying should go through copy_node, which should clear it. */
14239 else if (TYPE_CACHED_VALUES_P (t))
14241 int i;
14242 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14243 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14244 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14246 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14247 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14248 error_found = true;
14249 break;
14253 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14254 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14256 /* C++ FE uses TREE_PURPOSE to store initial values. */
14257 if (TREE_PURPOSE (l) && in_lto_p)
14259 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14260 debug_tree (l);
14261 error_found = true;
14263 if (!TYPE_P (TREE_VALUE (l)))
14265 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14266 debug_tree (l);
14267 error_found = true;
14270 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14272 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14273 debug_tree (TYPE_VALUES_RAW (t));
14274 error_found = true;
14276 if (TREE_CODE (t) != INTEGER_TYPE
14277 && TREE_CODE (t) != BOOLEAN_TYPE
14278 && TREE_CODE (t) != OFFSET_TYPE
14279 && TREE_CODE (t) != REFERENCE_TYPE
14280 && TREE_CODE (t) != NULLPTR_TYPE
14281 && TREE_CODE (t) != POINTER_TYPE
14282 && TYPE_CACHED_VALUES_P (t))
14284 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14285 error_found = true;
14288 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14289 TYPE_MAIN_VARIANT, and it would be odd to add methods only to variants
14290 of a type. */
14291 if (TREE_CODE (t) == METHOD_TYPE
14292 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14294 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14295 error_found = true;
14298 if (error_found)
14300 debug_tree (const_cast <tree> (t));
14301 internal_error ("%qs failed", __func__);
14306 /* Return 1 if ARG interpreted as signed in its precision is known to be
14307 always non-negative, 2 if ARG is known to be always negative, or 3 if
14308 ARG may be either.
14311 get_range_pos_neg (tree arg)
14313 if (arg == error_mark_node)
14314 return 3;
14316 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14317 int cnt = 0;
14318 if (TREE_CODE (arg) == INTEGER_CST)
14320 wide_int w = wi::sext (wi::to_wide (arg), prec);
14321 if (wi::neg_p (w))
14322 return 2;
14323 else
14324 return 1;
14326 while (CONVERT_EXPR_P (arg)
14327 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14328 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14330 arg = TREE_OPERAND (arg, 0);
14331 /* A narrower value zero-extended into a wider type
14332 will always result in a non-negative value. */
14333 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14334 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14335 return 1;
14336 prec = TYPE_PRECISION (TREE_TYPE (arg));
14337 if (++cnt > 30)
14338 return 3;
14341 if (TREE_CODE (arg) != SSA_NAME)
14342 return 3;
14343 value_range r;
14344 while (!get_global_range_query ()->range_of_expr (r, arg) || r.kind () != VR_RANGE)
14346 gimple *g = SSA_NAME_DEF_STMT (arg);
14347 if (is_gimple_assign (g)
14348 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14350 tree t = gimple_assign_rhs1 (g);
14351 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14352 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14354 if (TYPE_UNSIGNED (TREE_TYPE (t))
14355 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14356 return 1;
14357 prec = TYPE_PRECISION (TREE_TYPE (t));
14358 arg = t;
14359 if (++cnt > 30)
14360 return 3;
14361 continue;
14364 return 3;
14366 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14368 /* For unsigned values, the "positive" range comes
14369 below the "negative" range. */
14370 if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14371 return 1;
14372 if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14373 return 2;
14375 else
14377 if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
14378 return 1;
14379 if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
14380 return 2;
14382 return 3;
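/* For example (an illustrative sketch):

     unsigned char c = get_byte ();
     int i = c;

   The value of `i' is a zero extension of a narrower unsigned value, so
   the conversion-stripping loops above recognize that and return 1; a
   negative INTEGER_CST yields 2, and a value whose sign cannot be
   determined yields 3.  */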
14388 /* Return true if ARG is marked with the nonnull attribute in the
14389 current function signature. */
14391 bool
14392 nonnull_arg_p (const_tree arg)
14394 tree t, attrs, fntype;
14395 unsigned HOST_WIDE_INT arg_num;
14397 gcc_assert (TREE_CODE (arg) == PARM_DECL
14398 && (POINTER_TYPE_P (TREE_TYPE (arg))
14399 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14401 /* The static chain decl is always non null. */
14402 if (arg == cfun->static_chain_decl)
14403 return true;
14405 /* THIS argument of method is always non-NULL. */
14406 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14407 && arg == DECL_ARGUMENTS (cfun->decl)
14408 && flag_delete_null_pointer_checks)
14409 return true;
14411 /* Values passed by reference are always non-NULL. */
14412 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14413 && flag_delete_null_pointer_checks)
14414 return true;
14416 fntype = TREE_TYPE (cfun->decl);
14417 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14419 attrs = lookup_attribute ("nonnull", attrs);
14421 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14422 if (attrs == NULL_TREE)
14423 return false;
14425 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14426 if (TREE_VALUE (attrs) == NULL_TREE)
14427 return true;
14429 /* Get the position number for ARG in the function signature. */
14430 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14432 t = DECL_CHAIN (t), arg_num++)
14434 if (t == arg)
14435 break;
14438 gcc_assert (t == arg);
14440 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14441 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14443 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14444 return true;
14448 return false;
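/* For example (a source-level sketch; the names are hypothetical):

     __attribute__ ((nonnull (2))) void f (void *p, void *q);

   While compiling f, nonnull_arg_p returns true for the PARM_DECL of `q'
   and false for `p', unless `p' matches one of the implicit cases handled
   above (the static chain, the `this' pointer, or a reference).  */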
14451 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14452 information. */
14454 location_t
14455 set_block (location_t loc, tree block)
14457 location_t pure_loc = get_pure_location (loc);
14458 source_range src_range = get_range_from_loc (line_table, loc);
14459 unsigned discriminator = get_discriminator_from_loc (line_table, loc);
14460 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block, discriminator);
14463 location_t
14464 set_source_range (tree expr, location_t start, location_t finish)
14466 source_range src_range;
14467 src_range.m_start = start;
14468 src_range.m_finish = finish;
14469 return set_source_range (expr, src_range);
14472 location_t
14473 set_source_range (tree expr, source_range src_range)
14475 if (!EXPR_P (expr))
14476 return UNKNOWN_LOCATION;
14478 location_t expr_location = EXPR_LOCATION (expr);
14479 location_t pure_loc = get_pure_location (expr_location);
14480 unsigned discriminator = get_discriminator_from_loc (expr_location);
14481 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14482 pure_loc,
14483 src_range,
14484 NULL,
14485 discriminator);
14486 SET_EXPR_LOCATION (expr, adhoc);
14487 return adhoc;
14490 /* Return EXPR, potentially wrapped in a wrapper node carrying location LOC,
14491 if !CAN_HAVE_LOCATION_P (expr).
14493 NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
14494 VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.
14496 Wrapper nodes can be identified using location_wrapper_p. */
14498 tree
14499 maybe_wrap_with_location (tree expr, location_t loc)
14501 if (expr == NULL)
14502 return NULL;
14503 if (loc == UNKNOWN_LOCATION)
14504 return expr;
14505 if (CAN_HAVE_LOCATION_P (expr))
14506 return expr;
14507 /* We should only be adding wrappers for constants and for decls,
14508 or for some exceptional tree nodes (e.g. BASELINK in the C++ FE). */
14509 gcc_assert (CONSTANT_CLASS_P (expr)
14510 || DECL_P (expr)
14511 || EXCEPTIONAL_CLASS_P (expr));
14513 /* For now, don't add wrappers to exceptional tree nodes, to minimize
14514 any impact of the wrapper nodes. */
14515 if (EXCEPTIONAL_CLASS_P (expr) || error_operand_p (expr))
14516 return expr;
14518 /* Compiler-generated temporary variables don't need a wrapper. */
14519 if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
14520 return expr;
14522 /* If any auto_suppress_location_wrappers are active, don't create
14523 wrappers. */
14524 if (suppress_location_wrappers > 0)
14525 return expr;
14527 tree_code code
14528 = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
14529 || (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
14530 ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
14531 tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
14532 /* Mark this node as being a wrapper. */
14533 EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
14534 return wrapper;
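/* For example: wrapping an INTEGER_CST such as 42 produces a
   NON_LVALUE_EXPR wrapper, while wrapping a STRING_CST or a VAR_DECL
   produces a VIEW_CONVERT_EXPR wrapper; both are marked with
   EXPR_LOCATION_WRAPPER_P and satisfy location_wrapper_p (see also the
   test_location_wrappers selftest below).  */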
14537 int suppress_location_wrappers;
14539 /* Return the name of combined function FN, for debugging purposes. */
14541 const char *
14542 combined_fn_name (combined_fn fn)
14544 if (builtin_fn_p (fn))
14546 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14547 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14549 else
14550 return internal_fn_name (as_internal_fn (fn));
14553 /* Return a bitmap with a bit set corresponding to each argument in
14554 a function call type FNTYPE declared with attribute nonnull,
14555 or null if none of the function's arguments are nonnull. The caller
14556 must free the bitmap. */
14558 bitmap
14559 get_nonnull_args (const_tree fntype)
14561 if (fntype == NULL_TREE)
14562 return NULL;
14564 bitmap argmap = NULL;
14565 if (TREE_CODE (fntype) == METHOD_TYPE)
14567 /* The this pointer in C++ non-static member functions is
14568 implicitly nonnull whether or not it's declared as such. */
14569 argmap = BITMAP_ALLOC (NULL);
14570 bitmap_set_bit (argmap, 0);
14573 tree attrs = TYPE_ATTRIBUTES (fntype);
14574 if (!attrs)
14575 return argmap;
14577 /* A function declaration can specify multiple attribute nonnull,
14578 each with zero or more arguments. The loop below creates a bitmap
14579 representing a union of all the arguments. An empty (but non-null)
14580 bitmap means that all arguments have been declared nonnull. */
14581 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14583 attrs = lookup_attribute ("nonnull", attrs);
14584 if (!attrs)
14585 break;
14587 if (!argmap)
14588 argmap = BITMAP_ALLOC (NULL);
14590 if (!TREE_VALUE (attrs))
14592 /* Clear the bitmap in case a previous attribute nonnull
14593 set it and this one overrides it for all arguments. */
14594 bitmap_clear (argmap);
14595 return argmap;
14598 /* Iterate over the indices of the format arguments declared nonnull
14599 and set a bit for each. */
14600 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14602 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14603 bitmap_set_bit (argmap, val);
14607 return argmap;
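/* For example (a source-level sketch):

     __attribute__ ((nonnull (1, 3))) void g (void *, int, void *);

   get_nonnull_args on g's FUNCTION_TYPE returns a bitmap with bits 0 and 2
   set (the attribute positions are 1-based, the bitmap is zero-based); for
   a METHOD_TYPE, bit 0 is set for the implicit `this' pointer regardless of
   any attributes.  */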
14610 /* Returns true if TYPE is a type where it and all of its subobjects
14611 (recursively) are of structure, union, or array type. */
14613 bool
14614 is_empty_type (const_tree type)
14616 if (RECORD_OR_UNION_TYPE_P (type))
14618 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14619 if (TREE_CODE (field) == FIELD_DECL
14620 && !DECL_PADDING_P (field)
14621 && !is_empty_type (TREE_TYPE (field)))
14622 return false;
14623 return true;
14625 else if (TREE_CODE (type) == ARRAY_TYPE)
14626 return (integer_minus_onep (array_type_nelts (type))
14627 || TYPE_DOMAIN (type) == NULL_TREE
14628 || is_empty_type (TREE_TYPE (type)));
14629 return false;
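/* For example (an illustrative sketch):

     struct empty { };
     struct still_empty { struct empty e; struct empty a[4]; };
     struct not_empty { struct empty e; int i; };

   is_empty_type is true for the first two and false for the last, because
   `not_empty' contains a scalar subobject.  */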
14632 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
14633 that shouldn't be passed via stack. */
14635 bool
14636 default_is_empty_record (const_tree type)
14638 if (!abi_version_at_least (12))
14639 return false;
14641 if (type == error_mark_node)
14642 return false;
14644 if (TREE_ADDRESSABLE (type))
14645 return false;
14647 return is_empty_type (TYPE_MAIN_VARIANT (type));
14650 /* Determine whether TYPE is a structure with a flexible array member,
14651 or a union containing such a structure (possibly recursively). */
14653 bool
14654 flexible_array_type_p (const_tree type)
14656 tree x, last;
14657 switch (TREE_CODE (type))
14659 case RECORD_TYPE:
14660 last = NULL_TREE;
14661 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14662 if (TREE_CODE (x) == FIELD_DECL)
14663 last = x;
14664 if (last == NULL_TREE)
14665 return false;
14666 if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
14667 && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
14668 && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
14669 && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
14670 return true;
14671 return false;
14672 case UNION_TYPE:
14673 for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
14675 if (TREE_CODE (x) == FIELD_DECL
14676 && flexible_array_type_p (TREE_TYPE (x)))
14677 return true;
14679 return false;
14680 default:
14681 return false;
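/* For example (an illustrative sketch):

     struct flex { int n; char data[]; };
     union u { struct flex f; int i; };

   flexible_array_type_p is true for `struct flex' (its last field is a
   flexible array member) and for `union u' (it contains such a struct),
   but false for e.g. a pointer to `struct flex'.  */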
14685 /* Like int_size_in_bytes, but handle empty records specially. */
14687 HOST_WIDE_INT
14688 arg_int_size_in_bytes (const_tree type)
14690 return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
14693 /* Like size_in_bytes, but handle empty records specially. */
14695 tree
14696 arg_size_in_bytes (const_tree type)
14698 return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
14701 /* Return true if an expression with CODE has to have the same result type as
14702 its first operand. */
14704 bool
14705 expr_type_first_operand_type_p (tree_code code)
14707 switch (code)
14709 case NEGATE_EXPR:
14710 case ABS_EXPR:
14711 case BIT_NOT_EXPR:
14712 case PAREN_EXPR:
14713 case CONJ_EXPR:
14715 case PLUS_EXPR:
14716 case MINUS_EXPR:
14717 case MULT_EXPR:
14718 case TRUNC_DIV_EXPR:
14719 case CEIL_DIV_EXPR:
14720 case FLOOR_DIV_EXPR:
14721 case ROUND_DIV_EXPR:
14722 case TRUNC_MOD_EXPR:
14723 case CEIL_MOD_EXPR:
14724 case FLOOR_MOD_EXPR:
14725 case ROUND_MOD_EXPR:
14726 case RDIV_EXPR:
14727 case EXACT_DIV_EXPR:
14728 case MIN_EXPR:
14729 case MAX_EXPR:
14730 case BIT_IOR_EXPR:
14731 case BIT_XOR_EXPR:
14732 case BIT_AND_EXPR:
14734 case LSHIFT_EXPR:
14735 case RSHIFT_EXPR:
14736 case LROTATE_EXPR:
14737 case RROTATE_EXPR:
14738 return true;
14740 default:
14741 return false;
14745 /* Return a typenode for the "standard" C type with a given name. */
14746 tree
14747 get_typenode_from_name (const char *name)
14749 if (name == NULL || *name == '\0')
14750 return NULL_TREE;
14752 if (strcmp (name, "char") == 0)
14753 return char_type_node;
14754 if (strcmp (name, "unsigned char") == 0)
14755 return unsigned_char_type_node;
14756 if (strcmp (name, "signed char") == 0)
14757 return signed_char_type_node;
14759 if (strcmp (name, "short int") == 0)
14760 return short_integer_type_node;
14761 if (strcmp (name, "short unsigned int") == 0)
14762 return short_unsigned_type_node;
14764 if (strcmp (name, "int") == 0)
14765 return integer_type_node;
14766 if (strcmp (name, "unsigned int") == 0)
14767 return unsigned_type_node;
14769 if (strcmp (name, "long int") == 0)
14770 return long_integer_type_node;
14771 if (strcmp (name, "long unsigned int") == 0)
14772 return long_unsigned_type_node;
14774 if (strcmp (name, "long long int") == 0)
14775 return long_long_integer_type_node;
14776 if (strcmp (name, "long long unsigned int") == 0)
14777 return long_long_unsigned_type_node;
14779 gcc_unreachable ();
14782 /* List of pointer types used to declare builtins before we have seen their
14783 real declaration.
14785 Keep the size up to date in tree.h ! */
14786 const builtin_structptr_type builtin_structptr_types[6] =
14788 { fileptr_type_node, ptr_type_node, "FILE" },
14789 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14790 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14791 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14792 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14793 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14796 /* Return the maximum object size. */
14798 tree
14799 max_object_size (void)
14801 /* To do: Make this a configurable parameter. */
14802 return TYPE_MAX_VALUE (ptrdiff_type_node);
14805 /* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
14806 parameter default to false and that weeds out error_mark_node. */
14808 bool
14809 verify_type_context (location_t loc, type_context_kind context,
14810 const_tree type, bool silent_p)
14812 if (type == error_mark_node)
14813 return true;
14815 gcc_assert (TYPE_P (type));
14816 return (!targetm.verify_type_context
14817 || targetm.verify_type_context (loc, context, type, silent_p));
14820 /* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
14821 delete operators. Return false if they may or may not name such
14822 a pair and, when nonnull, set *PCERTAIN to true if they certainly
14823 do not. */
14825 bool
14826 valid_new_delete_pair_p (tree new_asm, tree delete_asm,
14827 bool *pcertain /* = NULL */)
14829 bool certain;
14830 if (!pcertain)
14831 pcertain = &certain;
14833 const char *new_name = IDENTIFIER_POINTER (new_asm);
14834 const char *delete_name = IDENTIFIER_POINTER (delete_asm);
14835 unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
14836 unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);
14838 /* The following failures are due to invalid names so they're not
14839 considered certain mismatches. */
14840 *pcertain = false;
14842 if (new_len < 5 || delete_len < 6)
14843 return false;
14844 if (new_name[0] == '_')
14845 ++new_name, --new_len;
14846 if (new_name[0] == '_')
14847 ++new_name, --new_len;
14848 if (delete_name[0] == '_')
14849 ++delete_name, --delete_len;
14850 if (delete_name[0] == '_')
14851 ++delete_name, --delete_len;
14852 if (new_len < 4 || delete_len < 5)
14853 return false;
14855 /* The following failures are due to names of user-defined operators
14856 so they're also not considered certain mismatches. */
14858 /* *_len is now just the length after initial underscores. */
14859 if (new_name[0] != 'Z' || new_name[1] != 'n')
14860 return false;
14861 if (delete_name[0] != 'Z' || delete_name[1] != 'd')
14862 return false;
14864 /* The following failures are certain mismatches. */
14865 *pcertain = true;
14867 /* _Znw must match _Zdl, _Zna must match _Zda. */
14868 if ((new_name[2] != 'w' || delete_name[2] != 'l')
14869 && (new_name[2] != 'a' || delete_name[2] != 'a'))
14870 return false;
14871 /* 'j', 'm' and 'y' correspond to size_t. */
14872 if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
14873 return false;
14874 if (delete_name[3] != 'P' || delete_name[4] != 'v')
14875 return false;
14876 if (new_len == 4
14877 || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
14879 /* _ZnXY or _ZnXYRKSt9nothrow_t matches
14880 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t. */
14881 if (delete_len == 5)
14882 return true;
14883 if (delete_len == 6 && delete_name[5] == new_name[3])
14884 return true;
14885 if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
14886 return true;
14888 else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
14889 || (new_len == 33
14890 && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
14892 /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
14893 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
14894 _ZdXPvSt11align_val_tRKSt9nothrow_t. */
14895 if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
14896 return true;
14897 if (delete_len == 21
14898 && delete_name[5] == new_name[3]
14899 && !memcmp (delete_name + 6, "St11align_val_t", 15))
14900 return true;
14901 if (delete_len == 34
14902 && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
14903 return true;
14906 /* The negative result is conservative. */
14907 *pcertain = false;
14908 return false;
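/* For example (on an LP64 target, where size_t mangles as 'm'):
   _Znwm ("operator new(unsigned long)") pairs with _ZdlPv
   ("operator delete(void*)"), with the sized form _ZdlPvm, and with the
   nothrow form _ZdlPvRKSt9nothrow_t.  Mixing the ordinary and array
   forms, e.g. _Znwm with _ZdaPv, is rejected and *PCERTAIN is set to
   true.  */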
14911 /* Return the zero-based number corresponding to the argument being
14912 deallocated if FNDECL is a deallocation function or an out-of-bounds
14913 value if it isn't. */
14915 unsigned
14916 fndecl_dealloc_argno (tree fndecl)
14918 /* A call to operator delete isn't recognized as one to a built-in. */
14919 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
14921 if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
14922 return 0;
14924 /* Avoid placement delete that's not been inlined. */
14925 tree fname = DECL_ASSEMBLER_NAME (fndecl);
14926 if (id_equal (fname, "_ZdlPvS_") // ordinary form
14927 || id_equal (fname, "_ZdaPvS_")) // array form
14928 return UINT_MAX;
14929 return 0;
14932 /* TODO: Handle user-defined functions with attribute malloc? Handle
14933 known non-built-ins like fopen? */
14934 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
14936 switch (DECL_FUNCTION_CODE (fndecl))
14938 case BUILT_IN_FREE:
14939 case BUILT_IN_REALLOC:
14940 return 0;
14941 default:
14942 break;
14944 return UINT_MAX;
14947 tree attrs = DECL_ATTRIBUTES (fndecl);
14948 if (!attrs)
14949 return UINT_MAX;
14951 for (tree atfree = attrs;
14952 (atfree = lookup_attribute ("*dealloc", atfree));
14953 atfree = TREE_CHAIN (atfree))
14955 tree alloc = TREE_VALUE (atfree);
14956 if (!alloc)
14957 continue;
14959 tree pos = TREE_CHAIN (alloc);
14960 if (!pos)
14961 return 0;
14963 pos = TREE_VALUE (pos);
14964 return TREE_INT_CST_LOW (pos) - 1;
14967 return UINT_MAX;
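/* For example (a source-level sketch; my_alloc and my_free are made-up
   names):

     void my_free (void *);
     __attribute__ ((malloc (my_free, 1))) void *my_alloc (int);

   The front end records the association on the deallocator via the
   internal "*dealloc" attribute that the loop above looks up, so
   fndecl_dealloc_argno on my_free's decl would return 0, the zero-based
   position of its pointer argument; free and realloc are handled by the
   built-in case and also return 0.  */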
14970 /* If EXPR refers to a character array or pointer declared attribute
14971 nonstring, return a decl for that array or pointer and set *REF
14972 to the referenced enclosing object or pointer. Otherwise return
14973 null. */
14975 tree
14976 get_attr_nonstring_decl (tree expr, tree *ref)
14978 tree decl = expr;
14979 tree var = NULL_TREE;
14980 if (TREE_CODE (decl) == SSA_NAME)
14982 gimple *def = SSA_NAME_DEF_STMT (decl);
14984 if (is_gimple_assign (def))
14986 tree_code code = gimple_assign_rhs_code (def);
14987 if (code == ADDR_EXPR
14988 || code == COMPONENT_REF
14989 || code == VAR_DECL)
14990 decl = gimple_assign_rhs1 (def);
14992 else
14993 var = SSA_NAME_VAR (decl);
14996 if (TREE_CODE (decl) == ADDR_EXPR)
14997 decl = TREE_OPERAND (decl, 0);
14999 /* To simplify calling code, store the referenced DECL regardless of
15000 the attribute determined below, but avoid storing the SSA_NAME_VAR
15001 obtained above (it's not useful for dataflow purposes). */
15002 if (ref)
15003 *ref = decl;
15005 /* Use the SSA_NAME_VAR that was determined above to see if it's
15006 declared nonstring. Otherwise drill down into the referenced
15007 DECL. */
15008 if (var)
15009 decl = var;
15010 else if (TREE_CODE (decl) == ARRAY_REF)
15011 decl = TREE_OPERAND (decl, 0);
15012 else if (TREE_CODE (decl) == COMPONENT_REF)
15013 decl = TREE_OPERAND (decl, 1);
15014 else if (TREE_CODE (decl) == MEM_REF)
15015 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
15017 if (DECL_P (decl)
15018 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
15019 return decl;
15021 return NULL_TREE;
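/* For example (a source-level sketch):

     char name[8] __attribute__ ((nonstring));

   For an access such as `name' or `name[2]', get_attr_nonstring_decl
   returns the VAR_DECL for `name' and stores the referenced object in
   *REF; for an ordinary char array without the attribute it returns
   NULL_TREE.  */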
15024 /* Return the length of the attribute names string
15025 if the arglist chain has more than one entry, -1 otherwise. */
15028 get_target_clone_attr_len (tree arglist)
15030 tree arg;
15031 int str_len_sum = 0;
15032 int argnum = 0;
15034 for (arg = arglist; arg; arg = TREE_CHAIN (arg))
15036 const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
15037 size_t len = strlen (str);
15038 str_len_sum += len + 1;
15039 for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
15040 argnum++;
15041 argnum++;
15043 if (argnum <= 1)
15044 return -1;
15045 return str_len_sum;
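/* For example (an illustrative sketch): for

     __attribute__ ((target_clones ("avx2", "arch=atom", "default")))
     int foo (void);

   the arglist holds three strings, so the function returns the sum of
   their lengths plus one byte each (here 5 + 10 + 8 = 23); comma-separated
   names inside a single string are counted the same way, and a single
   version name yields -1.  */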
15048 void
15049 tree_cc_finalize (void)
15051 clear_nonstandard_integer_type_cache ();
15054 #if CHECKING_P
15056 namespace selftest {
15058 /* Selftests for tree. */
15060 /* Verify that integer constants are sane. */
15062 static void
15063 test_integer_constants ()
15065 ASSERT_TRUE (integer_type_node != NULL);
15066 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
15068 tree type = integer_type_node;
15070 tree zero = build_zero_cst (type);
15071 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
15072 ASSERT_EQ (type, TREE_TYPE (zero));
15074 tree one = build_int_cst (type, 1);
15075 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
15076 ASSERT_EQ (type, TREE_TYPE (one));
15079 /* Verify identifiers. */
15081 static void
15082 test_identifiers ()
15084 tree identifier = get_identifier ("foo");
15085 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
15086 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
15089 /* Verify LABEL_DECL. */
15091 static void
15092 test_labels ()
15094 tree identifier = get_identifier ("err");
15095 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
15096 identifier, void_type_node);
15097 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
15098 ASSERT_FALSE (FORCED_LABEL (label_decl));
15101 /* Return a new VECTOR_CST node whose type is TYPE and whose values
15102 are given by VALS. */
15104 static tree
15105 build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
15107 gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
15108 tree_vector_builder builder (type, vals.length (), 1);
15109 builder.splice (vals);
15110 return builder.build ();
15113 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
15115 static void
15116 check_vector_cst (const vec<tree> &expected, tree actual)
15118 ASSERT_KNOWN_EQ (expected.length (),
15119 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
15120 for (unsigned int i = 0; i < expected.length (); ++i)
15121 ASSERT_EQ (wi::to_wide (expected[i]),
15122 wi::to_wide (vector_cst_elt (actual, i)));
15125 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
15126 and that its elements match EXPECTED. */
15128 static void
15129 check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
15130 unsigned int npatterns)
15132 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15133 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
15134 ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
15135 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
15136 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15137 check_vector_cst (expected, actual);
15140 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
15141 and NPATTERNS background elements, and that its elements match
15142 EXPECTED. */
15144 static void
15145 check_vector_cst_fill (const vec<tree> &expected, tree actual,
15146 unsigned int npatterns)
15148 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15149 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
15150 ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
15151 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15152 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
15153 check_vector_cst (expected, actual);
15156 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
15157 and that its elements match EXPECTED. */
15159 static void
15160 check_vector_cst_stepped (const vec<tree> &expected, tree actual,
15161 unsigned int npatterns)
15163 ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
15164 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
15165 ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
15166 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
15167 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
15168 check_vector_cst (expected, actual);
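/* A note on the encoding being checked (an illustrative sketch): a
   VECTOR_CST such as { 0, 1, 2, 3, 4, 5, 6, 7 } is encoded with one
   pattern and three elements per pattern -- the encoded elements 0, 1
   and 2, with the remaining elements implied by the step of 1 -- so
   check_vector_cst_stepped expects VECTOR_CST_NELTS_PER_PATTERN == 3,
   while a duplicate like { 5, 5, 5, 5 } needs only one element per
   pattern and a "fill" vector needs two.  */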
15171 /* Test the creation of VECTOR_CSTs. */
15173 static void
15174 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
15176 auto_vec<tree, 8> elements (8);
15177 elements.quick_grow (8);
15178 tree element_type = build_nonstandard_integer_type (16, true);
15179 tree vector_type = build_vector_type (element_type, 8);
15181 /* Test a simple linear series with a base of 0 and a step of 1:
15182 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
15183 for (unsigned int i = 0; i < 8; ++i)
15184 elements[i] = build_int_cst (element_type, i);
15185 tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
15186 check_vector_cst_stepped (elements, vector, 1);
15188 /* Try the same with the first element replaced by 100:
15189 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
15190 elements[0] = build_int_cst (element_type, 100);
15191 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15192 check_vector_cst_stepped (elements, vector, 1);
15194 /* Try a series that wraps around.
15195 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
15196 for (unsigned int i = 1; i < 8; ++i)
15197 elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
15198 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15199 check_vector_cst_stepped (elements, vector, 1);
15201 /* Try a downward series:
15202 { 100, 79, 78, 77, 76, 75, 74, 73 }. */
15203 for (unsigned int i = 1; i < 8; ++i)
15204 elements[i] = build_int_cst (element_type, 80 - i);
15205 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15206 check_vector_cst_stepped (elements, vector, 1);
15208 /* Try two interleaved series with different bases and steps:
15209 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
15210 elements[1] = build_int_cst (element_type, 53);
15211 for (unsigned int i = 2; i < 8; i += 2)
15213 elements[i] = build_int_cst (element_type, 70 - i * 2);
15214 elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
15216 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15217 check_vector_cst_stepped (elements, vector, 2);
15219 /* Try a duplicated value:
15220 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
15221 for (unsigned int i = 1; i < 8; ++i)
15222 elements[i] = elements[0];
15223 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15224 check_vector_cst_duplicate (elements, vector, 1);
15226 /* Try an interleaved duplicated value:
15227 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
15228 elements[1] = build_int_cst (element_type, 55);
15229 for (unsigned int i = 2; i < 8; ++i)
15230 elements[i] = elements[i - 2];
15231 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15232 check_vector_cst_duplicate (elements, vector, 2);
15234 /* Try a duplicated value with 2 exceptions
15235 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
15236 elements[0] = build_int_cst (element_type, 41);
15237 elements[1] = build_int_cst (element_type, 97);
15238 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15239 check_vector_cst_fill (elements, vector, 2);
15241 /* Try with and without a step
15242 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
15243 for (unsigned int i = 3; i < 8; i += 2)
15244 elements[i] = build_int_cst (element_type, i * 7);
15245 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15246 check_vector_cst_stepped (elements, vector, 2);
15248 /* Try a fully-general constant:
15249 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
15250 elements[5] = build_int_cst (element_type, 9990);
15251 vector = build_vector (vector_type, elements PASS_MEM_STAT);
15252 check_vector_cst_fill (elements, vector, 4);
15255 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
15256 Helper function for test_location_wrappers, to deal with STRIP_NOPS
15257 modifying its argument in-place. */
15259 static void
15260 check_strip_nops (tree node, tree expected)
15262 STRIP_NOPS (node);
15263 ASSERT_EQ (expected, node);
15266 /* Verify location wrappers. */
15268 static void
15269 test_location_wrappers ()
15271 location_t loc = BUILTINS_LOCATION;
15273 ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));
15275 /* Wrapping a constant. */
15276 tree int_cst = build_int_cst (integer_type_node, 42);
15277 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
15278 ASSERT_FALSE (location_wrapper_p (int_cst));
15280 tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
15281 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
15282 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
15283 ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));
15285 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
15286 ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));
15288 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
15289 tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
15290 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
15291 ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));
15293 /* Wrapping a STRING_CST. */
15294 tree string_cst = build_string (4, "foo");
15295 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
15296 ASSERT_FALSE (location_wrapper_p (string_cst));
15298 tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
15299 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
15300 ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
15301 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
15302 ASSERT_EQ (string_cst, tree_strip_any_location_wrapper (wrapped_string_cst));
15305 /* Wrapping a variable. */
15306 tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
15307 get_identifier ("some_int_var"),
15308 integer_type_node);
15309 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
15310 ASSERT_FALSE (location_wrapper_p (int_var));
15312 tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
15313 ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
15314 ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
15315 ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));
15317 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
15318 wrapper. */
15319 tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
15320 ASSERT_FALSE (location_wrapper_p (r_cast));
15321 ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));
15323 /* Verify that STRIP_NOPS removes wrappers. */
15324 check_strip_nops (wrapped_int_cst, int_cst);
15325 check_strip_nops (wrapped_string_cst, string_cst);
15326 check_strip_nops (wrapped_int_var, int_var);
15327 }
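/* A sketch of the front-end pattern these tests exercise (hypothetical
   caller, illustrative only): wrap an operand where it is parsed so that
   diagnostics can point at it, then look through the wrapper before any
   value-based checks:

     arg = maybe_wrap_with_location (arg, arg_location);
     ...
     tree val = tree_strip_any_location_wrapper (arg);
     if (integer_zerop (val))
       warn_about_zero_argument ();   // hypothetical helper  */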
15329 /* Test various tree predicates. Verify that location wrappers don't
15330 affect the results. */
15332 static void
15333 test_predicates ()
15334 {
15335 /* Build various constants and wrappers around them. */
15337 location_t loc = BUILTINS_LOCATION;
15339 tree i_0 = build_int_cst (integer_type_node, 0);
15340 tree wr_i_0 = maybe_wrap_with_location (i_0, loc);
15342 tree i_1 = build_int_cst (integer_type_node, 1);
15343 tree wr_i_1 = maybe_wrap_with_location (i_1, loc);
15345 tree i_m1 = build_int_cst (integer_type_node, -1);
15346 tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);
15348 tree f_0 = build_real_from_int_cst (float_type_node, i_0);
15349 tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
15350 tree f_1 = build_real_from_int_cst (float_type_node, i_1);
15351 tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
15352 tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
15353 tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);
15355 tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
15356 tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
15357 tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);
15359 tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
15360 tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
15361 tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);
15363 /* TODO: vector constants. */
15365 /* Test integer_onep. */
15366 ASSERT_FALSE (integer_onep (i_0));
15367 ASSERT_FALSE (integer_onep (wr_i_0));
15368 ASSERT_TRUE (integer_onep (i_1));
15369 ASSERT_TRUE (integer_onep (wr_i_1));
15370 ASSERT_FALSE (integer_onep (i_m1));
15371 ASSERT_FALSE (integer_onep (wr_i_m1));
15372 ASSERT_FALSE (integer_onep (f_0));
15373 ASSERT_FALSE (integer_onep (wr_f_0));
15374 ASSERT_FALSE (integer_onep (f_1));
15375 ASSERT_FALSE (integer_onep (wr_f_1));
15376 ASSERT_FALSE (integer_onep (f_m1));
15377 ASSERT_FALSE (integer_onep (wr_f_m1));
15378 ASSERT_FALSE (integer_onep (c_i_0));
15379 ASSERT_TRUE (integer_onep (c_i_1));
15380 ASSERT_FALSE (integer_onep (c_i_m1));
15381 ASSERT_FALSE (integer_onep (c_f_0));
15382 ASSERT_FALSE (integer_onep (c_f_1));
15383 ASSERT_FALSE (integer_onep (c_f_m1));
15385 /* Test integer_zerop. */
15386 ASSERT_TRUE (integer_zerop (i_0));
15387 ASSERT_TRUE (integer_zerop (wr_i_0));
15388 ASSERT_FALSE (integer_zerop (i_1));
15389 ASSERT_FALSE (integer_zerop (wr_i_1));
15390 ASSERT_FALSE (integer_zerop (i_m1));
15391 ASSERT_FALSE (integer_zerop (wr_i_m1));
15392 ASSERT_FALSE (integer_zerop (f_0));
15393 ASSERT_FALSE (integer_zerop (wr_f_0));
15394 ASSERT_FALSE (integer_zerop (f_1));
15395 ASSERT_FALSE (integer_zerop (wr_f_1));
15396 ASSERT_FALSE (integer_zerop (f_m1));
15397 ASSERT_FALSE (integer_zerop (wr_f_m1));
15398 ASSERT_TRUE (integer_zerop (c_i_0));
15399 ASSERT_FALSE (integer_zerop (c_i_1));
15400 ASSERT_FALSE (integer_zerop (c_i_m1));
15401 ASSERT_FALSE (integer_zerop (c_f_0));
15402 ASSERT_FALSE (integer_zerop (c_f_1));
15403 ASSERT_FALSE (integer_zerop (c_f_m1));
15405 /* Test integer_all_onesp. */
15406 ASSERT_FALSE (integer_all_onesp (i_0));
15407 ASSERT_FALSE (integer_all_onesp (wr_i_0));
15408 ASSERT_FALSE (integer_all_onesp (i_1));
15409 ASSERT_FALSE (integer_all_onesp (wr_i_1));
15410 ASSERT_TRUE (integer_all_onesp (i_m1));
15411 ASSERT_TRUE (integer_all_onesp (wr_i_m1));
15412 ASSERT_FALSE (integer_all_onesp (f_0));
15413 ASSERT_FALSE (integer_all_onesp (wr_f_0));
15414 ASSERT_FALSE (integer_all_onesp (f_1));
15415 ASSERT_FALSE (integer_all_onesp (wr_f_1));
15416 ASSERT_FALSE (integer_all_onesp (f_m1));
15417 ASSERT_FALSE (integer_all_onesp (wr_f_m1));
15418 ASSERT_FALSE (integer_all_onesp (c_i_0));
15419 ASSERT_FALSE (integer_all_onesp (c_i_1));
15420 ASSERT_FALSE (integer_all_onesp (c_i_m1));
15421 ASSERT_FALSE (integer_all_onesp (c_f_0));
15422 ASSERT_FALSE (integer_all_onesp (c_f_1));
15423 ASSERT_FALSE (integer_all_onesp (c_f_m1));
15425 /* Test integer_minus_onep. */
15426 ASSERT_FALSE (integer_minus_onep (i_0));
15427 ASSERT_FALSE (integer_minus_onep (wr_i_0));
15428 ASSERT_FALSE (integer_minus_onep (i_1));
15429 ASSERT_FALSE (integer_minus_onep (wr_i_1));
15430 ASSERT_TRUE (integer_minus_onep (i_m1));
15431 ASSERT_TRUE (integer_minus_onep (wr_i_m1));
15432 ASSERT_FALSE (integer_minus_onep (f_0));
15433 ASSERT_FALSE (integer_minus_onep (wr_f_0));
15434 ASSERT_FALSE (integer_minus_onep (f_1));
15435 ASSERT_FALSE (integer_minus_onep (wr_f_1));
15436 ASSERT_FALSE (integer_minus_onep (f_m1));
15437 ASSERT_FALSE (integer_minus_onep (wr_f_m1));
15438 ASSERT_FALSE (integer_minus_onep (c_i_0));
15439 ASSERT_FALSE (integer_minus_onep (c_i_1));
15440 ASSERT_TRUE (integer_minus_onep (c_i_m1));
15441 ASSERT_FALSE (integer_minus_onep (c_f_0));
15442 ASSERT_FALSE (integer_minus_onep (c_f_1));
15443 ASSERT_FALSE (integer_minus_onep (c_f_m1));
15445 /* Test integer_each_onep. */
15446 ASSERT_FALSE (integer_each_onep (i_0));
15447 ASSERT_FALSE (integer_each_onep (wr_i_0));
15448 ASSERT_TRUE (integer_each_onep (i_1));
15449 ASSERT_TRUE (integer_each_onep (wr_i_1));
15450 ASSERT_FALSE (integer_each_onep (i_m1));
15451 ASSERT_FALSE (integer_each_onep (wr_i_m1));
15452 ASSERT_FALSE (integer_each_onep (f_0));
15453 ASSERT_FALSE (integer_each_onep (wr_f_0));
15454 ASSERT_FALSE (integer_each_onep (f_1));
15455 ASSERT_FALSE (integer_each_onep (wr_f_1));
15456 ASSERT_FALSE (integer_each_onep (f_m1));
15457 ASSERT_FALSE (integer_each_onep (wr_f_m1));
15458 ASSERT_FALSE (integer_each_onep (c_i_0));
15459 ASSERT_FALSE (integer_each_onep (c_i_1));
15460 ASSERT_FALSE (integer_each_onep (c_i_m1));
15461 ASSERT_FALSE (integer_each_onep (c_f_0));
15462 ASSERT_FALSE (integer_each_onep (c_f_1));
15463 ASSERT_FALSE (integer_each_onep (c_f_m1));
15465 /* Test integer_truep. */
15466 ASSERT_FALSE (integer_truep (i_0));
15467 ASSERT_FALSE (integer_truep (wr_i_0));
15468 ASSERT_TRUE (integer_truep (i_1));
15469 ASSERT_TRUE (integer_truep (wr_i_1));
15470 ASSERT_FALSE (integer_truep (i_m1));
15471 ASSERT_FALSE (integer_truep (wr_i_m1));
15472 ASSERT_FALSE (integer_truep (f_0));
15473 ASSERT_FALSE (integer_truep (wr_f_0));
15474 ASSERT_FALSE (integer_truep (f_1));
15475 ASSERT_FALSE (integer_truep (wr_f_1));
15476 ASSERT_FALSE (integer_truep (f_m1));
15477 ASSERT_FALSE (integer_truep (wr_f_m1));
15478 ASSERT_FALSE (integer_truep (c_i_0));
15479 ASSERT_TRUE (integer_truep (c_i_1));
15480 ASSERT_FALSE (integer_truep (c_i_m1));
15481 ASSERT_FALSE (integer_truep (c_f_0));
15482 ASSERT_FALSE (integer_truep (c_f_1));
15483 ASSERT_FALSE (integer_truep (c_f_m1));
15485 /* Test integer_nonzerop. */
15486 ASSERT_FALSE (integer_nonzerop (i_0));
15487 ASSERT_FALSE (integer_nonzerop (wr_i_0));
15488 ASSERT_TRUE (integer_nonzerop (i_1));
15489 ASSERT_TRUE (integer_nonzerop (wr_i_1));
15490 ASSERT_TRUE (integer_nonzerop (i_m1));
15491 ASSERT_TRUE (integer_nonzerop (wr_i_m1));
15492 ASSERT_FALSE (integer_nonzerop (f_0));
15493 ASSERT_FALSE (integer_nonzerop (wr_f_0));
15494 ASSERT_FALSE (integer_nonzerop (f_1));
15495 ASSERT_FALSE (integer_nonzerop (wr_f_1));
15496 ASSERT_FALSE (integer_nonzerop (f_m1));
15497 ASSERT_FALSE (integer_nonzerop (wr_f_m1));
15498 ASSERT_FALSE (integer_nonzerop (c_i_0));
15499 ASSERT_TRUE (integer_nonzerop (c_i_1));
15500 ASSERT_TRUE (integer_nonzerop (c_i_m1));
15501 ASSERT_FALSE (integer_nonzerop (c_f_0));
15502 ASSERT_FALSE (integer_nonzerop (c_f_1));
15503 ASSERT_FALSE (integer_nonzerop (c_f_m1));
15505 /* Test real_zerop. */
15506 ASSERT_FALSE (real_zerop (i_0));
15507 ASSERT_FALSE (real_zerop (wr_i_0));
15508 ASSERT_FALSE (real_zerop (i_1));
15509 ASSERT_FALSE (real_zerop (wr_i_1));
15510 ASSERT_FALSE (real_zerop (i_m1));
15511 ASSERT_FALSE (real_zerop (wr_i_m1));
15512 ASSERT_TRUE (real_zerop (f_0));
15513 ASSERT_TRUE (real_zerop (wr_f_0));
15514 ASSERT_FALSE (real_zerop (f_1));
15515 ASSERT_FALSE (real_zerop (wr_f_1));
15516 ASSERT_FALSE (real_zerop (f_m1));
15517 ASSERT_FALSE (real_zerop (wr_f_m1));
15518 ASSERT_FALSE (real_zerop (c_i_0));
15519 ASSERT_FALSE (real_zerop (c_i_1));
15520 ASSERT_FALSE (real_zerop (c_i_m1));
15521 ASSERT_TRUE (real_zerop (c_f_0));
15522 ASSERT_FALSE (real_zerop (c_f_1));
15523 ASSERT_FALSE (real_zerop (c_f_m1));
15525 /* Test real_onep. */
15526 ASSERT_FALSE (real_onep (i_0));
15527 ASSERT_FALSE (real_onep (wr_i_0));
15528 ASSERT_FALSE (real_onep (i_1));
15529 ASSERT_FALSE (real_onep (wr_i_1));
15530 ASSERT_FALSE (real_onep (i_m1));
15531 ASSERT_FALSE (real_onep (wr_i_m1));
15532 ASSERT_FALSE (real_onep (f_0));
15533 ASSERT_FALSE (real_onep (wr_f_0));
15534 ASSERT_TRUE (real_onep (f_1));
15535 ASSERT_TRUE (real_onep (wr_f_1));
15536 ASSERT_FALSE (real_onep (f_m1));
15537 ASSERT_FALSE (real_onep (wr_f_m1));
15538 ASSERT_FALSE (real_onep (c_i_0));
15539 ASSERT_FALSE (real_onep (c_i_1));
15540 ASSERT_FALSE (real_onep (c_i_m1));
15541 ASSERT_FALSE (real_onep (c_f_0));
15542 ASSERT_TRUE (real_onep (c_f_1));
15543 ASSERT_FALSE (real_onep (c_f_m1));
15545 /* Test real_minus_onep. */
15546 ASSERT_FALSE (real_minus_onep (i_0));
15547 ASSERT_FALSE (real_minus_onep (wr_i_0));
15548 ASSERT_FALSE (real_minus_onep (i_1));
15549 ASSERT_FALSE (real_minus_onep (wr_i_1));
15550 ASSERT_FALSE (real_minus_onep (i_m1));
15551 ASSERT_FALSE (real_minus_onep (wr_i_m1));
15552 ASSERT_FALSE (real_minus_onep (f_0));
15553 ASSERT_FALSE (real_minus_onep (wr_f_0));
15554 ASSERT_FALSE (real_minus_onep (f_1));
15555 ASSERT_FALSE (real_minus_onep (wr_f_1));
15556 ASSERT_TRUE (real_minus_onep (f_m1));
15557 ASSERT_TRUE (real_minus_onep (wr_f_m1));
15558 ASSERT_FALSE (real_minus_onep (c_i_0));
15559 ASSERT_FALSE (real_minus_onep (c_i_1));
15560 ASSERT_FALSE (real_minus_onep (c_i_m1));
15561 ASSERT_FALSE (real_minus_onep (c_f_0));
15562 ASSERT_FALSE (real_minus_onep (c_f_1));
15563 ASSERT_TRUE (real_minus_onep (c_f_m1));
15565 /* Test zerop. */
15566 ASSERT_TRUE (zerop (i_0));
15567 ASSERT_TRUE (zerop (wr_i_0));
15568 ASSERT_FALSE (zerop (i_1));
15569 ASSERT_FALSE (zerop (wr_i_1));
15570 ASSERT_FALSE (zerop (i_m1));
15571 ASSERT_FALSE (zerop (wr_i_m1));
15572 ASSERT_TRUE (zerop (f_0));
15573 ASSERT_TRUE (zerop (wr_f_0));
15574 ASSERT_FALSE (zerop (f_1));
15575 ASSERT_FALSE (zerop (wr_f_1));
15576 ASSERT_FALSE (zerop (f_m1));
15577 ASSERT_FALSE (zerop (wr_f_m1));
15578 ASSERT_TRUE (zerop (c_i_0));
15579 ASSERT_FALSE (zerop (c_i_1));
15580 ASSERT_FALSE (zerop (c_i_m1));
15581 ASSERT_TRUE (zerop (c_f_0));
15582 ASSERT_FALSE (zerop (c_f_1));
15583 ASSERT_FALSE (zerop (c_f_m1));
15585 /* Test tree_expr_nonnegative_p. */
15586 ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
15587 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
15588 ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
15589 ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
15590 ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
15591 ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
15592 ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
15593 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
15594 ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
15595 ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
15596 ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
15597 ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
15598 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
15599 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
15600 ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
15601 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
15602 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
15603 ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
15605 /* Test tree_expr_nonzero_p. */
15606 ASSERT_FALSE (tree_expr_nonzero_p (i_0));
15607 ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
15608 ASSERT_TRUE (tree_expr_nonzero_p (i_1));
15609 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
15610 ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
15611 ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
15613 /* Test integer_valued_real_p. */
15614 ASSERT_FALSE (integer_valued_real_p (i_0));
15615 ASSERT_TRUE (integer_valued_real_p (f_0));
15616 ASSERT_TRUE (integer_valued_real_p (wr_f_0));
15617 ASSERT_TRUE (integer_valued_real_p (f_1));
15618 ASSERT_TRUE (integer_valued_real_p (wr_f_1));
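/* The negative case for a real constant needs a fractional value; a sketch
   (illustrative only, assuming the shared dconsthalf constant from real.h):

     tree f_half = build_real (float_type_node, dconsthalf);
     ASSERT_FALSE (integer_valued_real_p (f_half));  */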
15620 /* Test integer_pow2p. */
15621 ASSERT_FALSE (integer_pow2p (i_0));
15622 ASSERT_TRUE (integer_pow2p (i_1));
15623 ASSERT_TRUE (integer_pow2p (wr_i_1));
15625 /* Test uniform_integer_cst_p. */
15626 ASSERT_TRUE (uniform_integer_cst_p (i_0));
15627 ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
15628 ASSERT_TRUE (uniform_integer_cst_p (i_1));
15629 ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
15630 ASSERT_TRUE (uniform_integer_cst_p (i_m1));
15631 ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
15632 ASSERT_FALSE (uniform_integer_cst_p (f_0));
15633 ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
15634 ASSERT_FALSE (uniform_integer_cst_p (f_1));
15635 ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
15636 ASSERT_FALSE (uniform_integer_cst_p (f_m1));
15637 ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
15638 ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
15639 ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
15640 ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
15641 ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
15642 ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
15643 ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
15644 }
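/* A sketch of how these predicates are typically consumed by folding code
   (hypothetical fold, illustrative only).  Because they look through
   location wrappers, a wrapped operand folds exactly like a bare one:

     // x * 1 -> x, whether or not the 1 still carries a location wrapper.
     if (integer_onep (op1))
       return op0;
     // x + 0 -> x only for integral types; it is not generally valid for
     // floating point (consider x == -0.0).
     if (INTEGRAL_TYPE_P (type) && integer_zerop (op1))
       return op0;  */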
15646 /* Check that string escaping works correctly. */
15648 static void
15649 test_escaped_strings (void)
15650 {
15651 int saved_cutoff;
15652 escaped_string msg;
15654 msg.escape (NULL);
15655 /* ASSERT_STREQ does not accept NULL as a valid test
15656 result, so we have to use ASSERT_EQ instead. */
15657 ASSERT_EQ (NULL, (const char *) msg);
15659 msg.escape ("");
15660 ASSERT_STREQ ("", (const char *) msg);
15662 msg.escape ("foobar");
15663 ASSERT_STREQ ("foobar", (const char *) msg);
15665 /* Ensure that we have -fmessage-length set to 0. */
15666 saved_cutoff = pp_line_cutoff (global_dc->printer);
15667 pp_line_cutoff (global_dc->printer) = 0;
15669 msg.escape ("foo\nbar");
15670 ASSERT_STREQ ("foo\\nbar", (const char *) msg);
15672 msg.escape ("\a\b\f\n\r\t\v");
15673 ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);
15675 /* Now repeat the tests with -fmessage-length set to 5. */
15676 pp_line_cutoff (global_dc->printer) = 5;
15678 /* Note that the newline is not translated into an escape. */
15679 msg.escape ("foo\nbar");
15680 ASSERT_STREQ ("foo\nbar", (const char *) msg);
15682 msg.escape ("\a\b\f\n\r\t\v");
15683 ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);
15685 /* Restore the original message length setting. */
15686 pp_line_cutoff (global_dc->printer) = saved_cutoff;
15687 }
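/* A sketch of how escaped_string is typically used when quoting
   user-controlled text in a diagnostic, so that control characters cannot
   corrupt the output (hypothetical message, illustrative only):

     escaped_string name;
     name.escape (TREE_STRING_POINTER (str));
     if (name)
       warning (OPT_Wattributes, "ignoring attribute %qs",
		(const char *) name);  */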
15689 /* Run all of the selftests within this file. */
15691 void
15692 tree_cc_tests ()
15693 {
15694 test_integer_constants ();
15695 test_identifiers ();
15696 test_labels ();
15697 test_vector_cst_patterns ();
15698 test_location_wrappers ();
15699 test_predicates ();
15700 test_escaped_strings ();
15701 }
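/* These selftests exist only in checking-enabled builds (CHECKING_P) and are
   meant to be driven by the common selftest harness, which the testsuite
   invokes through the -fself-test option; tree_cc_tests is expected to be
   called from the generic runner (selftest-run-tests.cc), roughly:

     // among the other *_cc_tests calls in the selftest runner
     tree_cc_tests ();  */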
15703 } // namespace selftest
15705 #endif /* CHECKING_P */
15707 #include "gt-tree.h"