/* Language-independent node constructors for parse phase of GNU compiler.
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains the low level primitives for operating on tree nodes,
   including allocation, list operations, interning of identifiers,
   construction of data type nodes and statement nodes,
   and construction of type conversion nodes.  It also contains
   tables indexed by tree code that describe how to take apart
   nodes of that code.

   It is intended to be language-independent but can occasionally
   call language-dependent routines.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "toplev.h"  /* get_random_seed */
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "langhooks-def.h"
#include "tree-diagnostic.h"
#include "print-tree.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-vector-builder.h"
#include "gimple-fold.h"
#include "escaped_string.h"
#include "gimple-range.h"
#include "gomp-constants.h"
/* Tree code classes.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Names of tree components.
   Used for printing out the tree and error messages.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries.  */

const char *const tree_code_class_strings[] =
/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);
/* Statistics-gathering stuff.  */

static uint64_t tree_code_counts[MAX_TREE_CODES];
uint64_t tree_node_counts[(int) all_kinds];
uint64_t tree_node_sizes[(int) all_kinds];

/* Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {

/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;

/* Unique id for next type created.  */
static GTY(()) unsigned next_type_uid = 1;

/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;
/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash
{
  unsigned long hash;
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000

struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;

/* Class and variable for making sure that there is a single POLY_INT_CST
   for a given value.  */
struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  typedef std::pair<tree, const poly_wide_int *> compare_type;
  static hashval_t hash (tree t);
  static bool equal (tree x, const compare_type &y);
};

static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatedly.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
/* General tree->tree mapping structure for use in hash tables.  */

hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;

hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;

static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);

tree global_trees[TI_MAX];
tree integer_types[itk_none];

bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees[NUM_INT_N_ENTS];

bool tree_contains_struct[MAX_TREE_CODES][64];
/* Number of operands for each OMP clause.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  1, /* OMP_CLAUSE_AFFINITY  */
  2, /* OMP_CLAUSE_ALIGNED  */
  3, /* OMP_CLAUSE_ALLOCATE  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_ENTER  */
  1, /* OMP_CLAUSE_LINK  */
  1, /* OMP_CLAUSE_DETACH  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_HAS_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_DOACROSS  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  2, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE_FILTER  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  0, /* OMP_CLAUSE_IF_PRESENT  */
  0, /* OMP_CLAUSE_FINALIZE  */
  0, /* OMP_CLAUSE_NOHOST  */
};
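/* A sketch of how this table is typically consumed: the array is indexed
   by OMP_CLAUSE_CODE, so a generic walk over the operands of some clause
   CLAUSE (the names CALLBACK and DATA here are illustrative) can be
   written as

     for (int i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (clause)]; i++)
       walk_tree (&OMP_CLAUSE_OPERAND (clause, i), callback, data, NULL);

   where the loop bound comes straight from this table.  */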
const char * const omp_clause_code_name[] =
/* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
   clause names, but for use in diagnostics etc. we would like to use the
   "user" name.  */

const char *
user_omp_clause_code_name (tree clause, bool oacc)
{
  /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
     distinguish clauses as seen by the user.  See also where front ends do
     'build_omp_clause' with 'OMP_CLAUSE_MAP'.  */
  if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
    switch (OMP_CLAUSE_MAP_KIND (clause))
      {
      case GOMP_MAP_FORCE_ALLOC:
      case GOMP_MAP_ALLOC: return "create";
      case GOMP_MAP_FORCE_TO:
      case GOMP_MAP_TO: return "copyin";
      case GOMP_MAP_FORCE_FROM:
      case GOMP_MAP_FROM: return "copyout";
      case GOMP_MAP_FORCE_TOFROM:
      case GOMP_MAP_TOFROM: return "copy";
      case GOMP_MAP_RELEASE: return "delete";
      case GOMP_MAP_FORCE_PRESENT: return "present";
      case GOMP_MAP_ATTACH: return "attach";
      case GOMP_MAP_FORCE_DETACH:
      case GOMP_MAP_DETACH: return "detach";
      case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
      case GOMP_MAP_LINK: return "link";
      case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
      default: break;
      }

  return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
}
/* Return the tree node structure used by tree code CODE.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      switch (code)
	{
	case CONST_DECL: return TS_CONST_DECL;
	case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
	case FIELD_DECL: return TS_FIELD_DECL;
	case FUNCTION_DECL: return TS_FUNCTION_DECL;
	case LABEL_DECL: return TS_LABEL_DECL;
	case PARM_DECL: return TS_PARM_DECL;
	case RESULT_DECL: return TS_RESULT_DECL;
	case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
	case TYPE_DECL: return TS_TYPE_DECL;
	case VAR_DECL: return TS_VAR_DECL;
	default: return TS_DECL_NON_COMMON;
	}

    case tcc_type: return TS_TYPE_NON_COMMON;

    case tcc_vl_exp: return TS_EXP;

    default:  /* tcc_constant and tcc_exceptional */
      switch (code)
	{
	  /* tcc_constant cases.  */
	case COMPLEX_CST: return TS_COMPLEX;
	case FIXED_CST: return TS_FIXED_CST;
	case INTEGER_CST: return TS_INT_CST;
	case POLY_INT_CST: return TS_POLY_INT_CST;
	case REAL_CST: return TS_REAL_CST;
	case STRING_CST: return TS_STRING;
	case VECTOR_CST: return TS_VECTOR;
	case VOID_CST: return TS_TYPED;

	  /* tcc_exceptional cases.  */
	case BLOCK: return TS_BLOCK;
	case CONSTRUCTOR: return TS_CONSTRUCTOR;
	case ERROR_MARK: return TS_COMMON;
	case IDENTIFIER_NODE: return TS_IDENTIFIER;
	case OMP_CLAUSE: return TS_OMP_CLAUSE;
	case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
	case PLACEHOLDER_EXPR: return TS_COMMON;
	case SSA_NAME: return TS_SSA_NAME;
	case STATEMENT_LIST: return TS_STATEMENT_LIST;
	case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
	case TREE_BINFO: return TS_BINFO;
	case TREE_LIST: return TS_LIST;
	case TREE_VEC: return TS_VEC;
	}
    }
}
/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  */
      switch (ts_code)
	{
	case TS_OPTIMIZATION:
	case TS_TARGET_OPTION:

	case TS_POLY_INT_CST:

	case TS_STATEMENT_LIST:
	  MARK_TS_TYPED (code);
	  break;

	case TS_DECL_MINIMAL:
	  MARK_TS_COMMON (code);
	  break;

	case TS_TYPE_WITH_LANG_SPECIFIC:
	  MARK_TS_TYPE_COMMON (code);
	  break;

	case TS_TYPE_NON_COMMON:
	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
	  break;

	  MARK_TS_DECL_MINIMAL (code);

	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_DECL_NON_COMMON:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_DECL_WITH_VIS:
	  MARK_TS_DECL_WRTL (code);
	  break;

	  MARK_TS_DECL_COMMON (code);

	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_FUNCTION_DECL:
	  MARK_TS_DECL_NON_COMMON (code);
	  break;

	case TS_TRANSLATION_UNIT_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;
	}
    }
  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);

  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
/* The name of the object as the assembler will see it (but before any
   translations made by ASM_OUTPUT_LABELREF).  Often this is the same
   as DECL_NAME.  It is an IDENTIFIER_NODE.  */

tree
decl_assembler_name (tree decl)
{
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    lang_hooks.set_decl_assembler_name (decl);
  return DECL_ASSEMBLER_NAME_RAW (decl);
}
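/* A typical use, assuming FNDECL is some FUNCTION_DECL, is simply

     tree name = decl_assembler_name (fndecl);
     const char *str = IDENTIFIER_POINTER (name);

   the first call lazily runs the language hook that computes (and possibly
   mangles) the name, so the identifier is always valid afterwards.  */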
/* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
   (either of which may be NULL).  Inform the FE, if this changes the
   name.  */

void
overwrite_decl_assembler_name (tree decl, tree name)
{
  if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
    lang_hooks.overwrite_decl_assembler_name (decl, name);
}
/* Return true if DECL may need an assembler name to be set.  */

static bool
need_assembler_name_p (tree decl)
{
  /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
     Rule merging.  This makes type_odr_p return true on those types during
     LTO, and by comparing the mangled names we can say which types are
     intended to be equivalent across compilation units.

     We do not store names of type_in_anonymous_namespace_p.

     Record, union and enumeration types have linkage that allows us
     to check type_in_anonymous_namespace_p.  We do not mangle compound types
     that always can be compared structurally.

     Similarly for builtin types, we compare properties of their main variant.
     A special case are integer types where mangling does make a difference
     between char/signed char/unsigned char etc.  Storing the name for these
     makes e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
     See cp/mangle.cc:write_builtin_type for details.  */

  if (TREE_CODE (decl) == TYPE_DECL)
    {
      if (DECL_NAME (decl)
	  && decl == TYPE_NAME (TREE_TYPE (decl))
	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
	  && (type_with_linkage_p (TREE_TYPE (decl))
	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
	return !DECL_ASSEMBLER_NAME_SET_P (decl);
      return false;
    }
  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (!VAR_OR_FUNCTION_DECL_P (decl))
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (VAR_P (decl)
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
	 decide whether to expand inline or via a regular call.  */
      if (fndecl_built_in_p (decl)
	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
	return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_node::get (decl) != NULL)
	return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
	return false;
    }

  return true;
}
/* If T needs an assembler name, have one created for it.  */

void
assign_assembler_name_if_needed (tree t)
{
  if (need_assembler_name_p (t))
    {
      /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
	 diagnostics that use input_location to show locus
	 information.  The problem here is that, at this point,
	 input_location is generally anchored to the end of the file
	 (since the parser is long gone), so we don't have a good
	 position to pin it to.

	 To alleviate this problem, this uses the location of T's
	 declaration.  Examples of this are
	 testsuite/g++.dg/template/cond2.C and
	 testsuite/g++.dg/template/pr35240.C.  */
      location_t saved_location = input_location;
      input_location = DECL_SOURCE_LOCATION (t);

      decl_assembler_name (t);

      input_location = saved_location;
    }
}
/* When the target supports COMDAT groups, this indicates which group the
   DECL is associated with.  This can be either an IDENTIFIER_NODE or a
   decl, in which case its DECL_ASSEMBLER_NAME identifies the group.  */

tree
decl_comdat_group (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_comdat_group ();
}

/* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE.  */

tree
decl_comdat_group_id (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_comdat_group_id ();
}

/* When the target supports named sections, return the section name of NODE
   as a string, or NULL if it is in no section.  */

const char *
decl_section_name (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_section ();
}
/* Set section name of NODE to VALUE (that is expected to be
   an identifier string) or to NULL.  */

void
set_decl_section_name (tree node, const char *value)
{
  struct symtab_node *snode;

  if (value == NULL)
    {
      snode = symtab_node::get (node);
      if (!snode)
	return;
    }
  else if (VAR_P (node))
    snode = varpool_node::get_create (node);
  else
    snode = cgraph_node::get_create (node);
  snode->set_section (value);
}

/* Set section name of NODE to match the section name of OTHER.

   set_decl_section_name (decl, other) is equivalent to
   set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
   efficient.  */

void
set_decl_section_name (tree decl, const_tree other)
{
  struct symtab_node *other_node = symtab_node::get (other);
  if (other_node && other_node->get_section ())
    {
      struct symtab_node *decl_node;
      if (VAR_P (decl))
	decl_node = varpool_node::get_create (decl);
      else
	decl_node = cgraph_node::get_create (decl);
      decl_node->set_section (*other_node);
    }
  else
    {
      struct symtab_node *decl_node = symtab_node::get (decl);
      if (decl_node)
	decl_node->set_section (NULL);
    }
}
/* Return TLS model of a variable NODE.  */

enum tls_model
decl_tls_model (const_tree node)
{
  struct varpool_node *snode = varpool_node::get (node);
  if (!snode)
    return TLS_MODEL_NONE;
  return snode->tls_model;
}

/* Set TLS model of variable NODE to MODEL.  */

void
set_decl_tls_model (tree node, enum tls_model model)
{
  struct varpool_node *vnode;

  if (model == TLS_MODEL_NONE)
    {
      vnode = varpool_node::get (node);
      if (!vnode)
	return;
    }
  else
    vnode = varpool_node::get_create (node);
  vnode->tls_model = model;
}
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR.  */

size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      switch (code)
	{
	case FIELD_DECL: return sizeof (tree_field_decl);
	case PARM_DECL: return sizeof (tree_parm_decl);
	case VAR_DECL: return sizeof (tree_var_decl);
	case LABEL_DECL: return sizeof (tree_label_decl);
	case RESULT_DECL: return sizeof (tree_result_decl);
	case CONST_DECL: return sizeof (tree_const_decl);
	case TYPE_DECL: return sizeof (tree_type_decl);
	case FUNCTION_DECL: return sizeof (tree_function_decl);
	case DEBUG_EXPR_DECL: return sizeof (tree_decl_with_rtl);
	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
	case NAMELIST_DECL: return sizeof (tree_decl_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_type:  /* a type node */
      switch (code)
	{
	case REFERENCE_TYPE:
	case FIXED_POINT_TYPE:
	case QUAL_UNION_TYPE:
	case LANG_TYPE: return sizeof (tree_type_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST: return sizeof (tree_typed);
	case INTEGER_CST: gcc_unreachable ();
	case POLY_INT_CST: return sizeof (tree_poly_int_cst);
	case REAL_CST: return sizeof (tree_real_cst);
	case FIXED_CST: return sizeof (tree_fixed_cst);
	case COMPLEX_CST: return sizeof (tree_complex);
	case VECTOR_CST: gcc_unreachable ();
	case STRING_CST: gcc_unreachable ();
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE: return lang_hooks.identifier_size;
	case TREE_LIST: return sizeof (tree_list);

	case PLACEHOLDER_EXPR: return sizeof (tree_common);

	case TREE_VEC: gcc_unreachable ();
	case OMP_CLAUSE: gcc_unreachable ();

	case SSA_NAME: return sizeof (tree_ssa_name);

	case STATEMENT_LIST: return sizeof (tree_statement_list);
	case BLOCK: return sizeof (struct tree_block);
	case CONSTRUCTOR: return sizeof (tree_constructor);
	case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (tree_target_option);

	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes.  */

size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      return (sizeof (struct tree_int_cst)
	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      return (offsetof (struct tree_binfo, base_binfos)
	      + vec<tree, va_gc>
		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      return (sizeof (struct tree_vector)
	      + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));

    case STRING_CST:
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
		* sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
	return (sizeof (struct tree_exp)
		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
	return tree_code_size (code);
    }
}
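/* For fixed-size nodes tree_code_size is enough, e.g. tree_code_size
   (VAR_DECL) is sizeof (tree_var_decl); for variable-sized nodes tree_size
   must be used.  As a worked example, for an INTEGER_CST node T the result
   is

     sizeof (struct tree_int_cst)
       + (TREE_INT_CST_EXT_NUNITS (t) - 1) * sizeof (HOST_WIDE_INT)

   which is exactly the INTEGER_CST case above.  */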
/* Return tree node kind based on tree CODE.  */

static tree_node_kind
get_stats_node_kind (enum tree_code code)
{
  enum tree_code_class type = TREE_CODE_CLASS (code);

  switch (type)
    {
    case tcc_declaration:  /* A decl node */
      return d_kind;
    case tcc_type:  /* a type node */
      return t_kind;
    case tcc_statement:  /* an expression with side effects */
      return s_kind;
    case tcc_reference:  /* a reference */
      return r_kind;
    case tcc_expression:  /* an expression */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:  /* a unary arithmetic expression */
    case tcc_binary:  /* a binary arithmetic expression */
      return e_kind;
    case tcc_constant:  /* a constant */
      return c_kind;
    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:
	  return id_kind;
	case SSA_NAME:
	  return ssa_name_kind;
	case OMP_CLAUSE:
	  return omp_clause_kind;
	}
    }
/* Record interesting allocation statistics for a tree node with CODE
   and LENGTH.  */

static void
record_node_allocation_statistics (enum tree_code code, size_t length)
{
  if (!GATHER_STATISTICS)
    return;

  tree_node_kind kind = get_stats_node_kind (code);

  tree_code_counts[(int) code]++;
  tree_node_counts[(int) kind]++;
  tree_node_sizes[(int) kind] += length;
}

/* Allocate and return a new UID from the DECL_UID namespace.  */

int
allocate_decl_uid (void)
{
  return next_decl_uid++;
}
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
	      SET_DECL_MODE (t, FUNCTION_MODE);
	    }
	  else
	    SET_DECL_ALIGN (t, 1);
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;
      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      switch (code)
	{
	case TARGET_OPTION_NODE:
	  TREE_TARGET_OPTION(t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
/* Free tree node.  */

void
free_node (tree node)
{
  enum tree_code code = TREE_CODE (node);
  if (GATHER_STATISTICS)
    {
      enum tree_node_kind kind = get_stats_node_kind (code);

      gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
      gcc_checking_assert (tree_node_counts[(int) kind] != 0);
      gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));

      tree_code_counts[(int) TREE_CODE (node)]--;
      tree_node_counts[(int) kind]--;
      tree_node_sizes[(int) kind] -= tree_size (node);
    }
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    vec_free (CONSTRUCTOR_ELTS (node));
  else if (code == BLOCK)
    vec_free (BLOCK_NONLOCALIZED_VARS (node));
  else if (code == TREE_BINFO)
    vec_free (BINFO_BASE_ACCESSES (node));
  else if (code == OPTIMIZATION_NODE)
    cl_optimization_option_free (TREE_OPTIMIZATION (node));
  else if (code == TARGET_OPTION_NODE)
    cl_target_option_free (TREE_TARGET_OPTION (node));
  ggc_free (node);
}
/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_ADDRESS (t) = 0;
      TYPE_SYMTAB_DIE (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  */

tree
copy_list (tree list)
{
  tree head;
  tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  next = TREE_CHAIN (list);
  while (next)
    {
      TREE_CHAIN (prev) = copy_node (next);
      prev = TREE_CHAIN (prev);
      next = TREE_CHAIN (next);
    }
  return head;
}
/* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
   INTEGER_CST with value CST and type TYPE.  */

static unsigned int
get_int_cst_ext_nunits (tree type, const wide_int &cst)
{
  gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
  /* We need extra HWIs if CST is an unsigned integer with its
     upper bit set.  */
  if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
    return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
  return cst.get_len ();
}
/* Return a new INTEGER_CST with value CST and type TYPE.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
/* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE.  */

static tree
build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
			MEM_STAT_DECL)
{
  size_t length = sizeof (struct tree_poly_int_cst);
  record_node_allocation_statistics (POLY_INT_CST, length);

  tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, POLY_INT_CST);
  TREE_CONSTANT (t) = 1;
  TREE_TYPE (t) = type;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    POLY_INT_CST_COEFF (t, i) = coeffs[i];
  return t;
}
/* Create a constant tree that contains CST sign-extended to TYPE.  */

tree
build_int_cst (tree type, poly_int64 cst)
{
  /* Support legacy code.  */
  if (!type)
    type = integer_type_node;

  return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
}
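/* Example (sketch): both of the calls below yield shared INTEGER_CSTs,
   since build_int_cst merely sign-extends its argument and defers the
   sharing to wide_int_to_tree:

     tree forty_two = build_int_cst (integer_type_node, 42);
     tree ten = build_int_cst (size_type_node, 10);  */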
/* Create a constant tree that contains CST zero-extended to TYPE.  */

tree
build_int_cstu (tree type, poly_uint64 cst)
{
  return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
}

/* Create a constant tree that contains CST sign-extended to TYPE.  */

tree
build_int_cst_type (tree type, poly_int64 cst)
{
  gcc_assert (type);
  return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
}
/* Constructs a tree in type TYPE with the value given by CST.  The
   signedness of CST is assumed to be the same as the signedness of TYPE.  */

tree
double_int_to_tree (tree type, double_int cst)
{
  return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
}
/* We force the wide_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended wide_int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type (tree type, const poly_wide_int_ref &cst,
		int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign == SIGNED))
	{
	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
						   sign);
	  tree t;
	  if (tmp.is_constant ())
	    t = build_new_int_cst (type, tmp.coeffs[0]);
	  else
	    {
	      tree coeffs[NUM_POLY_INT_COEFFS];
	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
		{
		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
		  TREE_OVERFLOW (coeffs[i]) = 1;
		}
	      t = build_new_poly_int_cst (type, coeffs);
	    }
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
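/* Sketch of a typical caller: a constant folder computes a wide_int result
   plus an overflow flag and lets force_fit_type decide whether the
   INTEGER_CST must carry TREE_OVERFLOW.  Assuming A and B are INTEGER_CSTs
   of TYPE and SIGN is its signop:

     wi::overflow_type ovf;
     wide_int sum = wi::add (wi::to_wide (a), wi::to_wide (b), sign, &ovf);
     tree res = force_fit_type (type, sum, 1, ovf != wi::OVF_NONE);

   OVERFLOWABLE == 1 means only signed overflow is recorded, per the
   comment above.  */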
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

hashval_t
int_cst_hasher::hash (tree x)
{
  const_tree const t = x;
  hashval_t code = TYPE_UID (TREE_TYPE (t));
  int i;

  for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
    code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);

  return code;
}
/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, which is also an INTEGER_CST.  */

bool
int_cst_hasher::equal (tree x, tree y)
{
  const_tree const xt = x;
  const_tree const yt = y;

  if (TREE_TYPE (xt) != TREE_TYPE (yt)
      || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
      || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
    return false;

  for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
    if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
      return false;

  return true;
}
/* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
   SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
   number of slots that can be cached for the type.  */

static tree
cache_wide_int_in_type_cache (tree type, const wide_int &cst,
			      int slot, int max_slots)
{
  gcc_checking_assert (slot >= 0);
  /* Initialize cache.  */
  if (!TYPE_CACHED_VALUES_P (type))
    {
      TYPE_CACHED_VALUES_P (type) = 1;
      TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
    }
  tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
  if (!t)
    {
      /* Create a new shared int.  */
      t = build_new_int_cst (type, cst);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
    }
  return t;
}
/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

static tree
wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;
  int limit = 0;

  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  enum tree_code code = TREE_CODE (type);
  if (code == POINTER_TYPE || code == REFERENCE_TYPE)
    {
      /* Cache NULL pointer and zero bounds.  */
      if (cst == 0)
	ix = 0;
      /* Cache upper bounds of pointers.  */
      else if (cst == wi::max_value (prec, sgn))
	ix = 1;
      /* Cache 1 which is used for a non-zero range.  */
      else if (cst == 1)
	ix = 2;

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, 3);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && cst == wi::to_wide (t));
	  return t;
	}
    }
  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      switch (code)
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  break;

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	  /* Ignore pointers, as they were already handled above.  */
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	default:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = param_integer_share_limit;
	      if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N).  */
	      limit = param_integer_share_limit + 1;
	      if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
		ix = hwi + 1;
	    }
	  break;
	}

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, limit);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && TREE_INT_CST_NUNITS (t) == 1
			       && TREE_INT_CST_OFFSET_NUNITS (t) == 1
			       && TREE_INT_CST_EXT_NUNITS (t) == 1
			       && TREE_INT_CST_ELT (t, 0) == hwi);
	  return t;
	}

      /* Use the cache of larger shared ints, using int_cst_node as
	 a temporary.  */
      TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
      TREE_TYPE (int_cst_node) = type;

      tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = int_cst_node;
	  *slot = t;
	  /* Make a new node for next time round.  */
	  int_cst_node = make_int_cst (1, 1);
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */
      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      /* Insert this one into the hash table.  */
hashval_t
poly_int_cst_hasher::hash (tree t)
{
  inchash::hash hstate;

  hstate.add_int (TYPE_UID (TREE_TYPE (t)));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));

  return hstate.end ();
}

bool
poly_int_cst_hasher::equal (tree x, const compare_type &y)
{
  if (TREE_TYPE (x) != y.first)
    return false;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
      return false;
  return true;
}
/* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
   The elements must also have type TYPE.  */

tree
build_poly_int_cst (tree type, const poly_wide_int_ref &values)
{
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (prec <= values.coeffs[0].get_precision ());
  poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);

  inchash::hash h;
  h.add_int (TYPE_UID (type));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (c.coeffs[i]);
  poly_int_cst_hasher::compare_type comp (type, &c);
  tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
							      INSERT);
  if (*slot == NULL_TREE)
    {
      tree coeffs[NUM_POLY_INT_COEFFS];
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
      *slot = build_new_poly_int_cst (type, coeffs);
    }
  return *slot;
}
/* Create a constant tree with value VALUE in type TYPE.  */

tree
wide_int_to_tree (tree type, const poly_wide_int_ref &value)
{
  if (value.is_constant ())
    return wide_int_to_tree_1 (type, value.coeffs[0]);
  return build_poly_int_cst (type, value);
}
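/* Because of the caches above, small constants are shared, so pointer
   equality can be used on the results.  A sketch:

     tree a = wide_int_to_tree (integer_type_node,
				wi::shwi (7, TYPE_PRECISION (integer_type_node)));
     tree b = build_int_cst (integer_type_node, 7);
     gcc_checking_assert (a == b);

   7 falls into the per-type small-value cache described above.  */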
/* Insert INTEGER_CST T into a cache of integer constants.  And return
   the cached constant (which may or may not be T).  If MIGHT_DUPLICATE
   is false, and T falls into the type's 'smaller values' range, there
   cannot be an existing entry.  Otherwise, if MIGHT_DUPLICATE is true,
   or the value is large, should an existing entry exist, it is
   returned (rather than inserting T).  */

tree
cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (t);
  int ix = -1;
  int limit = 0;
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  /* The caching indices here must match those in
     wide_int_to_tree_1.  */
  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_checking_assert (integer_zerop (t));
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      if (integer_zerop (t))
	ix = 0;
      else if (integer_onep (t))
	ix = 2;
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (wi::to_wide (t), 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache [0, N).  */
	  limit = param_integer_share_limit;

	  /* This is a little hokey, but if the prec is smaller than
	     what is necessary to hold param_integer_share_limit, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t)
		  < (unsigned HOST_WIDE_INT) param_integer_share_limit)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache [-1, N).  */
	  limit = param_integer_share_limit + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (wi::to_wide (t)))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < param_integer_share_limit)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    default:
      /* The slot used by TYPE_CACHED_VALUES is used for the enum
	 values of an ENUMERAL_TYPE, so nothing is cached here.  */
      break;
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
	{
	  gcc_checking_assert (might_duplicate);
	  t = r;
	}
      else
	TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      if (tree r = *slot)
	{
	  /* If there is already an entry for the number verify it's the
	     same value.  */
	  gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
	  /* And return the cached value.  */
	  t = r;
	}
      else
	/* Otherwise insert this one into the hash table.  */
/* Builds an integer constant in TYPE such that the lowest BITS bits are ones
   and the rest are zeros.  */

tree
build_low_bits_mask (tree type, unsigned bits)
{
  gcc_assert (bits <= TYPE_PRECISION (type));

  return wide_int_to_tree (type, wi::mask (bits, false,
					   TYPE_PRECISION (type)));
}
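/* For instance, build_low_bits_mask (unsigned_char_type_node, 3) yields the
   constant 7, and build_low_bits_mask (type, TYPE_PRECISION (type)) yields
   the all-ones value of TYPE.  */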
/* Checks that X is an integer constant that can be expressed in (unsigned)
   HOST_WIDE_INT without loss of precision.  */

bool
cst_and_fits_in_hwi (const_tree x)
{
  return (TREE_CODE (x) == INTEGER_CST
	  && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
}
/* Build a newly constructed VECTOR_CST with the given values of
   (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN.  */

static tree
make_vector (unsigned log2_npatterns,
	     unsigned int nelts_per_pattern MEM_STAT_DECL)
{
  gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));
  tree t;
  unsigned npatterns = 1 << log2_npatterns;
  unsigned encoded_nelts = npatterns * nelts_per_pattern;
  unsigned length = (sizeof (struct tree_vector)
		     + (encoded_nelts - 1) * sizeof (tree));

  record_node_allocation_statistics (VECTOR_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, VECTOR_CST);
  TREE_CONSTANT (t) = 1;
  VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
  VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;

  return t;
}
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  */

tree
build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
{
  if (vec_safe_length (v) == 0)
    return build_zero_cst (type);

  unsigned HOST_WIDE_INT idx, nelts;
  tree value;

  /* We can't construct a VECTOR_CST for a variable number of elements.  */
  nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
  tree_vector_builder vec (type, nelts, 1);
  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      if (TREE_CODE (value) == VECTOR_CST)
	{
	  /* If NELTS is constant then this must be too.  */
	  unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
	  for (unsigned i = 0; i < sub_nelts; ++i)
	    vec.quick_push (VECTOR_CST_ELT (value, i));
	}
      else
	vec.quick_push (value);
    }
  while (vec.length () < nelts)
    vec.quick_push (build_zero_cst (TREE_TYPE (type)));

  return vec.build ();
}
/* Build a vector of type VECTYPE where all the elements are SCs.  */
tree
build_vector_from_val (tree vectype, tree sc)
{
  unsigned HOST_WIDE_INT i, nunits;

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
					   TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      tree_vector_builder v (vectype, 1, 1);
      v.quick_push (sc);
      return v.build ();
    }
  else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
    return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
  else
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}
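/* Sketch: to materialize an all-zero vector of some vector type VECTYPE one
   can write

     tree zero = build_zero_cst (TREE_TYPE (vectype));
     tree vec = build_vector_from_val (vectype, zero);

   with a constant element this produces a VECTOR_CST; with a non-constant
   element it produces a CONSTRUCTOR or a VEC_DUPLICATE_EXPR, as above.  */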
/* If TYPE is not a vector type, just return SC, otherwise return
   build_vector_from_val (TYPE, SC).  */

tree
build_uniform_cst (tree type, tree sc)
{
  if (!VECTOR_TYPE_P (type))
    return sc;

  return build_vector_from_val (type, sc);
}
/* Build a vector series of type TYPE in which element I has the value
   BASE + I * STEP.  The result is a constant if BASE and STEP are constant
   and a VEC_SERIES_EXPR otherwise.  */

tree
build_vec_series (tree type, tree base, tree step)
{
  if (integer_zerop (step))
    return build_vector_from_val (type, base);
  if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
    {
      tree_vector_builder builder (type, 1, 3);
      tree elt1 = wide_int_to_tree (TREE_TYPE (base),
				    wi::to_wide (base) + wi::to_wide (step));
      tree elt2 = wide_int_to_tree (TREE_TYPE (base),
				    wi::to_wide (elt1) + wi::to_wide (step));
      builder.quick_push (base);
      builder.quick_push (elt1);
      builder.quick_push (elt2);
      return builder.build ();
    }
  return build2 (VEC_SERIES_EXPR, type, base, step);
}
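/* Sketch: assuming V4SI is a 4-element integer vector type,

     build_vec_series (v4si, build_int_cst (integer_type_node, 1),
		       build_int_cst (integer_type_node, 2))

   describes the constant vector { 1, 3, 5, 7 }; with a non-constant BASE or
   STEP the same call yields a VEC_SERIES_EXPR instead.  */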
/* Return a vector with the same number of units and number of bits
   as VEC_TYPE, but in which the elements are a linear series of unsigned
   integers { BASE, BASE + STEP, BASE + STEP * 2, ... }.  */

tree
build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
{
  tree index_vec_type = vec_type;
  tree index_elt_type = TREE_TYPE (vec_type);
  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
  if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
    {
      index_elt_type = build_nonstandard_integer_type
	(GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
      index_vec_type = build_vector_type (index_elt_type, nunits);
    }

  tree_vector_builder v (index_vec_type, 1, 3);
  for (unsigned int i = 0; i < 3; ++i)
    v.quick_push (build_int_cstu (index_elt_type, base + i * step));
  return v.build ();
}
/* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
   elements are A and the rest are B.  */

tree
build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
{
  gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
  unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
  /* Optimize the constant case.  */
  if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
    count /= 2;
  tree_vector_builder builder (vec_type, count, 2);
  for (unsigned int i = 0; i < count * 2; ++i)
    builder.quick_push (i < num_a ? a : b);
  return builder.build ();
}
/* Something has messed with the elements of CONSTRUCTOR C after it was built;
   calculate TREE_CONSTANT and TREE_SIDE_EFFECTS.  */

void
recompute_constructor_flags (tree c)
{
  unsigned int i;
  tree val;
  bool constant_p = true;
  bool side_effects_p = false;
  vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);

  FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
    {
      /* Mostly ctors will have elts that don't have side-effects, so
	 the usual case is to scan all the elements.  Hence a single
	 loop for both const and side effects, rather than one loop
	 each (with early outs).  */
      if (!TREE_CONSTANT (val))
	constant_p = false;
      if (TREE_SIDE_EFFECTS (val))
	side_effects_p = true;
    }

  TREE_SIDE_EFFECTS (c) = side_effects_p;
  TREE_CONSTANT (c) = constant_p;
}
/* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
   CONSTRUCTOR C.  */

void
verify_constructor_flags (tree c)
{
  unsigned int i;
  tree val;
  bool constant_p = TREE_CONSTANT (c);
  bool side_effects_p = TREE_SIDE_EFFECTS (c);
  vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);

  FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
    {
      if (constant_p && !TREE_CONSTANT (val))
	internal_error ("non-constant element in constant CONSTRUCTOR");
      if (!side_effects_p && TREE_SIDE_EFFECTS (val))
	internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
    }
}
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in the vec pointed to by VALS.  */
tree
build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
{
  tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);

  TREE_TYPE (c) = type;
  CONSTRUCTOR_ELTS (c) = vals;

  recompute_constructor_flags (c);

  return c;
}

/* Build a CONSTRUCTOR node made of a single initializer, with the specified
   INDEX and VALUE.  */
tree
build_constructor_single (tree type, tree index, tree value)
{
  vec<constructor_elt, va_gc> *v;
  constructor_elt elt = {index, value};

  vec_alloc (v, 1);
  v->quick_push (elt);

  return build_constructor (type, v);
}
2300 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2301 are in a list pointed to by VALS. */
2303 build_constructor_from_list (tree type
, tree vals
)
2306 vec
<constructor_elt
, va_gc
> *v
= NULL
;
2310 vec_alloc (v
, list_length (vals
));
2311 for (t
= vals
; t
; t
= TREE_CHAIN (t
))
2312 CONSTRUCTOR_APPEND_ELT (v
, TREE_PURPOSE (t
), TREE_VALUE (t
));
2315 return build_constructor (type
, v
);
2318 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2319 are in a vector pointed to by VALS. Note that the TREE_PURPOSE
2320 fields in the constructor remain null. */
2323 build_constructor_from_vec (tree type
, const vec
<tree
, va_gc
> *vals
)
2325 vec
<constructor_elt
, va_gc
> *v
= NULL
;
2328 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, t
);
2330 return build_constructor (type
, v
);
2333 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
2334 of elements, provided as index/value pairs. */
2337 build_constructor_va (tree type
, int nelts
, ...)
2339 vec
<constructor_elt
, va_gc
> *v
= NULL
;
2342 va_start (p
, nelts
);
2343 vec_alloc (v
, nelts
);
2346 tree index
= va_arg (p
, tree
);
2347 tree value
= va_arg (p
, tree
);
2348 CONSTRUCTOR_APPEND_ELT (v
, index
, value
);
2351 return build_constructor (type
, v
);
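/* Usage sketch (illustrative only; "rec_type", "field1", "field2", "value1"
   and "value2" are placeholders for trees the caller already owns):

     tree ctor = build_constructor_va (rec_type, 2,
				       field1, value1,
				       field2, value2);

   The index/value pairs end up in CONSTRUCTOR_ELTS of the new node.  */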
/* Return a node of type TYPE for which TREE_CLOBBER_P is true.  */

tree
build_clobber (tree type, enum clobber_kind kind)
{
  tree clobber = build_constructor (type, NULL);
  TREE_THIS_VOLATILE (clobber) = true;
  CLOBBER_KIND (clobber) = kind;
  return clobber;
}
/* Return a new FIXED_CST node whose type is TYPE and value is F.  */

tree
build_fixed (tree type, FIXED_VALUE_TYPE f)
{
  tree v;
  FIXED_VALUE_TYPE *fp;

  v = make_node (FIXED_CST);
  fp = ggc_alloc<fixed_value> ();
  memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_FIXED_CST_PTR (v) = fp;
  return v;
}
/* Return a new REAL_CST node whose type is TYPE and value is D.  */

tree
build_real (tree type, REAL_VALUE_TYPE d)
{
  tree v;
  int overflow = 0;

  /* dconst{0,1,2,m1,half} are used in various places in
     the middle-end and optimizers, allow them here
     even for decimal floating point types as an exception
     by converting them to decimal.  */
  if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
      && (d.cl == rvc_normal || d.cl == rvc_zero)
      && !d.decimal)
    {
      if (memcmp (&d, &dconst1, sizeof (d)) == 0)
	decimal_real_from_string (&d, "1");
      else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
	decimal_real_from_string (&d, "2");
      else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
	decimal_real_from_string (&d, "-1");
      else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
	decimal_real_from_string (&d, "0.5");
      else if (memcmp (&d, &dconst0, sizeof (d)) == 0)
	{
	  /* Make sure to give zero the minimum quantum exponent for
	     the type (which corresponds to all bits zero).  */
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  char buf[16];
	  sprintf (buf, "0e%d", fmt->emin - fmt->p);
	  decimal_real_from_string (&d, buf);
	}
      else
	gcc_unreachable ();
    }

  /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
     Consider doing it via real_convert now.  */

  v = make_node (REAL_CST);
  TREE_TYPE (v) = type;
  memcpy (TREE_REAL_CST_PTR (v), &d, sizeof (REAL_VALUE_TYPE));
  TREE_OVERFLOW (v) = overflow;
  return v;
}
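/* Usage sketch (illustrative only), using the shared constants from real.h:

     tree one  = build_real (double_type_node, dconst1);
     tree half = build_real (float_type_node, dconsthalf);

   For decimal floating point types these special values are first rewritten
   through decimal_real_from_string as shown above.  */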
/* Like build_real, but first truncate D to the type.  */

tree
build_real_truncate (tree type, REAL_VALUE_TYPE d)
{
  return build_real (type, real_value_truncate (TYPE_MODE (type), d));
}

/* Return a new REAL_CST node whose type is TYPE
   and whose value is the integer value of the INTEGER_CST node I.  */

REAL_VALUE_TYPE
real_value_from_int_cst (const_tree type, const_tree i)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
		     TYPE_SIGN (TREE_TYPE (i)));
  return d;
}

/* Given a tree representing an integer constant I, return a tree
   representing the same value as a floating-point constant of type TYPE.  */

tree
build_real_from_int_cst (tree type, const_tree i)
{
  tree v;
  int overflow = TREE_OVERFLOW (i);

  v = build_real (type, real_value_from_int_cst (type, i));

  TREE_OVERFLOW (v) |= overflow;
  return v;
}

/* Return a new REAL_CST node whose type is TYPE
   and whose value is the integer value I which has sign SGN.  */

tree
build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  real_from_integer (&d, TYPE_MODE (type), i, sgn);
  return build_real (type, d);
}
/* Return a newly constructed STRING_CST node whose value is the LEN
   characters at STR when STR is nonnull, or all zeros otherwise.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (unsigned len, const char *str /*= NULL */)
{
  /* Do not waste bytes provided by padding of struct tree_string.  */
  unsigned size = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, size);

  tree s = (tree) ggc_internal_alloc (size);

  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  if (str)
    memcpy (s->string.str, str, len);
  else
    memset (s->string.str, 0, len);
  s->string.str[len] = '\0';

  return s;
}
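/* Usage sketch (illustrative only): for the C literal "hi" the length passed
   in includes the trailing NUL,

     tree s = build_string (3, "hi");

   and the caller still has to set TREE_TYPE (s) afterwards.  */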
/* Return a newly constructed COMPLEX_CST node whose value is
   specified by the real and imaginary parts REAL and IMAG.
   Both REAL and IMAG should be constant nodes.  TYPE, if specified,
   will be the type of the COMPLEX_CST; otherwise a new type will be made.  */

tree
build_complex (tree type, tree real, tree imag)
{
  gcc_assert (CONSTANT_CLASS_P (real));
  gcc_assert (CONSTANT_CLASS_P (imag));

  tree t = make_node (COMPLEX_CST);

  TREE_REALPART (t) = real;
  TREE_IMAGPART (t) = imag;
  TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
  TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
  return t;
}

/* Build a complex (inf +- 0i), such as for the result of cproj.
   TYPE is the complex tree type of the result.  If NEG is true, the
   imaginary zero is negative.  */

tree
build_complex_inf (tree type, bool neg)
{
  REAL_VALUE_TYPE rzero = dconst0;

  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), dconstinf),
			build_real (TREE_TYPE (type), rzero));
}
/* Return the constant 1 in type TYPE.  If TYPE has several elements, each
   element is set to 1.  In particular, this is 1 + i for complex types.  */

tree
build_each_one_cst (tree type)
{
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree scalar = build_one_cst (TREE_TYPE (type));
      return build_complex (type, scalar, scalar);
    }
  else
    return build_one_cst (type);
}

/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type, FCONST1 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	tree scalar = build_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      return build_complex (type,
			    build_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
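/* Usage sketch (illustrative only): build_one_cst (integer_type_node) is
   equivalent to build_int_cst (integer_type_node, 1), while

     tree c1 = build_one_cst (complex_double_type_node);

   pairs the scalar one with a zero imaginary part, i.e. 1.0 + 0.0i.  */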
/* Return an integer of type TYPE containing all 1's in as much precision as
   it contains, or a complex or vector whose subparts are such integers.  */

tree
build_all_ones_cst (tree type)
{
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree scalar = build_all_ones_cst (TREE_TYPE (type));
      return build_complex (type, scalar, scalar);
    }
  else
    return build_minus_one_cst (type);
}

/* Return a constant of arithmetic type TYPE which is the
   opposite of the multiplicative identity of the set TYPE.  */

tree
build_minus_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, -1);

    case REAL_TYPE:
      return build_real (type, dconstm1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type,
			  fixed_from_double_int (double_int_minus_one,
						 SCALAR_TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	tree scalar = build_minus_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      return build_complex (type,
			    build_minus_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
/* Build 0 constant of type TYPE.  This is used by constructor folding
   and thus the constant should be represented in memory by
   zero(es).  */

tree
build_zero_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE: case NULLPTR_TYPE:
      return build_int_cst (type, 0);

    case REAL_TYPE:
      return build_real (type, dconst0);

    case FIXED_POINT_TYPE:
      return build_fixed (type, FCONST0 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	tree scalar = build_zero_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      {
	tree zero = build_zero_cst (TREE_TYPE (type));

	return build_complex (type, zero, zero);
      }

    default:
      if (!AGGREGATE_TYPE_P (type))
	return fold_convert (type, integer_zero_node);
      return build_constructor (type, NULL);
    }
}
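/* Usage sketch (illustrative only; "some_record_type" is a placeholder for
   an aggregate type the caller has):

     tree zi = build_zero_cst (integer_type_node);
     tree za = build_zero_cst (some_record_type);

   ZI is the INTEGER_CST 0, while ZA is an empty CONSTRUCTOR, the in-memory
   all-zero representation of the aggregate.  */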
/* Build a BINFO with LEN language slots.  */

tree
make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
/* Create a CASE_LABEL_EXPR tree node and return it.  */

tree
build_case_label (tree low_value, tree high_value, tree label_decl)
{
  tree t = make_node (CASE_LABEL_EXPR);

  TREE_TYPE (t) = void_type_node;
  SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));

  CASE_LOW (t) = low_value;
  CASE_HIGH (t) = high_value;
  CASE_LABEL (t) = label_decl;
  CASE_CHAIN (t) = NULL_TREE;

  return t;
}
/* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
   values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
   The latter determines the length of the HOST_WIDE_INT vector.  */

tree
make_int_cst (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
/* Build a newly constructed TREE_VEC node of length LEN.  */

tree
make_tree_vec (int len MEM_STAT_DECL)
{
  tree t;
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  record_node_allocation_statistics (TREE_VEC, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, TREE_VEC);
  TREE_VEC_LENGTH (t) = len;

  return t;
}

/* Grow a TREE_VEC node to new length LEN.  */

tree
grow_tree_vec (tree v, int len MEM_STAT_DECL)
{
  gcc_assert (TREE_CODE (v) == TREE_VEC);

  int oldlen = TREE_VEC_LENGTH (v);
  gcc_assert (len > oldlen);

  size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  record_node_allocation_statistics (TREE_VEC, length - oldlength);

  v = (tree) ggc_realloc (v, length PASS_MEM_STAT);

  TREE_VEC_LENGTH (v) = len;

  return v;
}
/* Return 1 if EXPR is the constant zero, whether it is integral, float or
   fixed, and scalar, complex or vector.  */

bool
zerop (const_tree expr)
{
  return (integer_zerop (expr)
	  || real_zerop (expr)
	  || fixed_zerop (expr));
}

/* Return 1 if EXPR is the integer constant zero or a complex constant
   of zero, or a location wrapper for such a constant.  */

bool
integer_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return wi::to_wide (expr) == 0;
    case COMPLEX_CST:
      return (integer_zerop (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
/* Return 1 if EXPR is the integer constant one or the corresponding
   complex constant, or a location wrapper for such a constant.  */

bool
integer_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return wi::eq_p (wi::to_widest (expr), 1);
    case COMPLEX_CST:
      return (integer_onep (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}

/* Return 1 if EXPR is the integer constant one.  For complex and vector,
   return 1 if every piece is the integer constant one.
   Also return 1 for location wrappers for such a constant.  */

bool
integer_each_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  if (TREE_CODE (expr) == COMPLEX_CST)
    return (integer_onep (TREE_REALPART (expr))
	    && integer_onep (TREE_IMAGPART (expr)));
  else
    return integer_onep (expr);
}
/* Return 1 if EXPR is an integer containing all 1's in as much precision as
   it contains, or a complex or vector whose subparts are such integers,
   or a location wrapper for such a constant.  */

bool
integer_all_onesp (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  if (TREE_CODE (expr) == COMPLEX_CST
      && integer_all_onesp (TREE_REALPART (expr))
      && integer_all_onesp (TREE_IMAGPART (expr)))
    return true;

  else if (TREE_CODE (expr) == VECTOR_CST)
    return (VECTOR_CST_NPATTERNS (expr) == 1
	    && VECTOR_CST_DUPLICATE_P (expr)
	    && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));

  else if (TREE_CODE (expr) != INTEGER_CST)
    return false;

  return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
	  == wi::to_wide (expr));
}

/* Return 1 if EXPR is the integer constant minus one, or a location wrapper
   for such a constant.  */

bool
integer_minus_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  if (TREE_CODE (expr) == COMPLEX_CST)
    return (integer_all_onesp (TREE_REALPART (expr))
	    && integer_zerop (TREE_IMAGPART (expr)));
  else
    return integer_all_onesp (expr);
}
/* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
   one bit on), or a location wrapper for such a constant.  */

bool
integer_pow2p (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  if (TREE_CODE (expr) == COMPLEX_CST
      && integer_pow2p (TREE_REALPART (expr))
      && integer_zerop (TREE_IMAGPART (expr)))
    return true;

  if (TREE_CODE (expr) != INTEGER_CST)
    return false;

  return wi::popcount (wi::to_wide (expr)) == 1;
}

/* Return 1 if EXPR is an integer constant other than zero or a
   complex constant other than zero, or a location wrapper for such a
   constant.  */

bool
integer_nonzerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  return ((TREE_CODE (expr) == INTEGER_CST
	   && wi::to_wide (expr) != 0)
	  || (TREE_CODE (expr) == COMPLEX_CST
	      && (integer_nonzerop (TREE_REALPART (expr))
		  || integer_nonzerop (TREE_IMAGPART (expr)))));
}

/* Return 1 if EXPR is the integer constant one.  For vector,
   return 1 if every piece is the integer constant minus one
   (representing the value TRUE).
   Also return 1 for location wrappers for such a constant.  */

bool
integer_truep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  if (TREE_CODE (expr) == VECTOR_CST)
    return integer_all_onesp (expr);
  return integer_onep (expr);
}
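/* Usage sketch (illustrative only); all of these hold by construction:

     integer_zerop (build_zero_cst (integer_type_node))
     integer_onep  (build_one_cst (integer_type_node))
     integer_pow2p (build_int_cst (integer_type_node, 8))
     integer_truep (boolean_true_node)  */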
/* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
   for such a constant.  */

bool
fixed_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  return (TREE_CODE (expr) == FIXED_CST
	  && TREE_FIXED_CST (expr).data.is_zero ());
}

/* Return the power of two represented by a tree node known to be a
   power of two.  */

int
tree_log2 (const_tree expr)
{
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::exact_log2 (wi::to_wide (expr));
}

/* Similar, but return the largest integer Y such that 2 ** Y is less
   than or equal to EXPR.  */

int
tree_floor_log2 (const_tree expr)
{
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::floor_log2 (wi::to_wide (expr));
}
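/* Usage sketch (illustrative only):

     tree_log2 (build_int_cst (integer_type_node, 8))         == 3
     tree_floor_log2 (build_int_cst (integer_type_node, 10))  == 3

   and tree_log2 of a value that is not a power of two is negative, as with
   wi::exact_log2.  */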
3011 /* Return number of known trailing zero bits in EXPR, or, if the value of
3012 EXPR is known to be zero, the precision of it's type. */
3015 tree_ctz (const_tree expr
)
3017 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
3018 && !POINTER_TYPE_P (TREE_TYPE (expr
)))
3021 unsigned int ret1
, ret2
, prec
= TYPE_PRECISION (TREE_TYPE (expr
));
3022 switch (TREE_CODE (expr
))
3025 ret1
= wi::ctz (wi::to_wide (expr
));
3026 return MIN (ret1
, prec
);
3028 ret1
= wi::ctz (get_nonzero_bits (expr
));
3029 return MIN (ret1
, prec
);
3036 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3039 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3040 return MIN (ret1
, ret2
);
3041 case POINTER_PLUS_EXPR
:
3042 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3043 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3044 /* Second operand is sizetype, which could be in theory
3045 wider than pointer's precision. Make sure we never
3046 return more than prec. */
3047 ret2
= MIN (ret2
, prec
);
3048 return MIN (ret1
, ret2
);
3050 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3051 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3052 return MAX (ret1
, ret2
);
3054 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3055 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3056 return MIN (ret1
+ ret2
, prec
);
3058 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3059 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
3060 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
3062 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
3063 return MIN (ret1
+ ret2
, prec
);
3067 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
3068 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
3070 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3071 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
3076 case TRUNC_DIV_EXPR
:
3078 case FLOOR_DIV_EXPR
:
3079 case ROUND_DIV_EXPR
:
3080 case EXACT_DIV_EXPR
:
3081 if (TREE_CODE (TREE_OPERAND (expr
, 1)) == INTEGER_CST
3082 && tree_int_cst_sgn (TREE_OPERAND (expr
, 1)) == 1)
3084 int l
= tree_log2 (TREE_OPERAND (expr
, 1));
3087 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3095 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3096 if (ret1
&& ret1
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr
, 0))))
3098 return MIN (ret1
, prec
);
3100 return tree_ctz (TREE_OPERAND (expr
, 0));
3102 ret1
= tree_ctz (TREE_OPERAND (expr
, 1));
3105 ret2
= tree_ctz (TREE_OPERAND (expr
, 2));
3106 return MIN (ret1
, ret2
);
3108 return tree_ctz (TREE_OPERAND (expr
, 1));
3110 ret1
= get_pointer_alignment (CONST_CAST_TREE (expr
));
3111 if (ret1
> BITS_PER_UNIT
)
3113 ret1
= ctz_hwi (ret1
/ BITS_PER_UNIT
);
3114 return MIN (ret1
, prec
);
/* Return 1 if EXPR is the real constant zero.  Trailing zeroes matter for
   decimal float constants, so don't return 1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconst0)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_zerop (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      {
	/* Don't simply check for a duplicate because the predicate
	   accepts both +0.0 and -0.0.  */
	unsigned count = vector_cst_encoded_nelts (expr);
	for (unsigned int i = 0; i < count; ++i)
	  if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr, i)))
	    return false;
	return true;
      }
    default:
      return false;
    }
}

/* Return 1 if EXPR is the real constant one in real or complex form.
   Trailing zeroes matter for decimal float constants, so don't return
   1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconst1)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_onep (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && real_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}

/* Return 1 if EXPR is the real constant minus one.  Trailing zeroes
   matter for decimal float constants, so don't return 1 for them.
   Also return 1 for location wrappers around such a constant.  */

bool
real_minus_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconstm1)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_minus_onep (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
/* Nonzero if EXP is a constant or a cast of a constant.  */

bool
really_constant_p (const_tree exp)
{
  /* This is not quite the same as STRIP_NOPS.  It does more.  */
  while (CONVERT_EXPR_P (exp)
	 || TREE_CODE (exp) == NON_LVALUE_EXPR)
    exp = TREE_OPERAND (exp, 0);
  return TREE_CONSTANT (exp);
}
3219 /* Return true if T holds a polynomial pointer difference, storing it in
3220 *VALUE if so. A true return means that T's precision is no greater
3221 than 64 bits, which is the largest address space we support, so *VALUE
3222 never loses precision. However, the signedness of the result does
3223 not necessarily match the signedness of T: sometimes an unsigned type
3224 like sizetype is used to encode a value that is actually negative. */
3227 ptrdiff_tree_p (const_tree t
, poly_int64_pod
*value
)
3231 if (TREE_CODE (t
) == INTEGER_CST
)
3233 if (!cst_and_fits_in_hwi (t
))
3235 *value
= int_cst_value (t
);
3238 if (POLY_INT_CST_P (t
))
3240 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3241 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t
, i
)))
3243 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3244 value
->coeffs
[i
] = int_cst_value (POLY_INT_CST_COEFF (t
, i
));
3251 tree_to_poly_int64 (const_tree t
)
3253 gcc_assert (tree_fits_poly_int64_p (t
));
3254 if (POLY_INT_CST_P (t
))
3255 return poly_int_cst_value (t
).force_shwi ();
3256 return TREE_INT_CST_LOW (t
);
3260 tree_to_poly_uint64 (const_tree t
)
3262 gcc_assert (tree_fits_poly_uint64_p (t
));
3263 if (POLY_INT_CST_P (t
))
3264 return poly_int_cst_value (t
).force_uhwi ();
3265 return TREE_INT_CST_LOW (t
);
/* Return first list element whose TREE_VALUE is ELEM.
   Return 0 if ELEM is not in LIST.  */

tree
value_member (tree elem, tree list)
{
  while (list)
    {
      if (elem == TREE_VALUE (list))
	return list;
      list = TREE_CHAIN (list);
    }
  return NULL_TREE;
}

/* Return first list element whose TREE_PURPOSE is ELEM.
   Return 0 if ELEM is not in LIST.  */

tree
purpose_member (const_tree elem, tree list)
{
  while (list)
    {
      if (elem == TREE_PURPOSE (list))
	return list;
      list = TREE_CHAIN (list);
    }
  return NULL_TREE;
}
3301 vec_member (const_tree elem
, vec
<tree
, va_gc
> *v
)
3305 FOR_EACH_VEC_SAFE_ELT (v
, ix
, t
)
3311 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3315 chain_index (int idx
, tree chain
)
3317 for (; chain
&& idx
> 0; --idx
)
3318 chain
= TREE_CHAIN (chain
);
3322 /* Return nonzero if ELEM is part of the chain CHAIN. */
3325 chain_member (const_tree elem
, const_tree chain
)
3331 chain
= DECL_CHAIN (chain
);
3337 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3338 We expect a null pointer to mark the end of the chain.
3339 This is the Lisp primitive `length'. */
3342 list_length (const_tree t
)
3345 #ifdef ENABLE_TREE_CHECKING
3353 #ifdef ENABLE_TREE_CHECKING
3356 gcc_assert (p
!= q
);
3364 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3365 UNION_TYPE TYPE, or NULL_TREE if none. */
3368 first_field (const_tree type
)
3370 tree t
= TYPE_FIELDS (type
);
3371 while (t
&& TREE_CODE (t
) != FIELD_DECL
)
3376 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3377 UNION_TYPE TYPE, or NULL_TREE if none. */
3380 last_field (const_tree type
)
3382 tree last
= NULL_TREE
;
3384 for (tree fld
= TYPE_FIELDS (type
); fld
; fld
= TREE_CHAIN (fld
))
3386 if (TREE_CODE (fld
) != FIELD_DECL
)
/* Concatenate two chains of nodes (chained through TREE_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.
   This is the Lisp primitive `nconc'.  */

tree
chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
    ;
  TREE_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}

/* Return the last node in a chain of nodes (chained through TREE_CHAIN).  */

tree
tree_last (tree chain)
{
  tree next;
  if (chain)
    while ((next = TREE_CHAIN (chain)))
      chain = next;
  return chain;
}
/* Reverse the order of elements in the chain T,
   and return the new head of the chain (old last element).  */

tree
nreverse (tree t)
{
  tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      /* We shouldn't be using this function to reverse BLOCK chains; we
	 have blocks_nreverse for that.  */
      gcc_checking_assert (TREE_CODE (decl) != BLOCK);
      next = TREE_CHAIN (decl);
      TREE_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}
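/* Usage sketch (illustrative only; A, B and C are placeholders for existing
   trees): build the TREE_LIST (A B C) front to back and then reverse it in
   place with nreverse:

     tree l = NULL_TREE;
     l = tree_cons (NULL_TREE, a, l);
     l = tree_cons (NULL_TREE, b, l);
     l = tree_cons (NULL_TREE, c, l);
     l = nreverse (l);  */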
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PARM and VALUE.  */

tree
build_tree_list (tree parm, tree value MEM_STAT_DECL)
{
  tree t = make_node (TREE_LIST PASS_MEM_STAT);
  TREE_PURPOSE (t) = parm;
  TREE_VALUE (t) = value;
  return t;
}

/* Build a chain of TREE_LIST nodes from a vector.  */

tree
build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
{
  tree ret = NULL_TREE;
  tree *pp = &ret;
  unsigned int i;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (vec, i, t)
    {
      *pp = build_tree_list (NULL, t PASS_MEM_STAT);
      pp = &TREE_CHAIN (*pp);
    }
  return ret;
}
3484 /* Return a newly created TREE_LIST node whose
3485 purpose and value fields are PURPOSE and VALUE
3486 and whose TREE_CHAIN is CHAIN. */
3489 tree_cons (tree purpose
, tree value
, tree chain MEM_STAT_DECL
)
3493 node
= ggc_alloc_tree_node_stat (sizeof (struct tree_list
) PASS_MEM_STAT
);
3494 memset (node
, 0, sizeof (struct tree_common
));
3496 record_node_allocation_statistics (TREE_LIST
, sizeof (struct tree_list
));
3498 TREE_SET_CODE (node
, TREE_LIST
);
3499 TREE_CHAIN (node
) = chain
;
3500 TREE_PURPOSE (node
) = purpose
;
3501 TREE_VALUE (node
) = value
;
3505 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3509 ctor_to_vec (tree ctor
)
3511 vec
<tree
, va_gc
> *vec
;
3512 vec_alloc (vec
, CONSTRUCTOR_NELTS (ctor
));
3516 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor
), ix
, val
)
3517 vec
->quick_push (val
);
3522 /* Return the size nominally occupied by an object of type TYPE
3523 when it resides in memory. The value is measured in units of bytes,
3524 and its data type is that normally used for type sizes
3525 (which is the first type created by make_signed_type or
3526 make_unsigned_type). */
3529 size_in_bytes_loc (location_t loc
, const_tree type
)
3533 if (type
== error_mark_node
)
3534 return integer_zero_node
;
3536 type
= TYPE_MAIN_VARIANT (type
);
3537 t
= TYPE_SIZE_UNIT (type
);
3541 lang_hooks
.types
.incomplete_type_error (loc
, NULL_TREE
, type
);
3542 return size_zero_node
;
3548 /* Return the size of TYPE (in bytes) as a wide integer
3549 or return -1 if the size can vary or is larger than an integer. */
3552 int_size_in_bytes (const_tree type
)
3556 if (type
== error_mark_node
)
3559 type
= TYPE_MAIN_VARIANT (type
);
3560 t
= TYPE_SIZE_UNIT (type
);
3562 if (t
&& tree_fits_uhwi_p (t
))
3563 return TREE_INT_CST_LOW (t
);
3568 /* Return the maximum size of TYPE (in bytes) as a wide integer
3569 or return -1 if the size can vary or is larger than an integer. */
3572 max_int_size_in_bytes (const_tree type
)
3574 HOST_WIDE_INT size
= -1;
3577 /* If this is an array type, check for a possible MAX_SIZE attached. */
3579 if (TREE_CODE (type
) == ARRAY_TYPE
)
3581 size_tree
= TYPE_ARRAY_MAX_SIZE (type
);
3583 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3584 size
= tree_to_uhwi (size_tree
);
3587 /* If we still haven't been able to get a size, see if the language
3588 can compute a maximum size. */
3592 size_tree
= lang_hooks
.types
.max_size (type
);
3594 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3595 size
= tree_to_uhwi (size_tree
);
/* Return the bit position of FIELD, in bits from the start of the record.
   This is a tree of type bitsizetype.  */

tree
bit_position (const_tree field)
{
  return bit_from_pos (DECL_FIELD_OFFSET (field),
		       DECL_FIELD_BIT_OFFSET (field));
}

/* Return the byte position of FIELD, in bytes from the start of the record.
   This is a tree of type sizetype.  */

tree
byte_position (const_tree field)
{
  return byte_from_pos (DECL_FIELD_OFFSET (field),
			DECL_FIELD_BIT_OFFSET (field));
}

/* Likewise, but return as an integer.  It must be representable in
   that way (since it could be a signed value, we don't have the
   option of returning -1 like int_size_in_bytes can).  */

HOST_WIDE_INT
int_byte_position (const_tree field)
{
  return tree_to_shwi (byte_position (field));
}
3631 /* Return, as a tree node, the number of elements for TYPE (which is an
3632 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3635 array_type_nelts (const_tree type
)
3637 tree index_type
, min
, max
;
3639 /* If they did it with unspecified bounds, then we should have already
3640 given an error about it before we got here. */
3641 if (! TYPE_DOMAIN (type
))
3642 return error_mark_node
;
3644 index_type
= TYPE_DOMAIN (type
);
3645 min
= TYPE_MIN_VALUE (index_type
);
3646 max
= TYPE_MAX_VALUE (index_type
);
3648 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3651 /* zero sized arrays are represented from C FE as complete types with
3652 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while C++ FE represents
3653 them as min 0, max -1. */
3654 if (COMPLETE_TYPE_P (type
)
3655 && integer_zerop (TYPE_SIZE (type
))
3656 && integer_zerop (min
))
3657 return build_int_cst (TREE_TYPE (min
), -1);
3659 return error_mark_node
;
3662 return (integer_zerop (min
)
3664 : fold_build2 (MINUS_EXPR
, TREE_TYPE (max
), max
, min
));
3667 /* If arg is static -- a reference to an object in static storage -- then
3668 return the object. This is not the same as the C meaning of `static'.
3669 If arg isn't static, return NULL. */
3674 switch (TREE_CODE (arg
))
3677 /* Nested functions are static, even though taking their address will
3678 involve a trampoline as we unnest the nested function and create
3679 the trampoline on the tree level. */
3683 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3684 && ! DECL_THREAD_LOCAL_P (arg
)
3685 && ! DECL_DLLIMPORT_P (arg
)
3689 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3693 return TREE_STATIC (arg
) ? arg
: NULL
;
3700 /* If the thing being referenced is not a field, then it is
3701 something language specific. */
3702 gcc_assert (TREE_CODE (TREE_OPERAND (arg
, 1)) == FIELD_DECL
);
3704 /* If we are referencing a bitfield, we can't evaluate an
3705 ADDR_EXPR at compile time and so it isn't a constant. */
3706 if (DECL_BIT_FIELD (TREE_OPERAND (arg
, 1)))
3709 return staticp (TREE_OPERAND (arg
, 0));
3715 return TREE_CONSTANT (TREE_OPERAND (arg
, 0)) ? arg
: NULL
;
3718 case ARRAY_RANGE_REF
:
3719 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg
))) == INTEGER_CST
3720 && TREE_CODE (TREE_OPERAND (arg
, 1)) == INTEGER_CST
)
3721 return staticp (TREE_OPERAND (arg
, 0));
3725 case COMPOUND_LITERAL_EXPR
:
3726 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg
)) ? arg
: NULL
;
3736 /* Return whether OP is a DECL whose address is function-invariant. */
3739 decl_address_invariant_p (const_tree op
)
3741 /* The conditions below are slightly less strict than the one in
3744 switch (TREE_CODE (op
))
3753 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3754 || DECL_THREAD_LOCAL_P (op
)
3755 || DECL_CONTEXT (op
) == current_function_decl
3756 || decl_function_context (op
) == current_function_decl
)
3761 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3762 || decl_function_context (op
) == current_function_decl
)
3773 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3776 decl_address_ip_invariant_p (const_tree op
)
3778 /* The conditions below are slightly less strict than the one in
3781 switch (TREE_CODE (op
))
3789 if (((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3790 && !DECL_DLLIMPORT_P (op
))
3791 || DECL_THREAD_LOCAL_P (op
))
3796 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
)))
3808 /* Return true if T is function-invariant (internal function, does
3809 not handle arithmetic; that's handled in skip_simple_arithmetic and
3810 tree_invariant_p). */
3813 tree_invariant_p_1 (tree t
)
3817 if (TREE_CONSTANT (t
)
3818 || (TREE_READONLY (t
) && !TREE_SIDE_EFFECTS (t
)))
3821 switch (TREE_CODE (t
))
3827 op
= TREE_OPERAND (t
, 0);
3828 while (handled_component_p (op
))
3830 switch (TREE_CODE (op
))
3833 case ARRAY_RANGE_REF
:
3834 if (!tree_invariant_p (TREE_OPERAND (op
, 1))
3835 || TREE_OPERAND (op
, 2) != NULL_TREE
3836 || TREE_OPERAND (op
, 3) != NULL_TREE
)
3841 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
3847 op
= TREE_OPERAND (op
, 0);
3850 return CONSTANT_CLASS_P (op
) || decl_address_invariant_p (op
);
3859 /* Return true if T is function-invariant. */
3862 tree_invariant_p (tree t
)
3864 tree inner
= skip_simple_arithmetic (t
);
3865 return tree_invariant_p_1 (inner
);
3868 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3869 Do this to any expression which may be used in more than one place,
3870 but must be evaluated only once.
3872 Normally, expand_expr would reevaluate the expression each time.
3873 Calling save_expr produces something that is evaluated and recorded
3874 the first time expand_expr is called on it. Subsequent calls to
3875 expand_expr just reuse the recorded value.
3877 The call to expand_expr that generates code that actually computes
3878 the value is the first call *at compile time*. Subsequent calls
3879 *at compile time* generate code to use the saved value.
3880 This produces correct result provided that *at run time* control
3881 always flows through the insns made by the first expand_expr
3882 before reaching the other places where the save_expr was evaluated.
3883 You, the caller of save_expr, must make sure this is so.
3885 Constants, and certain read-only nodes, are returned with no
3886 SAVE_EXPR because that is safe. Expressions containing placeholders
3887 are not touched; see tree.def for an explanation of what these
3891 save_expr (tree expr
)
3895 /* If the tree evaluates to a constant, then we don't want to hide that
3896 fact (i.e. this allows further folding, and direct checks for constants).
3897 However, a read-only object that has side effects cannot be bypassed.
3898 Since it is no problem to reevaluate literals, we just return the
3900 inner
= skip_simple_arithmetic (expr
);
3901 if (TREE_CODE (inner
) == ERROR_MARK
)
3904 if (tree_invariant_p_1 (inner
))
3907 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3908 it means that the size or offset of some field of an object depends on
3909 the value within another field.
3911 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3912 and some variable since it would then need to be both evaluated once and
3913 evaluated more than once. Front-ends must assure this case cannot
3914 happen by surrounding any such subexpressions in their own SAVE_EXPR
3915 and forcing evaluation at the proper time. */
3916 if (contains_placeholder_p (inner
))
3919 expr
= build1_loc (EXPR_LOCATION (expr
), SAVE_EXPR
, TREE_TYPE (expr
), expr
);
3921 /* This expression might be placed ahead of a jump to ensure that the
3922 value was computed on both sides of the jump. So make sure it isn't
3923 eliminated as dead. */
3924 TREE_SIDE_EFFECTS (expr
) = 1;
3928 /* Look inside EXPR into any simple arithmetic operations. Return the
3929 outermost non-arithmetic or non-invariant node. */
3932 skip_simple_arithmetic (tree expr
)
3934 /* We don't care about whether this can be used as an lvalue in this
3936 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3937 expr
= TREE_OPERAND (expr
, 0);
3939 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3940 a constant, it will be more efficient to not make another SAVE_EXPR since
3941 it will allow better simplification and GCSE will be able to merge the
3942 computations if they actually occur. */
3945 if (UNARY_CLASS_P (expr
))
3946 expr
= TREE_OPERAND (expr
, 0);
3947 else if (BINARY_CLASS_P (expr
))
3949 if (tree_invariant_p (TREE_OPERAND (expr
, 1)))
3950 expr
= TREE_OPERAND (expr
, 0);
3951 else if (tree_invariant_p (TREE_OPERAND (expr
, 0)))
3952 expr
= TREE_OPERAND (expr
, 1);
3963 /* Look inside EXPR into simple arithmetic operations involving constants.
3964 Return the outermost non-arithmetic or non-constant node. */
3967 skip_simple_constant_arithmetic (tree expr
)
3969 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3970 expr
= TREE_OPERAND (expr
, 0);
3974 if (UNARY_CLASS_P (expr
))
3975 expr
= TREE_OPERAND (expr
, 0);
3976 else if (BINARY_CLASS_P (expr
))
3978 if (TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
3979 expr
= TREE_OPERAND (expr
, 0);
3980 else if (TREE_CONSTANT (TREE_OPERAND (expr
, 0)))
3981 expr
= TREE_OPERAND (expr
, 1);
3992 /* Return which tree structure is used by T. */
3994 enum tree_node_structure_enum
3995 tree_node_structure (const_tree t
)
3997 const enum tree_code code
= TREE_CODE (t
);
3998 return tree_node_structure_for_code (code
);
4001 /* Set various status flags when building a CALL_EXPR object T. */
4004 process_call_operands (tree t
)
4006 bool side_effects
= TREE_SIDE_EFFECTS (t
);
4007 bool read_only
= false;
4008 int i
= call_expr_flags (t
);
4010 /* Calls have side-effects, except those to const or pure functions. */
4011 if ((i
& ECF_LOOPING_CONST_OR_PURE
) || !(i
& (ECF_CONST
| ECF_PURE
)))
4012 side_effects
= true;
4013 /* Propagate TREE_READONLY of arguments for const functions. */
4017 if (!side_effects
|| read_only
)
4018 for (i
= 1; i
< TREE_OPERAND_LENGTH (t
); i
++)
4020 tree op
= TREE_OPERAND (t
, i
);
4021 if (op
&& TREE_SIDE_EFFECTS (op
))
4022 side_effects
= true;
4023 if (op
&& !TREE_READONLY (op
) && !CONSTANT_CLASS_P (op
))
4027 TREE_SIDE_EFFECTS (t
) = side_effects
;
4028 TREE_READONLY (t
) = read_only
;
4031 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4032 size or offset that depends on a field within a record. */
4035 contains_placeholder_p (const_tree exp
)
4037 enum tree_code code
;
4042 code
= TREE_CODE (exp
);
4043 if (code
== PLACEHOLDER_EXPR
)
4046 switch (TREE_CODE_CLASS (code
))
4049 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4050 position computations since they will be converted into a
4051 WITH_RECORD_EXPR involving the reference, which will assume
4052 here will be valid. */
4053 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
4055 case tcc_exceptional
:
4056 if (code
== TREE_LIST
)
4057 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp
))
4058 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp
)));
4063 case tcc_comparison
:
4064 case tcc_expression
:
4068 /* Ignoring the first operand isn't quite right, but works best. */
4069 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1));
4072 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
4073 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1))
4074 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 2)));
4077 /* The save_expr function never wraps anything containing
4078 a PLACEHOLDER_EXPR. */
4085 switch (TREE_CODE_LENGTH (code
))
4088 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
4090 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
4091 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1)));
4102 const_call_expr_arg_iterator iter
;
4103 FOR_EACH_CONST_CALL_EXPR_ARG (arg
, iter
, exp
)
4104 if (CONTAINS_PLACEHOLDER_P (arg
))
4118 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4119 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4123 type_contains_placeholder_1 (const_tree type
)
4125 /* If the size contains a placeholder or the parent type (component type in
4126 the case of arrays) type involves a placeholder, this type does. */
4127 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type
))
4128 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type
))
4129 || (!POINTER_TYPE_P (type
)
4131 && type_contains_placeholder_p (TREE_TYPE (type
))))
4134 /* Now do type-specific checks. Note that the last part of the check above
4135 greatly limits what we have to do below. */
4136 switch (TREE_CODE (type
))
4145 case REFERENCE_TYPE
:
4154 case FIXED_POINT_TYPE
:
4155 /* Here we just check the bounds. */
4156 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type
))
4157 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type
)));
4160 /* We have already checked the component type above, so just check
4161 the domain type. Flexible array members have a null domain. */
4162 return TYPE_DOMAIN (type
) ?
4163 type_contains_placeholder_p (TYPE_DOMAIN (type
)) : false;
4167 case QUAL_UNION_TYPE
:
4171 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4172 if (TREE_CODE (field
) == FIELD_DECL
4173 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field
))
4174 || (TREE_CODE (type
) == QUAL_UNION_TYPE
4175 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field
)))
4176 || type_contains_placeholder_p (TREE_TYPE (field
))))
4187 /* Wrapper around above function used to cache its result. */
4190 type_contains_placeholder_p (tree type
)
4194 /* If the contains_placeholder_bits field has been initialized,
4195 then we know the answer. */
4196 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) > 0)
4197 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) - 1;
4199 /* Indicate that we've seen this type node, and the answer is false.
4200 This is what we want to return if we run into recursion via fields. */
4201 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = 1;
4203 /* Compute the real value. */
4204 result
= type_contains_placeholder_1 (type
);
4206 /* Store the real value. */
4207 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = result
+ 1;
4212 /* Push tree EXP onto vector QUEUE if it is not already present. */
4215 push_without_duplicates (tree exp
, vec
<tree
> *queue
)
4220 FOR_EACH_VEC_ELT (*queue
, i
, iter
)
4221 if (simple_cst_equal (iter
, exp
) == 1)
4225 queue
->safe_push (exp
);
4228 /* Given a tree EXP, find all occurrences of references to fields
4229 in a PLACEHOLDER_EXPR and place them in vector REFS without
4230 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4231 we assume here that EXP contains only arithmetic expressions
4232 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4236 find_placeholder_in_expr (tree exp
, vec
<tree
> *refs
)
4238 enum tree_code code
= TREE_CODE (exp
);
4242 /* We handle TREE_LIST and COMPONENT_REF separately. */
4243 if (code
== TREE_LIST
)
4245 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), refs
);
4246 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), refs
);
4248 else if (code
== COMPONENT_REF
)
4250 for (inner
= TREE_OPERAND (exp
, 0);
4251 REFERENCE_CLASS_P (inner
);
4252 inner
= TREE_OPERAND (inner
, 0))
4255 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
4256 push_without_duplicates (exp
, refs
);
4258 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), refs
);
4261 switch (TREE_CODE_CLASS (code
))
4266 case tcc_declaration
:
4267 /* Variables allocated to static storage can stay. */
4268 if (!TREE_STATIC (exp
))
4269 push_without_duplicates (exp
, refs
);
4272 case tcc_expression
:
4273 /* This is the pattern built in ada/make_aligning_type. */
4274 if (code
== ADDR_EXPR
4275 && TREE_CODE (TREE_OPERAND (exp
, 0)) == PLACEHOLDER_EXPR
)
4277 push_without_duplicates (exp
, refs
);
4283 case tcc_exceptional
:
4286 case tcc_comparison
:
4288 for (i
= 0; i
< TREE_CODE_LENGTH (code
); i
++)
4289 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
4293 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4294 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
4302 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4303 return a tree with all occurrences of references to F in a
4304 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4305 CONST_DECLs. Note that we assume here that EXP contains only
4306 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4307 occurring only in their argument list. */
4310 substitute_in_expr (tree exp
, tree f
, tree r
)
4312 enum tree_code code
= TREE_CODE (exp
);
4313 tree op0
, op1
, op2
, op3
;
4316 /* We handle TREE_LIST and COMPONENT_REF separately. */
4317 if (code
== TREE_LIST
)
4319 op0
= SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp
), f
, r
);
4320 op1
= SUBSTITUTE_IN_EXPR (TREE_VALUE (exp
), f
, r
);
4321 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
4324 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4326 else if (code
== COMPONENT_REF
)
4330 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4331 and it is the right field, replace it with R. */
4332 for (inner
= TREE_OPERAND (exp
, 0);
4333 REFERENCE_CLASS_P (inner
);
4334 inner
= TREE_OPERAND (inner
, 0))
4338 op1
= TREE_OPERAND (exp
, 1);
4340 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& op1
== f
)
4343 /* If this expression hasn't been completed let, leave it alone. */
4344 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& !TREE_TYPE (inner
))
4347 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4348 if (op0
== TREE_OPERAND (exp
, 0))
4352 = fold_build3 (COMPONENT_REF
, TREE_TYPE (exp
), op0
, op1
, NULL_TREE
);
4355 switch (TREE_CODE_CLASS (code
))
4360 case tcc_declaration
:
4366 case tcc_expression
:
4372 case tcc_exceptional
:
4375 case tcc_comparison
:
4377 switch (TREE_CODE_LENGTH (code
))
4383 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4384 if (op0
== TREE_OPERAND (exp
, 0))
4387 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4391 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4392 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4394 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4397 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4401 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4402 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4403 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
4405 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4406 && op2
== TREE_OPERAND (exp
, 2))
4409 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4413 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4414 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4415 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
4416 op3
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 3), f
, r
);
4418 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4419 && op2
== TREE_OPERAND (exp
, 2)
4420 && op3
== TREE_OPERAND (exp
, 3))
4424 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4436 new_tree
= NULL_TREE
;
4438 /* If we are trying to replace F with a constant or with another
4439 instance of one of the arguments of the call, inline back
4440 functions which do nothing else than computing a value from
4441 the arguments they are passed. This makes it possible to
4442 fold partially or entirely the replacement expression. */
4443 if (code
== CALL_EXPR
)
4445 bool maybe_inline
= false;
4446 if (CONSTANT_CLASS_P (r
))
4447 maybe_inline
= true;
4449 for (i
= 3; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4450 if (operand_equal_p (TREE_OPERAND (exp
, i
), r
, 0))
4452 maybe_inline
= true;
4457 tree t
= maybe_inline_call_in_expr (exp
);
4459 return SUBSTITUTE_IN_EXPR (t
, f
, r
);
4463 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4465 tree op
= TREE_OPERAND (exp
, i
);
4466 tree new_op
= SUBSTITUTE_IN_EXPR (op
, f
, r
);
4470 new_tree
= copy_node (exp
);
4471 TREE_OPERAND (new_tree
, i
) = new_op
;
4477 new_tree
= fold (new_tree
);
4478 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4479 process_call_operands (new_tree
);
4490 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4492 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4493 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
4498 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4499 for it within OBJ, a tree that is an object or a chain of references. */
4502 substitute_placeholder_in_expr (tree exp
, tree obj
)
4504 enum tree_code code
= TREE_CODE (exp
);
4505 tree op0
, op1
, op2
, op3
;
4508 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4509 in the chain of OBJ. */
4510 if (code
== PLACEHOLDER_EXPR
)
4512 tree need_type
= TYPE_MAIN_VARIANT (TREE_TYPE (exp
));
4515 for (elt
= obj
; elt
!= 0;
4516 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
4517 || TREE_CODE (elt
) == COND_EXPR
)
4518 ? TREE_OPERAND (elt
, 1)
4519 : (REFERENCE_CLASS_P (elt
)
4520 || UNARY_CLASS_P (elt
)
4521 || BINARY_CLASS_P (elt
)
4522 || VL_EXP_CLASS_P (elt
)
4523 || EXPRESSION_CLASS_P (elt
))
4524 ? TREE_OPERAND (elt
, 0) : 0))
4525 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
4528 for (elt
= obj
; elt
!= 0;
4529 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
4530 || TREE_CODE (elt
) == COND_EXPR
)
4531 ? TREE_OPERAND (elt
, 1)
4532 : (REFERENCE_CLASS_P (elt
)
4533 || UNARY_CLASS_P (elt
)
4534 || BINARY_CLASS_P (elt
)
4535 || VL_EXP_CLASS_P (elt
)
4536 || EXPRESSION_CLASS_P (elt
))
4537 ? TREE_OPERAND (elt
, 0) : 0))
4538 if (POINTER_TYPE_P (TREE_TYPE (elt
))
4539 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
4541 return fold_build1 (INDIRECT_REF
, need_type
, elt
);
4543 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4544 survives until RTL generation, there will be an error. */
4548 /* TREE_LIST is special because we need to look at TREE_VALUE
4549 and TREE_CHAIN, not TREE_OPERANDS. */
4550 else if (code
== TREE_LIST
)
4552 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), obj
);
4553 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), obj
);
4554 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
4557 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4560 switch (TREE_CODE_CLASS (code
))
4563 case tcc_declaration
:
4566 case tcc_exceptional
:
4569 case tcc_comparison
:
4570 case tcc_expression
:
4573 switch (TREE_CODE_LENGTH (code
))
4579 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4580 if (op0
== TREE_OPERAND (exp
, 0))
4583 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4587 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4588 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4590 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4593 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4597 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4598 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4599 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4601 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4602 && op2
== TREE_OPERAND (exp
, 2))
4605 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4609 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4610 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4611 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4612 op3
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 3), obj
);
4614 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4615 && op2
== TREE_OPERAND (exp
, 2)
4616 && op3
== TREE_OPERAND (exp
, 3))
4620 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4632 new_tree
= NULL_TREE
;
4634 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4636 tree op
= TREE_OPERAND (exp
, i
);
4637 tree new_op
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (op
, obj
);
4641 new_tree
= copy_node (exp
);
4642 TREE_OPERAND (new_tree
, i
) = new_op
;
4648 new_tree
= fold (new_tree
);
4649 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4650 process_call_operands (new_tree
);
4661 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4663 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4664 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
/* Subroutine of stabilize_reference; this is called for subtrees of
   references.  Any expression with side-effects must be put in a SAVE_EXPR
   to ensure that it is only evaluated once.

   We don't put SAVE_EXPR nodes around everything, because assigning very
   simple expressions to temporaries causes us to miss good opportunities
   for optimizations.  Among other things, the opportunity to fold in the
   addition of a constant into an addressing mode often gets lost, e.g.
   "y[i+1] += x;".  In general, we take the approach that we should not make
   an assignment unless we are forced into it - i.e., that any non-side effect
   operator should be allowed, and that cse should take care of coalescing
   multiple utterances of the same expression should that prove fruitful.  */

static tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
      /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
	 have side-effects.  */
      if (code == STATEMENT_LIST)
	return save_expr (e);
      /* FALLTHRU */
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
	return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
	 here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
	 especially the division by powers of 2 that is often
	 found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
	return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}
/* Stabilize a reference so that we can use it any number of times
   without causing its operands to be evaluated more than once.
   Returns the stabilized reference.  This works by means of save_expr,
   so see the caveats in the comments about save_expr.

   Also allows conversion expressions whose operands are references.
   Any other kind of expression is returned unchanged.  */

tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      result = build_nt (INDIRECT_REF,
			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      result = build_nt (COMPONENT_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
      break;

    case ARRAY_REF:
      result = build_nt (ARRAY_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
	 it wouldn't be ignored.  This matters when dealing with
	 volatiles.  */
      return stabilize_reference_1 (ref);

      /* If arg isn't a kind of lvalue we recognize, make no change.
	 Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
  protected_set_expr_location (result, EXPR_LOCATION (ref));

  return result;
}
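
/* Usage sketch (illustrative only; ARRAY, INDEX and ELT_TYPE are
   hypothetical trees, not names from this file):

     tree ref = build4 (ARRAY_REF, elt_type, array, index,
			NULL_TREE, NULL_TREE);
     tree stable = stabilize_reference (ref);

   If INDEX has side effects, the result reuses the array operand but
   runs the index through stabilize_reference_1, typically wrapping it
   in a SAVE_EXPR, so STABLE can appear on both sides of a compound
   assignment without evaluating the index twice.  */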
/* Low-level constructors for expressions.  */

/* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR.  */

void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	{
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
	}
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	{
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	}
    }

  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }

  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}
/* Build an expression of code CODE, data type TYPE, and operands as
   specified.  Expressions and reference nodes can be created this way.
   Constants, decls, types and misc nodes cannot be.

   We define 5 non-variadic functions, from 0 to 4 arguments.  This is
   enough for all extant tree codes.  */

tree
build0 (enum tree_code code, tree tt MEM_STAT_DECL)
{
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 0);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  return t;
}

tree
build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  if (node && !TYPE_P (node))
    {
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    {
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
    }
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
	recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
	  && node && !TYPE_P (node)
	  && TREE_CONSTANT (node))
	TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
	  && node && TREE_THIS_VOLATILE (node))
	TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
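
/* Usage sketch (illustrative only; DECL is a hypothetical VAR_DECL):

     tree addr = build1 (ADDR_EXPR,
			 build_pointer_type (TREE_TYPE (decl)), decl);

   Because the code is ADDR_EXPR, build1 calls
   recompute_tree_invariant_for_addr_expr, so TREE_CONSTANT on ADDR ends
   up set exactly when the address of DECL is a link-time constant.  */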
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)

tree
build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects, div_by_zero;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
	 we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      div_by_zero = integer_zerop (arg1);
      break;
    default:
      div_by_zero = false;
    }

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    {
      TREE_READONLY (t) = read_only;
      /* Don't mark X / 0 as constant.  */
      TREE_CONSTANT (t) = constant && !div_by_zero;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
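
/* Usage sketch (illustrative only; A and B are hypothetical trees of
   the same integral type):

     tree sum = build2 (PLUS_EXPR, TREE_TYPE (a), a, b);

   TREE_SIDE_EFFECTS, TREE_READONLY and TREE_CONSTANT of SUM are derived
   from A and B by PROCESS_ARG, and a division by a literal zero would
   additionally be kept non-constant by the div_by_zero check above.  */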
tree
build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}

tree
build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}

tree
build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
	 && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  poly_int64 offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      if (TREE_CODE (ptr) == MEM_REF)
	{
	  offset += mem_ref_offset (ptr).force_shwi ();
	  ptr = TREE_OPERAND (ptr, 0);
	}
      else
	ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
		ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
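
/* Usage sketch (illustrative only; PTR is a hypothetical pointer-valued
   tree):

     tree deref = build_simple_mem_ref_loc (input_location, ptr);

   This yields MEM_REF <TREE_TYPE (TREE_TYPE (ptr)), ptr, 0>, i.e. the
   middle-end form of a plain dereference of PTR.  */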
/* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T.  */

poly_offset_int
mem_ref_offset (const_tree t)
{
  return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
				SIGNED);
}

/* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
   offsetted by OFFSET units.  */

tree
build_invariant_address (tree type, tree base, poly_int64 offset)
{
  tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
			  build_fold_addr_expr (base),
			  build_int_cst (ptr_type_node, offset));
  tree addr = build1 (ADDR_EXPR, type, ref);
  recompute_tree_invariant_for_addr_expr (addr);
  return addr;
}
/* Similar except don't specify the TREE_TYPE
   and leave the TREE_SIDE_EFFECTS as 0.
   It is permissible for arguments to be null,
   or even garbage if their values do not matter.  */

tree
build_nt (enum tree_code code, ...)
{
  tree t;
  int length;
  int i;
  va_list p;

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  va_start (p, code);

  t = make_node (code);
  length = TREE_CODE_LENGTH (code);

  for (i = 0; i < length; i++)
    TREE_OPERAND (t, i) = va_arg (p, tree);

  va_end (p);
  return t;
}

/* Similar to build_nt, but for creating a CALL_EXPR object with a
   tree vec.  */

tree
build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
{
  tree ret, t;
  unsigned int ix;

  ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
  CALL_EXPR_FN (ret) = fn;
  CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
  FOR_EACH_VEC_SAFE_ELT (args, ix, t)
    CALL_EXPR_ARG (ret, ix) = t;
  return ret;
}
/* Create a DECL_... node of code CODE, name NAME (if non-null)
   and data type TYPE.
   We do NOT enter this node in any sort of symbol table.

   LOC is the location of the decl.

   layout_decl is used to set up the decl's storage layout.
   Other slots are initialized to 0 or null pointers.  */

tree
build_decl (location_t loc, enum tree_code code, tree name,
	    tree type MEM_STAT_DECL)
{
  tree t;

  t = make_node (code PASS_MEM_STAT);
  DECL_SOURCE_LOCATION (t) = loc;

  /*  if (type == error_mark_node)
    type = integer_type_node; */
  /* That is not done, deliberately, so that having error_mark_node
     as the type can suppress useless errors in the use of this variable.  */

  DECL_NAME (t) = name;
  TREE_TYPE (t) = type;

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    layout_decl (t, 0);

  return t;
}
/* Create and return a DEBUG_EXPR_DECL node of the given TYPE.  */

tree
build_debug_expr_decl (tree type)
{
  tree vexpr = make_node (DEBUG_EXPR_DECL);
  DECL_ARTIFICIAL (vexpr) = 1;
  TREE_TYPE (vexpr) = type;
  SET_DECL_MODE (vexpr, TYPE_MODE (type));
  return vexpr;
}

/* Builds and returns function declaration with NAME and TYPE.  */

tree
build_fn_decl (const char *name, tree type)
{
  tree id = get_identifier (name);
  tree decl = build_decl (input_location, FUNCTION_DECL, id, type);

  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;

  return decl;
}
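
/* Usage sketch (illustrative only; the declared name "f" is made up):

     tree fntype = build_function_type_list (integer_type_node, NULL_TREE);
     tree decl = build_fn_decl ("f", fntype);

   declares an external "int f (void)"; the result is public, external,
   artificial and nothrow, as set above.  */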
vec<tree, va_gc> *all_translation_units;

/* Builds a new translation-unit decl with name NAME, queues it in the
   global list of translation-unit decls and returns it.  */

tree
build_translation_unit_decl (tree name)
{
  tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
			name, NULL_TREE);
  TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
  vec_safe_push (all_translation_units, tu);
  return tu;
}

/* BLOCK nodes are used to represent the structure of binding contours
   and declarations, once those contours have been exited and their contents
   compiled.  This information is used for outputting debugging info.  */

tree
build_block (tree vars, tree subblocks, tree supercontext, tree chain)
{
  tree block = make_node (BLOCK);

  BLOCK_VARS (block) = vars;
  BLOCK_SUBBLOCKS (block) = subblocks;
  BLOCK_SUPERCONTEXT (block) = supercontext;
  BLOCK_CHAIN (block) = chain;
  return block;
}
/* Like SET_EXPR_LOCATION, but make sure the tree can have a location.

   LOC is the location to use in tree T.  */

void
protected_set_expr_location (tree t, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, loc);
  else if (t && TREE_CODE (t) == STATEMENT_LIST)
    {
      t = expr_single (t);
      if (t && CAN_HAVE_LOCATION_P (t))
	SET_EXPR_LOCATION (t, loc);
    }
}

/* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
   UNKNOWN_LOCATION.  */

void
protected_set_expr_location_if_unset (tree t, location_t loc)
{
  t = expr_single (t);
  if (t && !EXPR_HAS_LOCATION (t))
    protected_set_expr_location (t, loc);
}

/* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
   of the various TYPE_QUAL values.  */

static void
set_type_quals (tree type, int type_quals)
{
  TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
  TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
  TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
  TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
  TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
}

/* Returns true iff CAND and BASE have equivalent language-specific
   qualifiers.  */

bool
check_lang_type (const_tree cand, const_tree base)
{
  if (lang_hooks.types.type_hash_eq == NULL)
    return true;
  /* type_hash_eq currently only applies to these types.  */
  if (TREE_CODE (cand) != FUNCTION_TYPE
      && TREE_CODE (cand) != METHOD_TYPE)
    return true;
  return lang_hooks.types.type_hash_eq (cand, base);
}
/* This function checks to see if TYPE matches the size of one of the
   built-in atomic types, and returns that core atomic type.  */

static tree
find_atomic_core_type (const_tree type)
{
  tree base_atomic_type;

  /* Only handle complete types.  */
  if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
    return NULL_TREE;

  switch (tree_to_uhwi (TYPE_SIZE (type)))
    {
    case 8:
      base_atomic_type = atomicQI_type_node;
      break;

    case 16:
      base_atomic_type = atomicHI_type_node;
      break;

    case 32:
      base_atomic_type = atomicSI_type_node;
      break;

    case 64:
      base_atomic_type = atomicDI_type_node;
      break;

    case 128:
      base_atomic_type = atomicTI_type_node;
      break;

    default:
      base_atomic_type = NULL_TREE;
    }

  return base_atomic_type;
}
/* Returns true iff unqualified CAND and BASE are equivalent.  */

bool
check_base_type (const_tree cand, const_tree base)
{
  if (TYPE_NAME (cand) != TYPE_NAME (base)
      /* Apparently this is needed for Objective-C.  */
      || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
      || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
				TYPE_ATTRIBUTES (base)))
    return false;
  /* Check alignment.  */
  if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
      && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
    return true;
  /* Atomic types increase minimal alignment.  We must do so as well
     or we get duplicated canonical types.  See PR88686.  */
  if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
    {
      /* See if this object can map to a basic atomic type.  */
      tree atomic_type = find_atomic_core_type (cand);
      if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
	return true;
    }
  return false;
}
/* Returns true iff CAND is equivalent to BASE with TYPE_QUALS.  */

bool
check_qualified_type (const_tree cand, const_tree base, int type_quals)
{
  return (TYPE_QUALS (cand) == type_quals
	  && check_base_type (cand, base)
	  && check_lang_type (cand, base));
}

/* Returns true iff CAND is equivalent to BASE with ALIGN.  */

static bool
check_aligned_type (const_tree cand, const_tree base, unsigned int align)
{
  return (TYPE_QUALS (cand) == TYPE_QUALS (base)
	  && TYPE_NAME (cand) == TYPE_NAME (base)
	  /* Apparently this is needed for Objective-C.  */
	  && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
	  /* Check alignment.  */
	  && TYPE_ALIGN (cand) == align
	  /* Check this is a user-aligned type as build_aligned_type
	     would create.  */
	  && TYPE_USER_ALIGN (cand)
	  && attribute_list_equal (TYPE_ATTRIBUTES (cand),
				   TYPE_ATTRIBUTES (base))
	  && check_lang_type (cand, base));
}
/* Return a version of the TYPE, qualified as indicated by the
   TYPE_QUALS, if one exists.  If no qualified version exists yet,
   return NULL_TREE.  */

tree
get_qualified_type (tree type, int type_quals)
{
  if (TYPE_QUALS (type) == type_quals)
    return type;

  tree mv = TYPE_MAIN_VARIANT (type);
  if (check_qualified_type (mv, type, type_quals))
    return mv;

  /* Search the chain of variants to see if there is already one there just
     like the one we need to have.  If so, use that existing one.  We must
     preserve the TYPE_NAME, since there is code that depends on this.  */
  for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
    if (check_qualified_type (*tp, type, type_quals))
      {
	/* Put the found variant at the head of the variant list so
	   frequently searched variants get found faster.  The C++ FE
	   benefits greatly from this.  */
	tree t = *tp;
	*tp = TYPE_NEXT_VARIANT (t);
	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
	TYPE_NEXT_VARIANT (mv) = t;
	return t;
      }

  return NULL_TREE;
}
/* Like get_qualified_type, but creates the type if it does not
   exist.  This function never returns NULL_TREE.  */

tree
build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
{
  tree t;

  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
  if (!t)
    {
      t = build_variant_type_copy (type PASS_MEM_STAT);
      set_type_quals (t, type_quals);

      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
	{
	  /* See if this object can map to a basic atomic type.  */
	  tree atomic_type = find_atomic_core_type (type);
	  if (atomic_type)
	    {
	      /* Ensure the alignment of this type is compatible with
		 the required alignment of the atomic type.  */
	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
	    }
	}

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
	/* Propagate structural equality. */
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
	/* Build the underlying canonical type, since it is different
	   from TYPE. */
	{
	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
	}
      else
	/* T is its own canonical type. */
	TYPE_CANONICAL (t) = t;
    }

  return t;
}
/* Create a variant of type T with alignment ALIGN.  */

tree
build_aligned_type (tree type, unsigned int align)
{
  tree t;

  if (TYPE_PACKED (type)
      || TYPE_ALIGN (type) == align)
    return type;

  for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    if (check_aligned_type (t, type, align))
      return t;

  t = build_variant_type_copy (type);
  SET_TYPE_ALIGN (t, align);
  TYPE_USER_ALIGN (t) = 1;

  return t;
}

/* Create a new distinct copy of TYPE.  The new type is made its own
   MAIN_VARIANT. If TYPE requires structural equality checks, the
   resulting type requires structural equality checks; otherwise, its
   TYPE_CANONICAL points to itself. */

tree
build_distinct_type_copy (tree type MEM_STAT_DECL)
{
  tree t = copy_node (type PASS_MEM_STAT);

  TYPE_POINTER_TO (t) = 0;
  TYPE_REFERENCE_TO (t) = 0;

  /* Set the canonical type either to a new equivalence class, or
     propagate the need for structural equality checks. */
  if (TYPE_STRUCTURAL_EQUALITY_P (type))
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else
    TYPE_CANONICAL (t) = t;

  /* Make it its own variant.  */
  TYPE_MAIN_VARIANT (t) = t;
  TYPE_NEXT_VARIANT (t) = 0;

  /* Note that it is now possible for TYPE_MIN_VALUE to be a value
     whose TREE_TYPE is not t.  This can also happen in the Ada
     frontend when using subtypes.  */

  return t;
}

/* Create a new variant of TYPE, equivalent but distinct.  This is so
   the caller can modify it.  TYPE_CANONICAL for the return type will
   be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
   are considered equal by the language itself (or that both types
   require structural equality checks). */

tree
build_variant_type_copy (tree type MEM_STAT_DECL)
{
  tree t, m = TYPE_MAIN_VARIANT (type);

  t = build_distinct_type_copy (type PASS_MEM_STAT);

  /* Since we're building a variant, assume that it is a non-semantic
     variant.  This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
  TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
  /* Type variants have no alias set defined.  */
  TYPE_ALIAS_SET (t) = -1;

  /* Add the new type to the chain of variants of TYPE.  */
  TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
  TYPE_NEXT_VARIANT (m) = t;
  TYPE_MAIN_VARIANT (t) = m;

  return t;
}
/* Return true if the from tree in both tree maps are equal.  */

int
tree_map_base_eq (const void *va, const void *vb)
{
  const struct tree_map_base *const a = (const struct tree_map_base *) va,
    *const b = (const struct tree_map_base *) vb;
  return (a->from == b->from);
}

/* Hash a from tree in a tree_base_map.  */

unsigned int
tree_map_base_hash (const void *item)
{
  return htab_hash_pointer (((const struct tree_map_base *)item)->from);
}

/* Return true if this tree map structure is marked for garbage collection
   purposes.  We simply return true if the from tree is marked, so that this
   structure goes away when the from tree goes away.  */

int
tree_map_base_marked_p (const void *p)
{
  return ggc_marked_p (((const struct tree_map_base *) p)->from);
}

/* Hash a from tree in a tree_map.  */

unsigned int
tree_map_hash (const void *item)
{
  return (((const struct tree_map *) item)->hash);
}

/* Hash a from tree in a tree_decl_map.  */

unsigned int
tree_decl_map_hash (const void *item)
{
  return DECL_UID (((const struct tree_decl_map *) item)->base.from);
}

/* Return the initialization priority for DECL.  */

priority_type
decl_init_priority_lookup (tree decl)
{
  symtab_node *snode = symtab_node::get (decl);

  if (!snode)
    return DEFAULT_INIT_PRIORITY;
  return snode->get_init_priority ();
}

/* Return the finalization priority for DECL.  */

priority_type
decl_fini_priority_lookup (tree decl)
{
  cgraph_node *node = cgraph_node::get (decl);

  if (!node)
    return DEFAULT_INIT_PRIORITY;
  return node->get_fini_priority ();
}

/* Set the initialization priority for DECL to PRIORITY.  */

void
decl_init_priority_insert (tree decl, priority_type priority)
{
  struct symtab_node *snode;

  if (priority == DEFAULT_INIT_PRIORITY)
    {
      snode = symtab_node::get (decl);
      if (!snode)
	return;
    }
  else if (VAR_P (decl))
    snode = varpool_node::get_create (decl);
  else
    snode = cgraph_node::get_create (decl);
  snode->set_init_priority (priority);
}

/* Set the finalization priority for DECL to PRIORITY.  */

void
decl_fini_priority_insert (tree decl, priority_type priority)
{
  struct cgraph_node *node;

  if (priority == DEFAULT_INIT_PRIORITY)
    {
      node = cgraph_node::get (decl);
      if (!node)
	return;
    }
  else
    node = cgraph_node::get_create (decl);
  node->set_fini_priority (priority);
}

/* Print out the statistics for the DECL_DEBUG_EXPR hash table.  */

static void
print_debug_expr_statistics (void)
{
  fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
	   (long) debug_expr_for_decl->size (),
	   (long) debug_expr_for_decl->elements (),
	   debug_expr_for_decl->collisions ());
}

/* Print out the statistics for the DECL_VALUE_EXPR hash table.  */

static void
print_value_expr_statistics (void)
{
  fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
	   (long) value_expr_for_decl->size (),
	   (long) value_expr_for_decl->elements (),
	   value_expr_for_decl->collisions ());
}
/* Lookup a debug expression for FROM, and return it if we find one.  */

tree
decl_debug_expr_lookup (tree from)
{
  struct tree_decl_map *h, in;
  in.base.from = from;

  h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
  if (h)
    return h->to;
  return NULL_TREE;
}

/* Insert a mapping FROM->TO in the debug expression hashtable.  */

void
decl_debug_expr_insert (tree from, tree to)
{
  struct tree_decl_map *h;

  h = ggc_alloc<tree_decl_map> ();
  h->base.from = from;
  h->to = to;
  *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
}

/* Lookup a value expression for FROM, and return it if we find one.  */

tree
decl_value_expr_lookup (tree from)
{
  struct tree_decl_map *h, in;
  in.base.from = from;

  h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
  if (h)
    return h->to;
  return NULL_TREE;
}

/* Insert a mapping FROM->TO in the value expression hashtable.  */

void
decl_value_expr_insert (tree from, tree to)
{
  struct tree_decl_map *h;

  h = ggc_alloc<tree_decl_map> ();
  h->base.from = from;
  h->to = to;
  *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
}
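
/* Usage sketch (illustrative only; DECL and EXPR are hypothetical):

     SET_DECL_VALUE_EXPR (decl, expr);
     DECL_HAS_VALUE_EXPR_P (decl) = 1;

   SET_DECL_VALUE_EXPR is the tree.h wrapper that funnels into
   decl_value_expr_insert above; later reads of DECL_VALUE_EXPR go
   through decl_value_expr_lookup.  */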
/* Lookup a vector of debug arguments for FROM, and return it if we
   find one.  */

vec<tree, va_gc> **
decl_debug_args_lookup (tree from)
{
  struct tree_vec_map *h, in;

  if (!DECL_HAS_DEBUG_ARGS_P (from))
    return NULL;
  gcc_checking_assert (debug_args_for_decl != NULL);
  in.base.from = from;
  h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
  if (h)
    return &h->to;
  return NULL;
}

/* Insert a mapping FROM->empty vector of debug arguments in the
   debug-arguments hashtable.  */

vec<tree, va_gc> **
decl_debug_args_insert (tree from)
{
  struct tree_vec_map *h;
  tree_vec_map **loc;

  if (DECL_HAS_DEBUG_ARGS_P (from))
    return decl_debug_args_lookup (from);
  if (debug_args_for_decl == NULL)
    debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
  h = ggc_alloc<tree_vec_map> ();
  h->base.from = from;
  h->to = NULL;
  loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
  *loc = h;
  DECL_HAS_DEBUG_ARGS_P (from) = 1;
  return &h->to;
}
/* Hashing of types so that we don't make duplicates.
   The entry point is `type_hash_canon'.  */

/* Generate the default hash code for TYPE.  This is designed for
   speed, rather than maximum entropy.  */

hashval_t
type_hash_canon_hash (tree type)
{
  inchash::hash hstate;

  hstate.add_int (TREE_CODE (type));

  if (TREE_TYPE (type))
    hstate.add_object (TYPE_HASH (TREE_TYPE (type)));

  for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
    /* Just the identifier is adequate to distinguish.  */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));

  switch (TREE_CODE (type))
    {
    case METHOD_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
      /* FALLTHROUGH. */
    case FUNCTION_TYPE:
      for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) != error_mark_node)
	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
      break;

    case OFFSET_TYPE:
      hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
      break;

    case ARRAY_TYPE:
      {
	if (TYPE_DOMAIN (type))
	  hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
	if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
	  {
	    unsigned typeless = TYPE_TYPELESS_STORAGE (type);
	    hstate.add_object (typeless);
	  }
      }
      break;

    case INTEGER_TYPE:
      {
	tree t = TYPE_MAX_VALUE (type);
	if (!t)
	  t = TYPE_MIN_VALUE (type);
	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	  hstate.add_object (TREE_INT_CST_ELT (t, i));
	break;
      }

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      {
	unsigned prec = TYPE_PRECISION (type);
	hstate.add_object (prec);
	break;
      }

    case VECTOR_TYPE:
      hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));
      break;

    default:
      break;
    }

  return hstate.end ();
}
/* These are the Hashtable callback functions.  */

/* Returns true iff the types are equivalent.  */

bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
	  && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return false;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return false;

  switch (TREE_CODE (a->type))
    {
    case VOID_TYPE:
    case OPAQUE_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return true;

    case VECTOR_TYPE:
      return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
		       TYPE_VECTOR_SUBPARTS (b->type));

    case ENUMERAL_TYPE:
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return false;
      /* fall through */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
	break;
      return false;

    case ARRAY_TYPE:
      /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
	 where the flag should be inherited from the element type
	 and can change after ARRAY_TYPEs are created; on non-aggregates
	 compare it and hash it, scalars will never have that flag set
	 and we need to differentiate between arrays created by different
	 front-ends or middle-end created arrays.  */
      return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
		  || (TYPE_TYPELESS_STORAGE (a->type)
		      == TYPE_TYPELESS_STORAGE (b->type))));

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      if ((TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	   && (TYPE_NO_NAMED_ARGS_STDARG_P (a->type)
	       == TYPE_NO_NAMED_ARGS_STDARG_P (b->type)))
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	break;
      return false;

    default:
      return false;
    }

  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return true;
}
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.  */

tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
		  && t1 != type);
      if (TYPE_UID (type) + 1 == next_type_uid)
	--next_type_uid;
      /* Free also min/max values and the cache for integer
	 types.  This can't be done in free_node, as LTO frees
	 those on its own.  */
      if (TREE_CODE (type) == INTEGER_TYPE)
	{
	  if (TYPE_MIN_VALUE (type)
	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
	    {
	      /* Zero is always in TYPE_CACHED_VALUES.  */
	      if (! TYPE_UNSIGNED (type))
		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
	      ggc_free (TYPE_MIN_VALUE (type));
	    }
	  if (TYPE_MAX_VALUE (type)
	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
	    {
	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
	      ggc_free (TYPE_MAX_VALUE (type));
	    }
	  if (TYPE_CACHED_VALUES_P (type))
	    ggc_free (TYPE_CACHED_VALUES (type));
	}
      free_node (type);
      return t1;
    }
  else
    {
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}

static void
print_type_hash_statistics (void)
{
  fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
	   (long) type_hash_table->size (),
	   (long) type_hash_table->elements (),
	   type_hash_table->collisions ());
}
/* Given two lists of types
   (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
   return 1 if the lists contain the same types in the same order.
   Also, the TREE_PURPOSEs must match.  */

bool
type_list_equal (const_tree l1, const_tree l2)
{
  const_tree t1, t2;

  for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    if (TREE_VALUE (t1) != TREE_VALUE (t2)
	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
		  && (TREE_TYPE (TREE_PURPOSE (t1))
		      == TREE_TYPE (TREE_PURPOSE (t2))))))
      return false;

  return t1 == t2;
}

/* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
   given by TYPE.  If the argument list accepts variable arguments,
   then this function counts only the ordinary arguments.  */

int
type_num_arguments (const_tree fntype)
{
  int i = 0;

  for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    /* If the function does not take a variable number of arguments,
       the last element in the list will have type `void'.  */
    if (VOID_TYPE_P (TREE_VALUE (t)))
      break;
    else
      ++i;

  return i;
}

/* Return the type of the function TYPE's argument ARGNO if known.
   For vararg function's where ARGNO refers to one of the variadic
   arguments return null.  Otherwise, return a void_type_node for
   out-of-bounds ARGNO.  */

tree
type_argument_type (const_tree fntype, unsigned argno)
{
  /* Treat zero the same as an out-of-bounds argument number.  */
  if (!argno)
    return void_type_node;

  function_args_iterator iter;

  tree argtype;
  unsigned i = 1;
  FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
    {
      /* A vararg function's argument list ends in a null.  Otherwise,
	 an ordinary function's argument list ends with void.  Return
	 null if ARGNO refers to a vararg argument, void_type_node if
	 it's out of bounds, and the formal argument type otherwise.  */
      if (i == argno || VOID_TYPE_P (argtype))
	break;

      ++i;
    }

  return argtype;
}
/* Nonzero if integer constants T1 and T2
   represent the same constant value.  */

int
tree_int_cst_equal (const_tree t1, const_tree t2)
{
  if (t1 == t2)
    return 1;

  if (t1 == 0 || t2 == 0)
    return 0;

  STRIP_ANY_LOCATION_WRAPPER (t1);
  STRIP_ANY_LOCATION_WRAPPER (t2);

  if (TREE_CODE (t1) == INTEGER_CST
      && TREE_CODE (t2) == INTEGER_CST
      && wi::to_widest (t1) == wi::to_widest (t2))
    return 1;

  return 0;
}

/* Return true if T is an INTEGER_CST whose numerical value (extended
   according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  */

bool
tree_fits_shwi_p (const_tree t)
{
  return (t != NULL_TREE
	  && TREE_CODE (t) == INTEGER_CST
	  && wi::fits_shwi_p (wi::to_widest (t)));
}

/* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
   value (extended according to TYPE_UNSIGNED) fits in a poly_int64.  */

bool
tree_fits_poly_int64_p (const_tree t)
{
  if (t == NULL_TREE)
    return false;
  if (POLY_INT_CST_P (t))
    {
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
	if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
	  return false;
      return true;
    }
  return (TREE_CODE (t) == INTEGER_CST
	  && wi::fits_shwi_p (wi::to_widest (t)));
}

/* Return true if T is an INTEGER_CST whose numerical value (extended
   according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  */

bool
tree_fits_uhwi_p (const_tree t)
{
  return (t != NULL_TREE
	  && TREE_CODE (t) == INTEGER_CST
	  && wi::fits_uhwi_p (wi::to_widest (t)));
}

/* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
   value (extended according to TYPE_UNSIGNED) fits in a poly_uint64.  */

bool
tree_fits_poly_uint64_p (const_tree t)
{
  if (t == NULL_TREE)
    return false;
  if (POLY_INT_CST_P (t))
    {
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
	if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
	  return false;
      return true;
    }
  return (TREE_CODE (t) == INTEGER_CST
	  && wi::fits_uhwi_p (wi::to_widest (t)));
}

/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  */

HOST_WIDE_INT
tree_to_shwi (const_tree t)
{
  gcc_assert (tree_fits_shwi_p (t));
  return TREE_INT_CST_LOW (t);
}

/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  */

unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
{
  gcc_assert (tree_fits_uhwi_p (t));
  return TREE_INT_CST_LOW (t);
}
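
/* Usage sketch (illustrative only; SIZE is a hypothetical tree):

     unsigned HOST_WIDE_INT bytes = 0;
     if (tree_fits_uhwi_p (size))
       bytes = tree_to_uhwi (size);

   The predicate must be checked first; tree_to_uhwi asserts it.  */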
/* Return the most significant (sign) bit of T.  */

int
tree_int_cst_sign_bit (const_tree t)
{
  unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;

  return wi::extract_uhwi (wi::to_wide (t), bitno, 1);
}

/* Return an indication of the sign of the integer constant T.
   The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
   Note that -1 will never be returned if T's type is unsigned.  */

int
tree_int_cst_sgn (const_tree t)
{
  if (wi::to_wide (t) == 0)
    return 0;
  else if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return 1;
  else if (wi::neg_p (wi::to_wide (t)))
    return -1;
  else
    return 1;
}

/* Return the minimum number of bits needed to represent VALUE in a
   signed or unsigned type, UNSIGNEDP says which.  */

unsigned int
tree_int_cst_min_precision (tree value, signop sgn)
{
  /* If the value is negative, compute its negative minus 1.  The latter
     adjustment is because the absolute value of the largest negative value
     is one larger than the largest positive value.  This is equivalent to
     a bit-wise negation, so use that operation instead.  */

  if (tree_int_cst_sgn (value) < 0)
    value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);

  /* Return the number of bits needed, taking into account the fact
     that we need one more bit for a signed than unsigned type.
     If value is 0 or -1, the minimum precision is 1 no matter
     whether unsignedp is true or false.  */

  if (integer_zerop (value))
    return 1;
  else
    return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
}
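
/* Worked example (illustrative only): for VALUE == 5, tree_floor_log2
   returns 2, so the minimum precision is 3 bits unsigned ("101") and
   4 bits signed ("0101").  For VALUE == -3 the value is first rewritten
   to ~(-3) == 2, giving 1 + 1 + 1 == 3 bits, enough for the signed
   representation "101".  */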
/* Return truthvalue of whether T1 is the same tree structure as T2.
   Return 1 if they are the same.
   Return 0 if they are understandably different.
   Return -1 if either contains tree structure not understood by
   this function.  */

int
simple_cst_equal (const_tree t1, const_tree t2)
{
  enum tree_code code1, code2;
  int cmp;
  int i;

  if (t1 == t2)
    return 1;
  if (t1 == 0 || t2 == 0)
    return 0;

  /* For location wrappers to be the same, they must be at the same
     source location (and wrap the same thing).  */
  if (location_wrapper_p (t1) && location_wrapper_p (t2))
    {
      if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
	return 0;
      return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
    }

  code1 = TREE_CODE (t1);
  code2 = TREE_CODE (t2);

  if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
    {
      if (CONVERT_EXPR_CODE_P (code2)
	  || code2 == NON_LVALUE_EXPR)
	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
      else
	return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
    }

  else if (CONVERT_EXPR_CODE_P (code2)
	   || code2 == NON_LVALUE_EXPR)
    return simple_cst_equal (t1, TREE_OPERAND (t2, 0));

  if (code1 != code2)
    return 0;

  switch (code1)
    {
    case INTEGER_CST:
      return wi::to_widest (t1) == wi::to_widest (t2);

    case REAL_CST:
      return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));

    case FIXED_CST:
      return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));

    case STRING_CST:
      return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
	      && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
			   TREE_STRING_LENGTH (t1)));

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
	vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);

	if (vec_safe_length (v1) != vec_safe_length (v2))
	  return false;

	for (idx = 0; idx < vec_safe_length (v1); ++idx)
	  /* ??? Should we handle also fields here? */
	  if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
	    return false;
	return true;
      }

    case SAVE_EXPR:
      return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

    case CALL_EXPR:
      cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
      if (cmp <= 0)
	return cmp;
      if (call_expr_nargs (t1) != call_expr_nargs (t2))
	return 0;
      {
	const_tree arg1, arg2;
	const_call_expr_arg_iterator iter1, iter2;
	for (arg1 = first_const_call_expr_arg (t1, &iter1),
	       arg2 = first_const_call_expr_arg (t2, &iter2);
	     arg1 && arg2;
	     arg1 = next_const_call_expr_arg (&iter1),
	       arg2 = next_const_call_expr_arg (&iter2))
	  {
	    cmp = simple_cst_equal (arg1, arg2);
	    if (cmp <= 0)
	      return cmp;
	  }
	return arg1 == arg2;
      }

    case TARGET_EXPR:
      /* Special case: if either target is an unallocated VAR_DECL,
	 it means that it's going to be unified with whatever the
	 TARGET_EXPR is really supposed to initialize, so treat it
	 as being equivalent to anything.  */
      if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
	   && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
	   && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
	  || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
	      && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
	      && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
	cmp = 1;
      else
	cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

      if (cmp <= 0)
	return cmp;

      return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));

    case WITH_CLEANUP_EXPR:
      cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
      if (cmp <= 0)
	return cmp;

      return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));

    case COMPONENT_REF:
      if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

      return 0;

    case VAR_DECL:
    case PARM_DECL:
    case CONST_DECL:
    case FUNCTION_DECL:
      return 0;

    default:
      if (POLY_INT_CST_P (t1))
	/* A false return means maybe_ne rather than known_ne.  */
	return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
						TYPE_SIGN (TREE_TYPE (t1))),
			 poly_widest_int::from (poly_int_cst_value (t2),
						TYPE_SIGN (TREE_TYPE (t2))));
      break;
    }

  /* This general rule works for most tree codes.  All exceptions should be
     handled above.  If this is a language-specific tree code, we can't
     trust what might be in the operand, so say we don't know
     the situation.  */
  if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
    return -1;

  switch (TREE_CODE_CLASS (code1))
    {
    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_statement:
      cmp = 1;
      for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
	{
	  cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
	  if (cmp <= 0)
	    return cmp;
	}

      return cmp;

    default:
      return -1;
    }
}
/* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
   Return -1, 0, or 1 if the value of T is less than, equal to, or greater
   than U, respectively.  */

int
compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
{
  if (tree_int_cst_sgn (t) < 0)
    return -1;
  else if (!tree_fits_uhwi_p (t))
    return 1;
  else if (TREE_INT_CST_LOW (t) == u)
    return 0;
  else if (TREE_INT_CST_LOW (t) < u)
    return -1;
  else
    return 1;
}

/* Return true if SIZE represents a constant size that is in bounds of
   what the middle-end and the backend accepts (covering not more than
   half of the address-space).
   When PERR is non-null, set *PERR on failure to the description of
   why SIZE is not valid.  */

bool
valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
{
  if (POLY_INT_CST_P (size))
    {
      if (TREE_OVERFLOW (size))
	return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
	  return false;
      return true;
    }

  cst_size_error error;
  if (!perr)
    perr = &error;

  if (TREE_CODE (size) != INTEGER_CST)
    {
      *perr = cst_size_not_constant;
      return false;
    }

  if (TREE_OVERFLOW_P (size))
    {
      *perr = cst_size_overflow;
      return false;
    }

  if (tree_int_cst_sgn (size) < 0)
    {
      *perr = cst_size_negative;
      return false;
    }
  if (!tree_fits_uhwi_p (size)
      || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
	  < wi::to_widest (size) * 2))
    {
      *perr = cst_size_too_big;
      return false;
    }

  return true;
}

/* Return the precision of the type, or for a complex or vector type the
   precision of the type of its elements.  */

unsigned int
element_precision (const_tree type)
{
  if (!TYPE_P (type))
    type = TREE_TYPE (type);
  enum tree_code code = TREE_CODE (type);
  if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
    type = TREE_TYPE (type);

  return TYPE_PRECISION (type);
}
/* Return true if CODE represents an associative tree code.  Otherwise
   return false.  */
bool
associative_tree_code (enum tree_code code)
{
  switch (code)
    {
    case BIT_IOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_XOR_EXPR:
    case PLUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return true;

    default:
      break;
    }
  return false;
}

/* Return true if CODE represents a commutative tree code.  Otherwise
   return false.  */
bool
commutative_tree_code (enum tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NE_EXPR:
    case EQ_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case WIDEN_MULT_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      return true;

    default:
      break;
    }
  return false;
}

/* Return true if CODE represents a ternary tree code for which the
   first two operands are commutative.  Otherwise return false.  */
bool
commutative_ternary_tree_code (enum tree_code code)
{
  switch (code)
    {
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case DOT_PROD_EXPR:
      return true;

    default:
      break;
    }
  return false;
}

/* Returns true if CODE can overflow.  */

bool
operation_can_overflow (enum tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case LSHIFT_EXPR:
      /* Can overflow in various ways.  */
      return true;
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
      /* For INT_MIN / -1.  */
      return true;
    case NEGATE_EXPR:
    case ABS_EXPR:
      /* For -INT_MIN.  */
      return true;
    default:
      /* These operators cannot overflow.  */
      return false;
    }
}

/* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
   ftrapv doesn't generate trapping insns for CODE.  */

bool
operation_no_trapping_overflow (tree type, enum tree_code code)
{
  gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));

  /* We don't generate instructions that trap on overflow for complex or vector
     types.  */
  if (!INTEGRAL_TYPE_P (type))
    return true;

  if (!TYPE_OVERFLOW_TRAPS (type))
    return true;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
      /* These operators can overflow, and -ftrapv generates trapping code for
	 these.  */
      return false;
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case LSHIFT_EXPR:
      /* These operators can overflow, but -ftrapv does not generate trapping
	 code for these.  */
      return true;
    default:
      /* These operators cannot overflow.  */
      return true;
    }
}
/* Constructors for pointer, array and function types.
   (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
   constructed by language-dependent code, not here.)  */

/* Construct, lay out and return the type of pointers to TO_TYPE with
   mode MODE.  If MODE is VOIDmode, a pointer mode for the address
   space of TO_TYPE will be picked.  If CAN_ALIAS_ALL is TRUE,
   indicate this type can reference all of memory. If such a type has
   already been constructed, reuse it.  */

tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
{
  tree t;
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  if (mode == VOIDmode)
    {
      addr_space_t as = TYPE_ADDR_SPACE (to_type);
      mode = targetm.addr_space.pointer_mode (as);
    }

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
				     mode, false);

  /* Lay out the type.  This function has many callers that are concerned
     with expression-construction, and this simplifies them all.  */
  layout_type (t);

  return t;
}

/* By default build pointers in ptr_mode.  */

tree
build_pointer_type (tree to_type)
{
  return build_pointer_type_for_mode (to_type, VOIDmode, false);
}
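
/* Usage sketch (illustrative only): "const char *" can be built as

     tree cchar = build_qualified_type (char_type_node, TYPE_QUAL_CONST);
     tree pcchar = build_pointer_type (cchar);

   Repeated calls reuse the node recorded in TYPE_POINTER_TO rather than
   allocating a new POINTER_TYPE.  */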
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */

tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  if (mode == VOIDmode)
    {
      addr_space_t as = TYPE_ADDR_SPACE (to_type);
      mode = targetm.addr_space.pointer_mode (as);
    }

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
				       mode, false);

  layout_type (t);

  return t;
}

/* Build the node for the type of references-to-TO_TYPE by default
   in ptr_mode.  */

tree
build_reference_type (tree to_type)
{
  return build_reference_type_for_mode (to_type, VOIDmode, false);
}
#define MAX_INT_CACHED_PREC \
  (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];

static void
clear_nonstandard_integer_type_cache (void)
{
  for (size_t i = 0 ; i < 2 * MAX_INT_CACHED_PREC + 2 ; i++)
    {
      nonstandard_integer_type_cache[i] = NULL;
    }
}

/* Builds a signed or unsigned integer type of precision PRECISION.
   Used for C bitfields whose precision does not match that of
   built-in target types.  */

tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
{
  tree itype, ret;

  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  ret = type_hash_canon (hstate.end (), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}
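/* Illustrative sketch (not part of the original source): the 24-bit unsigned
   type a C front end would need for a bit-field such as "unsigned x : 24;".
   Kept out of the build; a repeated call with the same arguments returns the
   cached node.  */
#if 0
static tree
example_build_uint24 (void)
{
  return build_nonstandard_integer_type (24, /*unsignedp=*/1);
}
#endif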
#define MAX_BOOL_CACHED_PREC \
  (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];

/* Builds a boolean type of precision PRECISION.
   Used for boolean vectors to choose proper vector element size.  */

tree
build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
{
  tree type;

  if (precision <= MAX_BOOL_CACHED_PREC)
    {
      type = nonstandard_boolean_type_cache[precision];
      if (type)
	return type;
    }

  type = make_node (BOOLEAN_TYPE);
  TYPE_PRECISION (type) = precision;
  fixup_signed_type (type);

  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_boolean_type_cache[precision] = type;

  return type;
}
/* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
   or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
   is true, reuse such a type that has already been constructed.  */

static tree
build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
{
  tree itype = make_node (INTEGER_TYPE);

  TREE_TYPE (itype) = type;

  TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
  TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;

  TYPE_PRECISION (itype) = TYPE_PRECISION (type);
  SET_TYPE_MODE (itype, TYPE_MODE (type));
  TYPE_SIZE (itype) = TYPE_SIZE (type);
  TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
  SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
  TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
  SET_TYPE_WARN_IF_NOT_ALIGN (itype, TYPE_WARN_IF_NOT_ALIGN (type));

  if (!shared)
    return itype;

  if ((TYPE_MIN_VALUE (itype)
       && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
      || (TYPE_MAX_VALUE (itype)
	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
    {
      /* Since we cannot reliably merge this type, we need to compare it using
	 structural equality checks.  */
      SET_TYPE_STRUCTURAL_EQUALITY (itype);
      return itype;
    }

  hashval_t hash = type_hash_canon_hash (itype);
  itype = type_hash_canon (hash, itype);

  return itype;
}

/* Wrapper around build_range_type_1 with SHARED set to true.  */

tree
build_range_type (tree type, tree lowval, tree highval)
{
  return build_range_type_1 (type, lowval, highval, true);
}

/* Wrapper around build_range_type_1 with SHARED set to false.  */

tree
build_nonshared_range_type (tree type, tree lowval, tree highval)
{
  return build_range_type_1 (type, lowval, highval, false);
}
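/* Illustrative sketch (not part of the original source): a shared subrange
   0 .. 9 of sizetype, the same kind of node an array domain uses.  Kept out
   of the build.  */
#if 0
static tree
example_build_0_to_9_range (void)
{
  return build_range_type (sizetype, size_zero_node, size_int (9));
}
#endif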
/* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
   MAXVAL should be the maximum value in the domain
   (one less than the length of the array).

   The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
   We don't enforce this limit, that is up to caller (e.g. language front end).
   The limit exists because the result is a signed type and we don't handle
   sizes that use more than one HOST_WIDE_INT.  */

tree
build_index_type (tree maxval)
{
  return build_range_type (sizetype, size_zero_node, maxval);
}

/* Return true if the debug information for TYPE, a subtype, should be emitted
   as a subrange type.  If so, set LOWVAL to the low bound and HIGHVAL to the
   high bound, respectively.  Sometimes doing so unnecessarily obfuscates the
   debug info and doesn't reflect the source code.  */

bool
subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
{
  tree base_type = TREE_TYPE (type), low, high;

  /* Subrange types have a base type which is an integral type.  */
  if (!INTEGRAL_TYPE_P (base_type))
    return false;

  /* Get the real bounds of the subtype.  */
  if (lang_hooks.types.get_subrange_bounds)
    lang_hooks.types.get_subrange_bounds (type, &low, &high);
  else
    {
      low = TYPE_MIN_VALUE (type);
      high = TYPE_MAX_VALUE (type);
    }

  /* If the type and its base type have the same representation and the same
     name, then the type is not a subrange but a copy of the base type.  */
  if ((TREE_CODE (base_type) == INTEGER_TYPE
       || TREE_CODE (base_type) == BOOLEAN_TYPE)
      && int_size_in_bytes (type) == int_size_in_bytes (base_type)
      && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
      && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
      && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
    return false;

  if (lowval)
    *lowval = low;
  if (highval)
    *highval = high;
  return true;
}
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
   and number of elements specified by the range of values of INDEX_TYPE.
   If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
   If SHARED is true, reuse such a type that has already been constructed.
   If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type.  */

static tree
build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
		    bool shared, bool set_canonical)
{
  tree t;

  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      error ("arrays of functions are not meaningful");
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  TYPE_TYPELESS_STORAGE (t) = typeless_storage;
  layout_type (t);

  if (shared)
    {
      hashval_t hash = type_hash_canon_hash (t);
      t = type_hash_canon (hash, t);
    }

  if (TYPE_CANONICAL (t) == t && set_canonical)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
	  || in_lto_p)
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (elt_type) != elt_type
	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
	TYPE_CANONICAL (t)
	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
				index_type
				? TYPE_CANONICAL (index_type) : NULL_TREE,
				typeless_storage, shared, set_canonical);
    }

  return t;
}

/* Wrapper around build_array_type_1 with SHARED set to true.  */

tree
build_array_type (tree elt_type, tree index_type, bool typeless_storage)
{
  return
    build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
}

/* Wrapper around build_array_type_1 with SHARED set to false.  */

tree
build_nonshared_array_type (tree elt_type, tree index_type)
{
  return build_array_type_1 (elt_type, index_type, false, false, true);
}

/* Return a representation of ELT_TYPE[NELTS], using indices of type
   sizetype.  */

tree
build_array_type_nelts (tree elt_type, poly_uint64 nelts)
{
  return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
}

/* Recursively examines the array elements of TYPE, until a non-array
   element type is found.  */

tree
strip_array_types (tree type)
{
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  return type;
}
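/* Illustrative sketch (not part of the original source): "int[10]" built
   from the element count.  This is equivalent to combining
   build_index_type (size_int (9)) with build_array_type.  Kept out of the
   build.  */
#if 0
static tree
example_build_int_array_10 (void)
{
  return build_array_type_nelts (integer_type_node, 10);
}
#endif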
/* Computes the canonical argument types from the argument type list
   ARGTYPES.

   Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
   on entry to this function, or if any of the ARGTYPES are
   structural.

   Upon return, *ANY_NONCANONICAL_P will be true iff either it was
   true on entry to this function, or if any of the ARGTYPES are
   non-canonical.

   Returns a canonical argument list, which may be ARGTYPES when the
   canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
   true) or would not differ from ARGTYPES.  */

static tree
maybe_canonicalize_argtypes (tree argtypes,
			     bool *any_structural_p,
			     bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
	/* Fail gracefully by stating that the type is structural.  */
	*any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
	*any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
	       || TREE_PURPOSE (arg))
	/* If the argument has a default argument, we consider it
	   non-canonical even though the type itself is canonical.
	   That way, different variants of function and method types
	   with default arguments will all point to the variant with
	   no defaults as their canonical type.  */
	any_noncanonical_argtypes_p = true;
    }

  if (*any_structural_p)
    return argtypes;

  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;

      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
	{
	  if (arg == void_list_node)
	    is_void = true;
	  else
	    canon_argtypes = tree_cons (NULL_TREE,
					TYPE_CANONICAL (TREE_VALUE (arg)),
					canon_argtypes);
	}

      canon_argtypes = nreverse (canon_argtypes);
      if (is_void)
	canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}
/* Construct, lay out and return
   the type of functions returning type VALUE_TYPE
   given arguments of types ARG_TYPES.
   ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
   are data type nodes for the arguments of the function.
   NO_NAMED_ARGS_STDARG_P is true if this is a prototyped
   variable-arguments function with (...) prototype (no named arguments).
   If such a type has already been constructed, reuse it.  */

tree
build_function_type (tree value_type, tree arg_types,
		     bool no_named_args_stdarg_p)
{
  tree t;
  inchash::hash hstate;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  gcc_assert (arg_types != error_mark_node);

  if (TREE_CODE (value_type) == FUNCTION_TYPE)
    {
      error ("function return type cannot be function");
      value_type = integer_type_node;
    }

  /* Make a node of the sort we want.  */
  t = make_node (FUNCTION_TYPE);
  TREE_TYPE (t) = value_type;
  TYPE_ARG_TYPES (t) = arg_types;
  if (no_named_args_stdarg_p)
    {
      gcc_assert (arg_types == NULL_TREE);
      TYPE_NO_NAMED_ARGS_STDARG_P (t) = 1;
    }

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  */
  any_structural_p   = TYPE_STRUCTURAL_EQUALITY_P (value_type);
  any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
  canon_argtypes = maybe_canonicalize_argtypes (arg_types,
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
					      canon_argtypes);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);
  return t;
}
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If VAARGS is set, no void_type_node is appended to the
   list.  ARGP must always be terminated by a NULL_TREE.  */

static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
{
  tree t, args, last;

  t = va_arg (argp, tree);
  for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
    args = tree_cons (NULL_TREE, t, args);

  if (vaargs)
    {
      last = args;
      if (args != NULL_TREE)
	args = nreverse (args);
      gcc_assert (last != void_list_node);
    }
  else if (args == NULL_TREE)
    args = void_list_node;
  else
    {
      last = args;
      args = nreverse (args);
      TREE_CHAIN (last) = void_list_node;
    }
  args = build_function_type (return_type, args, vaargs && args == NULL_TREE);

  return args;
}

/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If additional arguments are provided, they are
   additional argument types.  The list of argument types must always
   be terminated by NULL_TREE.  */

tree
build_function_type_list (tree return_type, ...)
{
  tree args;
  va_list p;

  va_start (p, return_type);
  args = build_function_type_list_1 (false, return_type, p);
  va_end (p);

  return args;
}

/* Build a variable argument function type.  The RETURN_TYPE is the
   type returned by the function.  If additional arguments are provided,
   they are additional argument types.  The list of argument types must
   always be terminated by NULL_TREE.  */

tree
build_varargs_function_type_list (tree return_type, ...)
{
  tree args;
  va_list p;

  va_start (p, return_type);
  args = build_function_type_list_1 (true, return_type, p);
  va_end (p);

  return args;
}

/* Build a function type.  RETURN_TYPE is the type returned by the
   function; VAARGS indicates whether the function takes varargs.  The
   function takes N named arguments, the types of which are provided in
   ARG_TYPES.  */

static tree
build_function_type_array_1 (bool vaargs, tree return_type, int n,
			     tree *arg_types)
{
  int i;
  tree t = vaargs ? NULL_TREE : void_list_node;

  for (i = n - 1; i >= 0; i--)
    t = tree_cons (NULL_TREE, arg_types[i], t);

  return build_function_type (return_type, t, vaargs && n == 0);
}

/* Build a function type.  RETURN_TYPE is the type returned by the
   function.  The function takes N named arguments, the types of which
   are provided in ARG_TYPES.  */

tree
build_function_type_array (tree return_type, int n, tree *arg_types)
{
  return build_function_type_array_1 (false, return_type, n, arg_types);
}

/* Build a variable argument function type.  RETURN_TYPE is the type
   returned by the function.  The function takes N named arguments, the
   types of which are provided in ARG_TYPES.  */

tree
build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
{
  return build_function_type_array_1 (true, return_type, n, arg_types);
}
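/* Illustrative sketch (not part of the original source): building the type
   of "int f (double, char *)" and of a printf-like varargs function with
   the list-based constructors above.  Kept out of the build.  */
#if 0
static tree
example_build_function_types (void)
{
  /* int f (double, char *): the argument chain ends in void_list_node.  */
  tree fntype
    = build_function_type_list (integer_type_node, double_type_node,
				build_pointer_type (char_type_node),
				NULL_TREE);
  /* int g (char *, ...): no trailing void_list_node, so varargs.  */
  tree vatype
    = build_varargs_function_type_list (integer_type_node,
					build_pointer_type (char_type_node),
					NULL_TREE);
  /* Return both in a TREE_LIST purely for illustration.  */
  return tree_cons (NULL_TREE, fntype, tree_cons (NULL_TREE, vatype,
						  NULL_TREE));
}
#endif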
/* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
   and ARGTYPES (a TREE_LIST) are the return type and arguments types
   for the method.  An implicit additional parameter (of type
   pointer-to-BASETYPE) is added to the ARGTYPES.  */

tree
build_method_type_directly (tree basetype,
			    tree rettype,
			    tree argtypes)
{
  tree t;
  tree ptype;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
						&any_structural_p,
						&any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
				    TYPE_CANONICAL (rettype),
				    canon_argtypes);
  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}

/* Construct, lay out and return the type of methods belonging to class
   BASETYPE and whose arguments and values are described by TYPE.
   If that type exists already, reuse it.
   TYPE must be a FUNCTION_TYPE node.  */

tree
build_method_type (tree basetype, tree type)
{
  gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);

  return build_method_type_directly (basetype,
				     TREE_TYPE (type),
				     TYPE_ARG_TYPES (type));
}
/* Construct, lay out and return the type of offsets to a value
   of type TYPE, within an object of type BASETYPE.
   If a suitable offset type exists already, reuse it.  */

tree
build_offset_type (tree basetype, tree type)
{
  tree t;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
	  || TYPE_STRUCTURAL_EQUALITY_P (type))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
	       || TYPE_CANONICAL (type) != type)
	TYPE_CANONICAL (t)
	  = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
			       TYPE_CANONICAL (type));
    }

  return t;
}
/* Create a complex type whose components are COMPONENT_TYPE.

   If NAMED is true, the type is given a TYPE_NAME.  We do not always
   do so because this creates a DECL node and thus make the DECL_UIDs
   dependent on the type canonicalization hashtable, which is GC-ed,
   so the DECL_UIDs would not be stable wrt garbage collection.  */

tree
build_complex_type (tree component_type, bool named)
{
  gcc_assert (INTEGRAL_TYPE_P (component_type)
	      || SCALAR_FLOAT_TYPE_P (component_type)
	      || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  */
  tree probe = make_node (COMPLEX_TYPE);

  TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (probe);
  tree t = type_hash_canon (hash, probe);

  if (t == probe)
    {
      /* We created a new type.  The hash insertion will have laid
	 out the type.  We need to check the canonicalization and
	 maybe set the name.  */
      gcc_checking_assert (COMPLETE_TYPE_P (t)
			   && !TYPE_NAME (t)
			   && TYPE_CANONICAL (t) == t);

      if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
	TYPE_CANONICAL (t)
	  = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);

      /* We need to create a name, since complex is a fundamental type.  */
      if (named)
	{
	  const char *name = NULL;

	  if (TREE_TYPE (t) == char_type_node)
	    name = "complex char";
	  else if (TREE_TYPE (t) == signed_char_type_node)
	    name = "complex signed char";
	  else if (TREE_TYPE (t) == unsigned_char_type_node)
	    name = "complex unsigned char";
	  else if (TREE_TYPE (t) == short_integer_type_node)
	    name = "complex short int";
	  else if (TREE_TYPE (t) == short_unsigned_type_node)
	    name = "complex short unsigned int";
	  else if (TREE_TYPE (t) == integer_type_node)
	    name = "complex int";
	  else if (TREE_TYPE (t) == unsigned_type_node)
	    name = "complex unsigned int";
	  else if (TREE_TYPE (t) == long_integer_type_node)
	    name = "complex long int";
	  else if (TREE_TYPE (t) == long_unsigned_type_node)
	    name = "complex long unsigned int";
	  else if (TREE_TYPE (t) == long_long_integer_type_node)
	    name = "complex long long int";
	  else if (TREE_TYPE (t) == long_long_unsigned_type_node)
	    name = "complex long long unsigned int";

	  if (name != NULL)
	    TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
					get_identifier (name), t);
	}
    }

  return build_qualified_type (t, TYPE_QUALS (component_type));
}
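/* Illustrative sketch (not part of the original source): the C type
   "_Complex double", named so that it shows up nicely in debug output.
   Kept out of the build.  */
#if 0
static tree
example_build_complex_double (void)
{
  return build_complex_type (double_type_node, /*named=*/true);
}
#endif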
/* If TYPE is a real or complex floating-point type and the target
   does not directly support arithmetic on TYPE then return the wider
   type to be used for arithmetic on TYPE.  Otherwise, return
   NULL_TREE.  */

tree
excess_precision_type (tree type)
{
  /* The target can give two different responses to the question of
     which excess precision mode it would like depending on whether we
     are in -fexcess-precision=standard or -fexcess-precision=fast.  */

  enum excess_precision_type requested_type
    = (flag_excess_precision == EXCESS_PRECISION_FAST
       ? EXCESS_PRECISION_TYPE_FAST
       : (flag_excess_precision == EXCESS_PRECISION_FLOAT16
	  ? EXCESS_PRECISION_TYPE_FLOAT16 : EXCESS_PRECISION_TYPE_STANDARD));

  enum flt_eval_method target_flt_eval_method
    = targetm.c.excess_precision (requested_type);

  /* The target should not ask for unpredictable float evaluation (though
     it might advertise that implicitly the evaluation is unpredictable,
     but we don't care about that here, it will have been reported
     elsewhere).  If it does ask for unpredictable evaluation, we have
     nothing to do here.  */
  gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);

  /* Nothing to do.  The target has asked for all types we know about
     to be computed with their native precision and range.  */
  if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
    return NULL_TREE;

  /* The target will promote this type in a target-dependent way, so excess
     precision ought to leave it alone.  */
  if (targetm.promoted_type (type) != NULL_TREE)
    return NULL_TREE;

  machine_mode float16_type_mode = (float16_type_node
				    ? TYPE_MODE (float16_type_node)
				    : VOIDmode);
  machine_mode bfloat16_type_mode = (bfloat16_type_node
				     ? TYPE_MODE (bfloat16_type_node)
				     : VOIDmode);
  machine_mode float_type_mode = TYPE_MODE (float_type_node);
  machine_mode double_type_mode = TYPE_MODE (double_type_node);

  switch (TREE_CODE (type))
    {
    case REAL_TYPE:
      {
	machine_mode type_mode = TYPE_MODE (type);
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode
		|| type_mode == bfloat16_type_mode)
	      return float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == bfloat16_type_mode
		|| type_mode == float_type_mode)
	      return double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == bfloat16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return long_double_type_node;
	    break;
	  default:
	    break;
	  }
	break;
      }
    case COMPLEX_TYPE:
      {
	if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
	  return NULL_TREE;
	machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
	switch (target_flt_eval_method)
	  {
	  case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
	    if (type_mode == float16_type_mode
		|| type_mode == bfloat16_type_mode)
	      return complex_float_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == bfloat16_type_mode
		|| type_mode == float_type_mode)
	      return complex_double_type_node;
	    break;
	  case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
	    if (type_mode == float16_type_mode
		|| type_mode == bfloat16_type_mode
		|| type_mode == float_type_mode
		|| type_mode == double_type_mode)
	      return complex_long_double_type_node;
	    break;
	  default:
	    break;
	  }
	break;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Return OP, stripped of any conversions to wider types as much as is safe.
   Converting the value back to OP's type makes a value equivalent to OP.

   If FOR_TYPE is nonzero, we return a value which, if converted to
   type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.

   OP must have integer, real or enumeral type.  Pointers are not allowed!

   There are some cases where the obvious value we could return
   would regenerate to OP if converted to OP's type,
   but would not extend like OP to wider types.
   If FOR_TYPE indicates such extension is contemplated, we eschew such values.
   For example, if OP is (unsigned short)(signed char)-1,
   we avoid returning (signed char)-1 if FOR_TYPE is int,
   even though extending that to an unsigned short would regenerate OP,
   since the result of extending (signed char)-1 to (int)
   is different from (int) OP.  */

tree
get_unwidened (tree op, tree for_type)
{
  /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension.  */
  tree type = TREE_TYPE (op);
  unsigned final_prec
    = TYPE_PRECISION (for_type != 0 ? for_type : type);
  int uns
    = (for_type != 0 && for_type != type
       && final_prec > TYPE_PRECISION (type)
       && TYPE_UNSIGNED (type));
  tree win = op;

  while (CONVERT_EXPR_P (op))
    {
      int bitschange;

      /* TYPE_PRECISION on vector types has different meaning
	 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
	 so avoid them here.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
	break;

      bitschange = TYPE_PRECISION (TREE_TYPE (op))
		   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));

      /* Truncations are many-one so cannot be removed.
	 Unless we are later going to truncate down even farther.  */
      if (bitschange < 0
	  && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */
      op = TREE_OPERAND (op, 0);

      /* If we have not stripped any zero-extensions (uns is 0),
	 we can strip any kind of extension.
	 If we have previously stripped a zero-extension,
	 only zero-extensions can safely be stripped.
	 Any extension can be stripped if the bits it would produce
	 are all going to be discarded later by truncating to FOR_TYPE.  */

      if (bitschange > 0)
	{
	  if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
	    win = op;
	  /* TYPE_UNSIGNED says whether this is a zero-extension.
	     Let's avoid computing it if it does not affect WIN
	     and if UNS will not be needed again.  */
	  if ((uns
	       || CONVERT_EXPR_P (op))
	      && TYPE_UNSIGNED (TREE_TYPE (op)))
	    {
	      uns = 1;
	      win = op;
	    }
	}
    }

  /* If we finally reach a constant see if it fits in sth smaller and
     in that case convert it.  */
  if (TREE_CODE (win) == INTEGER_CST)
    {
      tree wtype = TREE_TYPE (win);
      unsigned prec = wi::min_precision (wi::to_wide (win), TYPE_SIGN (wtype));
      if (for_type)
	prec = MAX (prec, final_prec);
      if (prec < TYPE_PRECISION (wtype))
	{
	  tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
	  if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
	    win = fold_convert (t, win);
	}
    }

  return win;
}
/* Return OP or a simpler expression for a narrower value
   which can be sign-extended or zero-extended to give back OP.
   Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
   or 0 if the value should be sign-extended.  */

tree
get_narrower (tree op, int *unsignedp_ptr)
{
  int uns = 0;
  bool first = true;
  tree win = op;
  bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));

  if (TREE_CODE (op) == COMPOUND_EXPR)
    {
      do
	op = TREE_OPERAND (op, 1);
      while (TREE_CODE (op) == COMPOUND_EXPR);
      tree ret = get_narrower (op, unsignedp_ptr);
      if (ret == op)
	return win;
      auto_vec <tree, 16> v;
      unsigned int i;
      for (op = win; TREE_CODE (op) == COMPOUND_EXPR;
	   op = TREE_OPERAND (op, 1))
	v.safe_push (op);
      FOR_EACH_VEC_ELT_REVERSE (v, i, op)
	ret = build2_loc (EXPR_LOCATION (op), COMPOUND_EXPR,
			  TREE_TYPE (ret), TREE_OPERAND (op, 0),
			  ret);
      return ret;
    }
  while (TREE_CODE (op) == NOP_EXPR)
    {
      int bitschange
	= (TYPE_PRECISION (TREE_TYPE (op))
	   - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));

      /* Truncations are many-one so cannot be removed.  */
      if (bitschange < 0)
	break;

      /* See what's inside this conversion.  If we decide to strip it,
	 we will set WIN.  */

      if (bitschange > 0)
	{
	  op = TREE_OPERAND (op, 0);
	  /* An extension: the outermost one can be stripped,
	     but remember whether it is zero or sign extension.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  /* Otherwise, if a sign extension has been stripped,
	     only sign extensions can now be stripped;
	     if a zero extension has been stripped, only zero-extensions.  */
	  else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
	    break;
	  first = false;
	}
      else /* bitschange == 0 */
	{
	  /* A change in nominal type can always be stripped, but we must
	     preserve the unsignedness.  */
	  if (first)
	    uns = TYPE_UNSIGNED (TREE_TYPE (op));
	  first = false;
	  op = TREE_OPERAND (op, 0);
	  /* Keep trying to narrow, but don't assign op to win if it
	     would turn an integral type into something else.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
	    continue;
	}

      win = op;
    }

  if (TREE_CODE (op) == COMPONENT_REF
      /* Since type_for_size always gives an integer type.  */
      && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
      && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
      /* Ensure field is laid out already.  */
      && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
      && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
    {
      unsigned HOST_WIDE_INT innerprec
	= tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
      int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
		       || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
      tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);

      /* We can get this structure field in a narrower type that fits it,
	 but the resulting extension to its nominal type (a fullword type)
	 must satisfy the same conditions as for other extensions.

	 Do this only for fields that are aligned (not bit-fields),
	 because when bit-field insns will be used there is no
	 advantage in doing this.  */

      if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
	  && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
	  && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
	  && type != 0)
	{
	  if (first)
	    uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
	  win = fold_convert (type, op);
	}
    }

  *unsignedp_ptr = uns;
  return win;
}
/* Return true if integer constant C has a value that is permissible
   for TYPE, an integral type.  */

bool
int_fits_type_p (const_tree c, const_tree type)
{
  tree type_low_bound, type_high_bound;
  bool ok_for_low_bound, ok_for_high_bound;
  signop sgn_c = TYPE_SIGN (TREE_TYPE (c));

  /* Non-standard boolean types can have arbitrary precision but various
     transformations assume that they can only take values 0 and +/-1.  */
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return wi::fits_to_boolean_p (wi::to_wide (c), type);

retry:
  type_low_bound = TYPE_MIN_VALUE (type);
  type_high_bound = TYPE_MAX_VALUE (type);

  /* If at least one bound of the type is a constant integer, we can check
     ourselves and maybe make a decision.  If no such decision is possible, but
     this type is a subtype, try checking against that.  Otherwise, use
     fits_to_tree_p, which checks against the precision.

     Compute the status for each possibly constant bound, and return if we see
     one does not match.  Use ok_for_xxx_bound for this purpose, assigning -1
     for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
     for "constant known to fit".  */

  /* Check if c >= type_low_bound.  */
  if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (c, type_low_bound))
	return false;
      ok_for_low_bound = true;
    }
  else
    ok_for_low_bound = false;

  /* Check if c <= type_high_bound.  */
  if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
    {
      if (tree_int_cst_lt (type_high_bound, c))
	return false;
      ok_for_high_bound = true;
    }
  else
    ok_for_high_bound = false;

  /* If the constant fits both bounds, the result is known.  */
  if (ok_for_low_bound && ok_for_high_bound)
    return true;

  /* Perform some generic filtering which may allow making a decision
     even if the bounds are not constant.  First, negative integers
     never fit in unsigned types.  */
  if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (wi::to_wide (c)))
    return false;

  /* Second, narrower types always fit in wider ones.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
    return true;

  /* Third, unsigned integers with top bit set never fit signed types.  */
  if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
    {
      int prec = GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c))) - 1;
      if (prec < TYPE_PRECISION (TREE_TYPE (c)))
	{
	  /* When a tree_cst is converted to a wide-int, the precision
	     is taken from the type.  However, if the precision of the
	     mode underneath the type is smaller than that, it is
	     possible that the value will not fit.  The test below
	     fails if any bit is set between the sign bit of the
	     underlying mode and the top bit of the type.  */
	  if (wi::zext (wi::to_wide (c), prec - 1) != wi::to_wide (c))
	    return false;
	}
      else if (wi::neg_p (wi::to_wide (c)))
	return false;
    }

  /* If we haven't been able to decide at this point, there is nothing more we
     can check ourselves here.  Look at the base type if we have one and it
     has the same precision.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && TREE_TYPE (type) != 0
      && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
    {
      type = TREE_TYPE (type);
      goto retry;
    }

  /* Or to fits_to_tree_p, if nothing else.  */
  return wi::fits_to_tree_p (wi::to_wide (c), type);
}
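/* Illustrative sketch (not part of the original source): 255 fits in
   "unsigned char" but 256 does not.  Kept out of the build.  */
#if 0
static bool
example_int_fits (void)
{
  tree c255 = build_int_cst (integer_type_node, 255);
  tree c256 = build_int_cst (integer_type_node, 256);
  return (int_fits_type_p (c255, unsigned_char_type_node)
	  && !int_fits_type_p (c256, unsigned_char_type_node));
}
#endif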
/* Stores bounds of an integer TYPE in MIN and MAX.  If TYPE has non-constant
   bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
   represented (assuming two's-complement arithmetic) within the bit
   precision of the type are returned instead.  */

void
get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
{
  if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
      && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
    wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type)), min, TYPE_SIGN (type));
  else
    {
      if (TYPE_UNSIGNED (type))
	mpz_set_ui (min, 0);
      else
	{
	  wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
	  wi::to_mpz (mn, min, SIGNED);
	}
    }

  if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
      && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
    wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type)), max, TYPE_SIGN (type));
  else
    {
      wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
      wi::to_mpz (mn, max, TYPE_SIGN (type));
    }
}
/* Return true if VAR is an automatic variable.  */

bool
auto_var_p (const_tree var)
{
  return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
	    || TREE_CODE (var) == PARM_DECL)
	   && ! TREE_STATIC (var))
	  || TREE_CODE (var) == RESULT_DECL);
}

/* Return true if VAR is an automatic variable defined in function FN.  */

bool
auto_var_in_fn_p (const_tree var, const_tree fn)
{
  return (DECL_P (var) && DECL_CONTEXT (var) == fn
	  && (auto_var_p (var)
	      || TREE_CODE (var) == LABEL_DECL));
}
/* Subprogram of following function.  Called by walk_tree.

   Return *TP if it is an automatic variable or parameter of the
   function passed in as DATA.  */

static tree
find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
{
  tree fn = (tree) data;

  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  else if (DECL_P (*tp)
	   && auto_var_in_fn_p (*tp, fn))
    return *tp;

  return NULL_TREE;
}
/* Returns true if T is, contains, or refers to a type with variable
   size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
   arguments, but not the return type.  If FN is nonzero, only return
   true if a modifier of the type or position of FN is a variable or
   parameter inside FN.

   This concept is more general than that of C99 'variably modified types':
   in C99, a struct type is never variably modified because a VLA may not
   appear as a structure member.  However, in GNU C code like:

     struct S { int i[f()]; };

   is valid, and other languages may define similar constructs.  */

bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

/* Test if T is either variable (if FN is zero) or an expression containing
   a variable in FN.  If TYPE isn't gimplified, return true also if
   gimplify_one_sizepos would gimplify the expression into a local
   variable.  */
#define RETURN_TRUE_IF_VAR(T)						\
  do { tree _t = (T);							\
    if (_t != NULL_TREE							\
	&& _t != error_mark_node					\
	&& !CONSTANT_CLASS_P (_t)					\
	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
	&& (!fn								\
	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
		&& (TREE_CODE (_t) != VAR_DECL				\
		    && !CONTAINS_PLACEHOLDER_P (_t)))			\
	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
      return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* Ada can have pointer types referring to themselves indirectly.  */
      if (TREE_VISITED (type))
	return false;
      TREE_VISITED (type) = true;
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	{
	  TREE_VISITED (type) = false;
	  return true;
	}
      TREE_VISITED (type) = false;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    /* If the type is a qualified union, then the DECL_QUALIFIER
	       of fields can also be an expression containing a variable.  */
	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));

	    /* If the field is a qualified union, then it's only a container
	       for what's inside so we look into it.  That's necessary in LTO
	       mode because the sizes of the field tested above have been set
	       to PLACEHOLDER_EXPRs by free_lang_data.  */
	    if (TREE_CODE (TREE_TYPE (t)) == QUAL_UNION_TYPE
		&& variably_modified_type_p (TREE_TYPE (t), fn))
	      return true;
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
}
#undef RETURN_TRUE_IF_VAR
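/* Illustrative sketch (not part of the original source): the array type a
   C front end builds for "int a[n]" with non-constant N is variably
   modified, while "int a[10]" is not.  Both arguments here are assumed to
   be such front-end-built types.  Kept out of the build.  */
#if 0
static bool
example_variably_modified (tree vla_type, tree fixed_array_type)
{
  return (variably_modified_type_p (vla_type, NULL_TREE)
	  && !variably_modified_type_p (fixed_array_type, NULL_TREE));
}
#endif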
/* Given a DECL or TYPE, return the scope in which it was declared, or
   NULL_TREE if there is no containing scope.  */

tree
get_containing_scope (const_tree t)
{
  return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
}

/* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL.  */

const_tree
get_ultimate_context (const_tree decl)
{
  while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
    {
      if (TREE_CODE (decl) == BLOCK)
	decl = BLOCK_SUPERCONTEXT (decl);
      else
	decl = get_containing_scope (decl);
    }
  return decl;
}

/* Return the innermost context enclosing DECL that is
   a FUNCTION_DECL, or zero if none.  */

tree
decl_function_context (const_tree decl)
{
  tree context;

  if (TREE_CODE (decl) == ERROR_MARK)
    return 0;

  /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
     where we look up the function at runtime.  Such functions always take
     a first argument of type 'pointer to real context'.

     C++ should really be fixed to use DECL_CONTEXT for the real context,
     and use something else for the "virtual context".  */
  else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VIRTUAL_P (decl))
    context = TYPE_MAIN_VARIANT
      (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
  else
    context = DECL_CONTEXT (decl);

  while (context && TREE_CODE (context) != FUNCTION_DECL)
    {
      if (TREE_CODE (context) == BLOCK)
	context = BLOCK_SUPERCONTEXT (context);
      else
	context = get_containing_scope (context);
    }

  return context;
}

/* Return the innermost context enclosing DECL that is
   a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
   TYPE_DECLs and FUNCTION_DECLs are transparent to this function.  */

tree
decl_type_context (const_tree decl)
{
  tree context = DECL_CONTEXT (decl);

  while (context)
    switch (TREE_CODE (context))
      {
      case NAMESPACE_DECL:
      case TRANSLATION_UNIT_DECL:
	return NULL_TREE;

      case RECORD_TYPE:
      case UNION_TYPE:
      case QUAL_UNION_TYPE:
	return context;

      case TYPE_DECL:
      case FUNCTION_DECL:
	context = DECL_CONTEXT (context);
	break;

      case BLOCK:
	context = BLOCK_SUPERCONTEXT (context);
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
/* CALL is a CALL_EXPR.  Return the declaration for the function
   called, or NULL_TREE if the called function cannot be
   determined.  */

tree
get_callee_fndecl (const_tree call)
{
  tree addr;

  if (call == error_mark_node)
    return error_mark_node;

  /* It's invalid to call this function with anything but a
     CALL_EXPR.  */
  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  /* The first operand to the CALL is the address of the function
     called.  */
  addr = CALL_EXPR_FN (call);

  /* If there is no function, return early.  */
  if (addr == NULL_TREE)
    return NULL_TREE;

  STRIP_NOPS (addr);

  /* If this is a readonly function pointer, extract its initial value.  */
  if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
      && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
      && DECL_INITIAL (addr))
    addr = DECL_INITIAL (addr);

  /* If the address is just `&f' for some function `f', then we know
     that `f' is being called.  */
  if (TREE_CODE (addr) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
    return TREE_OPERAND (addr, 0);

  /* We couldn't figure out what was being called.  */
  return NULL_TREE;
}
/* Return true when STMTs arguments and return value match those of FNDECL,
   a decl of a builtin function.  */

bool
tree_builtin_call_types_compatible_p (const_tree call, tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    if (tree decl = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl)))
      fndecl = decl;

  bool gimple_form = (cfun && (cfun->curr_properties & PROP_gimple)) != 0;
  if (gimple_form
      ? !useless_type_conversion_p (TREE_TYPE (call),
				    TREE_TYPE (TREE_TYPE (fndecl)))
      : (TYPE_MAIN_VARIANT (TREE_TYPE (call))
	 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl)))))
    return false;

  tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  unsigned nargs = call_expr_nargs (call);
  for (unsigned i = 0; i < nargs; ++i, targs = TREE_CHAIN (targs))
    {
      /* Variadic args follow.  */
      if (!targs)
	return true;
      tree arg = CALL_EXPR_ARG (call, i);
      tree type = TREE_VALUE (targs);
      if (gimple_form
	  ? !useless_type_conversion_p (type, TREE_TYPE (arg))
	  : TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (arg)))
	{
	  /* For pointer arguments be more forgiving, e.g. due to
	     FILE * vs. fileptr_type_node, or say char * vs. const char *
	     differences etc.  */
	  if (!gimple_form
	      && POINTER_TYPE_P (type)
	      && POINTER_TYPE_P (TREE_TYPE (arg))
	      && tree_nop_conversion_p (type, TREE_TYPE (arg)))
	    continue;
	  /* char/short integral arguments are promoted to int
	     by several frontends if targetm.calls.promote_prototypes
	     is true.  Allow such promotion too.  */
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg))
	      && !TYPE_UNSIGNED (TREE_TYPE (arg))
	      && targetm.calls.promote_prototypes (TREE_TYPE (fndecl))
	      && (gimple_form
		  ? useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg))
		  : tree_nop_conversion_p (integer_type_node,
					   TREE_TYPE (arg))))
	    continue;
	  return false;
	}
    }
  if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
    return false;
  return true;
}
/* If CALL_EXPR CALL calls a normal built-in function or an internal function,
   return the associated function code, otherwise return CFN_LAST.  */

combined_fn
get_call_combined_fn (const_tree call)
{
  /* It's invalid to call this function with anything but a CALL_EXPR.  */
  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  if (!CALL_EXPR_FN (call))
    return as_combined_fn (CALL_EXPR_IFN (call));

  tree fndecl = get_callee_fndecl (call);
  if (fndecl
      && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
      && tree_builtin_call_types_compatible_p (call, fndecl))
    return as_combined_fn (DECL_FUNCTION_CODE (fndecl));

  return CFN_LAST;
}
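/* Illustrative sketch (not part of the original source): recognizing a call
   to sqrt by its combined function code instead of comparing decls.
   Kept out of the build.  */
#if 0
static bool
example_call_is_sqrt (const_tree call)
{
  return get_call_combined_fn (call) == CFN_BUILT_IN_SQRT;
}
#endif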
/* Comparator of indices based on tree_node_counts.  */

static int
tree_nodes_cmp (const void *p1, const void *p2)
{
  const unsigned *n1 = (const unsigned *)p1;
  const unsigned *n2 = (const unsigned *)p2;

  return tree_node_counts[*n1] - tree_node_counts[*n2];
}

/* Comparator of indices based on tree_code_counts.  */

static int
tree_codes_cmp (const void *p1, const void *p2)
{
  const unsigned *n1 = (const unsigned *)p1;
  const unsigned *n2 = (const unsigned *)p2;

  return tree_code_counts[*n1] - tree_code_counts[*n2];
}
#define TREE_MEM_USAGE_SPACES 40

/* Print debugging information about tree nodes generated during the compile,
   and any language-specific information.  */

void
dump_tree_statistics (void)
{
  if (GATHER_STATISTICS)
    {
      uint64_t total_nodes, total_bytes;
      fprintf (stderr, "\nKind Nodes Bytes\n");
      mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      total_nodes = total_bytes = 0;

      {
	auto_vec <unsigned> indices (all_kinds);
	for (unsigned i = 0; i < all_kinds; i++)
	  indices.quick_push (i);
	indices.qsort (tree_nodes_cmp);

	for (unsigned i = 0; i < (int) all_kinds; i++)
	  {
	    unsigned j = indices[i];
	    fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n",
		     tree_node_kind_names[j], SIZE_AMOUNT (tree_node_counts[j]),
		     SIZE_AMOUNT (tree_node_sizes[j]));
	    total_nodes += tree_node_counts[j];
	    total_bytes += tree_node_sizes[j];
	  }
	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
	fprintf (stderr, "%-20s %6" PRIu64 "%c %9" PRIu64 "%c\n", "Total",
		 SIZE_AMOUNT (total_nodes), SIZE_AMOUNT (total_bytes));
	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
      }

      {
	fprintf (stderr, "Code Nodes\n");
	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);

	auto_vec <unsigned> indices (MAX_TREE_CODES);
	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
	  indices.quick_push (i);
	indices.qsort (tree_codes_cmp);

	for (unsigned i = 0; i < MAX_TREE_CODES; i++)
	  {
	    unsigned j = indices[i];
	    fprintf (stderr, "%-32s %6" PRIu64 "%c\n",
		     get_tree_code_name ((enum tree_code) j),
		     SIZE_AMOUNT (tree_code_counts[j]));
	  }
	mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
	fprintf (stderr, "\n");
	ssanames_print_statistics ();
	fprintf (stderr, "\n");
	phinodes_print_statistics ();
	fprintf (stderr, "\n");
      }
    }
  else
    fprintf (stderr, "(No per-node statistics)\n");

  print_type_hash_statistics ();
  print_debug_expr_statistics ();
  print_value_expr_statistics ();
  lang_hooks.print_statistics ();
}
#define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"

/* Generate a crc32 of the low BYTES bytes of VALUE.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* This relies on the raw feedback's top 4 bits being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
  static const unsigned syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };
#undef FEEDBACK
#undef SYNDROME

  value <<= (32 - bytes * 8);
  for (unsigned ix = bytes * 2; ix--; value <<= 4)
    {
      unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];

      chksum = (chksum << 4) ^ feedback;
    }

  return chksum;
}
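/* Illustrative sketch (not part of the original source): feeding a whole
   32-bit word into the checksum four bytes at a time, which is what the
   string variant below does character by character.  Kept out of the
   build.  */
#if 0
static unsigned
example_crc32_of_word (unsigned chksum, unsigned value)
{
  /* Process all four low bytes of VALUE.  */
  return crc32_unsigned_n (chksum, value, 4);
}
#endif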
/* Generate a crc32 of a string.  */

unsigned
crc32_string (unsigned chksum, const char *string)
{
  do
    chksum = crc32_byte (chksum, *string);
  while (*string++);
  return chksum;
}

/* P is a string that will be used in a symbol.  Mask out any characters
   that are not valid in that context.  */

void
clean_symbol_name (char *p)
{
  for (; *p; p++)
    if (! (ISALNUM (*p)
#ifndef NO_DOLLAR_IN_LABEL	/* this for `$'; unlikely, but... -- kr */
	   || *p == '$'
#endif
#ifndef NO_DOT_IN_LABEL		/* this for `.'; unlikely, but...  */
	   || *p == '.'
#endif
	   ))
      *p = '_';
}
static GTY(()) unsigned anon_cnt = 0; /* Saved for PCH.  */

/* Create a unique anonymous identifier.  The identifier is still a
   valid assembly label.  */

tree
make_anon_name ()
{
  const char *fmt =
#if !defined (NO_DOT_IN_LABEL)
    "."
#elif !defined (NO_DOLLAR_IN_LABEL)
    "$"
#else
    "_"
#endif
    "_anon_%d";

  char buf[24];
  int len = snprintf (buf, sizeof (buf), fmt, anon_cnt++);
  gcc_checking_assert (len < int (sizeof (buf)));

  tree id = get_identifier_with_length (buf, len);
  IDENTIFIER_ANON_P (id) = true;

  return id;
}
/* Generate a name for a special-purpose function.
   The generated name may need to be unique across the whole link.
   Changes to this function may also require corresponding changes to
   xstrdup_mask_random.
   TYPE is some string to identify the purpose of this function to the
   linker or collect2; it must start with an uppercase letter,
   one of:
   I - for constructors
   D - for destructors
   N - for C++ anonymous namespaces
   F - for DWARF unwind frame information.  */

tree
get_file_function_name (const char *type)
{
  char *buf;
  const char *p;
  char *q;

  /* If we already have a name we know to be unique, just use that.  */
  if (first_global_object_name)
    p = q = ASTRDUP (first_global_object_name);
  /* If the target is handling the constructors/destructors, they
     will be local to this file and the name is only necessary for
     debugging purposes.
     We also assign sub_I and sub_D suffixes to constructors called from
     the global static constructors.  These are always local.  */
  else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
	   || (startswith (type, "sub_")
	       && (type[4] == 'I' || type[4] == 'D')))
    {
      const char *file = main_input_filename;
      if (! file)
	file = LOCATION_FILE (input_location);
      /* Just use the file's basename, because the full pathname
	 might be quite long.  */
      p = q = ASTRDUP (lbasename (file));
    }
  else
    {
      /* Otherwise, the name must be unique across the entire link.
	 We don't have anything that we know to be unique to this translation
	 unit, so use what we do have and throw in some randomness.  */
      unsigned len;
      const char *name = weak_global_object_name;
      const char *file = main_input_filename;

      if (! name)
	name = "";
      if (! file)
	file = LOCATION_FILE (input_location);

      len = strlen (file);
      q = (char *) alloca (9 + 19 + len + 1);
      memcpy (q, file, len + 1);

      snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
		crc32_string (0, name), get_random_seed (false));

      p = q;
    }

  clean_symbol_name (q);

  buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
			 + strlen (type));

  /* Set up the name of the file-level functions we may need.
     Use a global object (which is already required to be unique over
     the program) rather than the file name (which imposes extra
     constraints).  */
  sprintf (buf, FILE_FUNCTION_FORMAT, type, p);

  return get_identifier (buf);
}
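/* Illustrative sketch (not part of the original source): requesting the
   identifier for a file-level static-constructor function; with a first
   global object named "foo" the result follows FILE_FUNCTION_FORMAT, i.e.
   "_GLOBAL__I_foo".  Kept out of the build.  */
#if 0
static tree
example_static_ctor_name (void)
{
  return get_file_function_name ("I");
}
#endif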
#if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)

/* Complain that the tree code of NODE does not match the expected 0
   terminated list of trailing codes.  The trailing code list can be
   empty, for a more vague error message.  FILE, LINE, and FUNCTION
   are of the caller.  */

void
tree_check_failed (const_tree node, const char *file,
		   int line, const char *function, ...)
{
  va_list args;
  const char *buffer;
  unsigned length = 0;
  enum tree_code code;

  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  if (length)
    {
      char *tmp;
      va_start (args, function);
      length += strlen ("expected ");
      buffer = tmp = (char *) alloca (length);
      length = 0;
      while ((code = (enum tree_code) va_arg (args, int)))
	{
	  const char *prefix = length ? " or " : "expected ";

	  strcpy (tmp + length, prefix);
	  length += strlen (prefix);
	  strcpy (tmp + length, get_tree_code_name (code));
	  length += strlen (get_tree_code_name (code));
	}
      va_end (args);
    }
  else
    buffer = "unexpected node";

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
/* Complain that the tree code of NODE does match the expected 0
   terminated list of trailing codes.  FILE, LINE, and FUNCTION are of
   the caller.  */

void
tree_not_check_failed (const_tree node, const char *file,
		       int line, const char *function, ...)
{
  va_list args;
  char *buffer;
  unsigned length = 0;
  enum tree_code code;

  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  va_start (args, function);
  buffer = (char *) alloca (length);
  length = 0;
  while ((code = (enum tree_code) va_arg (args, int)))
    {
      if (length)
	{
	  strcpy (buffer + length, " or ");
	  length += 4;
	}
      strcpy (buffer + length, get_tree_code_name (code));
      length += strlen (get_tree_code_name (code));
    }
  va_end (args);

  internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
/* Similar to tree_check_failed, except that we check for a class of tree
   code, given in CL.  */

void
tree_class_check_failed (const_tree node, const enum tree_code_class cl,
			 const char *file, int line, const char *function)
{
  internal_error
    ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
/* Similar to tree_check_failed, except that instead of specifying a
   dozen codes, use the knowledge that they're all sequential.  */

void
tree_range_check_failed (const_tree node, const char *file, int line,
			 const char *function, enum tree_code c1,
			 enum tree_code c2)
{
  char *buffer;
  unsigned length = 0;
  unsigned int c;

  for (c = c1; c <= c2; ++c)
    length += 4 + strlen (get_tree_code_name ((enum tree_code) c));

  length += strlen ("expected ");
  buffer = (char *) alloca (length);
  length = 0;

  for (c = c1; c <= c2; ++c)
    {
      const char *prefix = length ? " or " : "expected ";

      strcpy (buffer + length, prefix);
      length += strlen (prefix);
      strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
      length += strlen (get_tree_code_name ((enum tree_code) c));
    }

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
/* Similar to tree_check_failed, except that we check that a tree does
   not have the specified code, given in CL.  */

void
tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
			     const char *file, int line, const char *function)
{
  internal_error
    ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
/* Similar to tree_check_failed but applied to OMP_CLAUSE codes.  */

void
omp_clause_check_failed (const_tree node, const char *file, int line,
			 const char *function, enum omp_clause_code code)
{
  internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
		  "in %s, at %s:%d",
		  omp_clause_code_name[code],
		  get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}
/* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes.  */

void
omp_clause_range_check_failed (const_tree node, const char *file, int line,
			       const char *function, enum omp_clause_code c1,
			       enum omp_clause_code c2)
{
  char *buffer;
  unsigned length = 0;
  unsigned int c;

  for (c = c1; c <= c2; ++c)
    length += 4 + strlen (omp_clause_code_name[c]);

  length += strlen ("expected ");
  buffer = (char *) alloca (length);
  length = 0;

  for (c = c1; c <= c2; ++c)
    {
      const char *prefix = length ? " or " : "expected ";

      strcpy (buffer + length, prefix);
      length += strlen (prefix);
      strcpy (buffer + length, omp_clause_code_name[c]);
      length += strlen (omp_clause_code_name[c]);
    }

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, omp_clause_code_name[TREE_CODE (node)],
		  function, trim_filename (file), line);
}
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])

/* Similar to tree_class_check_failed, except that we check for
   whether CODE contains the tree structure identified by EN.  */

void
tree_contains_struct_check_failed (const_tree node,
				   const enum tree_node_structure_enum en,
				   const char *file, int line,
				   const char *function)
{
  internal_error
    ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
     TS_ENUM_NAME (en),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}
/* Similar to above, except that the check is for the bounds of a TREE_VEC's
   (dynamically sized) vector.  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
     "at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}

/* Similar to above, except that the check is for the bounds of a TREE_VEC's
   (dynamically sized) vector.  */

void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
9051 /* Similar to above, except that the check is for the bounds of the operand
9052 vector of an expression node EXP. */
9055 tree_operand_check_failed (int idx
, const_tree exp
, const char *file
,
9056 int line
, const char *function
)
9058 enum tree_code code
= TREE_CODE (exp
);
9060 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9061 idx
+ 1, get_tree_code_name (code
), TREE_OPERAND_LENGTH (exp
),
9062 function
, trim_filename (file
), line
);
9065 /* Similar to above, except that the check is for the number of
9066 operands of an OMP_CLAUSE node. */
9069 omp_clause_operand_check_failed (int idx
, const_tree t
, const char *file
,
9070 int line
, const char *function
)
9073 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
9074 "in %s, at %s:%d", idx
+ 1, omp_clause_code_name
[OMP_CLAUSE_CODE (t
)],
9075 omp_clause_num_ops
[OMP_CLAUSE_CODE (t
)], function
,
9076 trim_filename (file
), line
);
#endif /* ENABLE_TREE_CHECKING */

/* Create a new vector type node holding NUNITS units of type INNERTYPE,
   and mapped to the machine mode MODE.  Initialize its fields and build
   the information necessary for debugging output.  */

static tree
make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
{
  tree t;
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
            || mode != VOIDmode)
           && !VECTOR_BOOLEAN_TYPE_P (t))
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
                                              TYPE_ATTRIBUTES (innertype),
                                              TYPE_QUALS (innertype));

  return t;
}
static tree
make_or_reuse_type (unsigned size, int unsignedp)
{
  int i;

  if (size == INT_TYPE_SIZE)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (size == CHAR_TYPE_SIZE)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (size == SHORT_TYPE_SIZE)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (size == LONG_TYPE_SIZE)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (size == LONG_LONG_TYPE_SIZE)
    return (unsignedp ? long_long_unsigned_type_node
            : long_long_integer_type_node);

  for (i = 0; i < NUM_INT_N_ENTS; i++)
    if (size == int_n_data[i].bitsize
        && int_n_enabled_p[i])
      return (unsignedp ? int_n_trees[i].unsigned_type
              : int_n_trees[i].signed_type);

  if (unsignedp)
    return make_unsigned_type (size);
  else
    return make_signed_type (size);
}
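/* Illustrative sketch (hypothetical sizes, not from this file): on a
   target where INT_TYPE_SIZE == 32 and LONG_TYPE_SIZE == 64,

     make_or_reuse_type (32, 1)  -> unsigned_type_node
     make_or_reuse_type (64, 0)  -> long_integer_type_node
     make_or_reuse_type (24, 0)  -> make_signed_type (24)

   i.e. the standard C type nodes are reused whenever the requested
   precision matches one of them, and a fresh type is made otherwise.  */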
/* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP.  */

static tree
make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
{
  if (satp)
    {
      if (size == SHORT_FRACT_TYPE_SIZE)
        return unsignedp ? sat_unsigned_short_fract_type_node
                         : sat_short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
        return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
        return unsignedp ? sat_unsigned_long_fract_type_node
                         : sat_long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
        return unsignedp ? sat_unsigned_long_long_fract_type_node
                         : sat_long_long_fract_type_node;
    }
  else
    {
      if (size == SHORT_FRACT_TYPE_SIZE)
        return unsignedp ? unsigned_short_fract_type_node
                         : short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
        return unsignedp ? unsigned_fract_type_node : fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
        return unsignedp ? unsigned_long_fract_type_node
                         : long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
        return unsignedp ? unsigned_long_long_fract_type_node
                         : long_long_fract_type_node;
    }

  return make_fract_type (size, unsignedp, satp);
}
/* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP.  */

static tree
make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
{
  if (satp)
    {
      if (size == SHORT_ACCUM_TYPE_SIZE)
        return unsignedp ? sat_unsigned_short_accum_type_node
                         : sat_short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
        return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
        return unsignedp ? sat_unsigned_long_accum_type_node
                         : sat_long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
        return unsignedp ? sat_unsigned_long_long_accum_type_node
                         : sat_long_long_accum_type_node;
    }
  else
    {
      if (size == SHORT_ACCUM_TYPE_SIZE)
        return unsignedp ? unsigned_short_accum_type_node
                         : short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
        return unsignedp ? unsigned_accum_type_node : accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
        return unsignedp ? unsigned_long_accum_type_node
                         : long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
        return unsignedp ? unsigned_long_long_accum_type_node
                         : long_long_accum_type_node;
    }

  return make_accum_type (size, unsignedp, satp);
}
/* Create an atomic variant node for TYPE.  This routine is called
   during initialization of data types to create the 5 basic atomic
   types.  The generic build_variant_type function requires these to
   already be set up in order to function properly, so cannot be
   called from there.  If ALIGN is non-zero, then ensure alignment is
   overridden to this value.  */

static tree
build_atomic_base (tree type, unsigned int align)
{
  tree t;

  /* Make sure it's not already registered.  */
  if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
    return t;

  t = build_variant_type_copy (type);
  set_type_quals (t, TYPE_QUAL_ATOMIC);

  if (align)
    SET_TYPE_ALIGN (t, align);

  return t;
}

/* Information about the _FloatN and _FloatNx types.  This must be in
   the same order as the corresponding TI_* enum values.  */
const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
  {
    { 16, false },
    { 32, false },
    { 64, false },
    { 128, false },
    { 32, true },
    { 64, true },
    { 128, true },
  };
9262 /* Create nodes for all integer types (and error_mark_node) using the sizes
9263 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9266 build_common_tree_nodes (bool signed_char
)
9270 error_mark_node
= make_node (ERROR_MARK
);
9271 TREE_TYPE (error_mark_node
) = error_mark_node
;
9273 initialize_sizetypes ();
9275 /* Define both `signed char' and `unsigned char'. */
9276 signed_char_type_node
= make_signed_type (CHAR_TYPE_SIZE
);
9277 TYPE_STRING_FLAG (signed_char_type_node
) = 1;
9278 unsigned_char_type_node
= make_unsigned_type (CHAR_TYPE_SIZE
);
9279 TYPE_STRING_FLAG (unsigned_char_type_node
) = 1;
9281 /* Define `char', which is like either `signed char' or `unsigned char'
9282 but not the same as either. */
9285 ? make_signed_type (CHAR_TYPE_SIZE
)
9286 : make_unsigned_type (CHAR_TYPE_SIZE
));
9287 TYPE_STRING_FLAG (char_type_node
) = 1;
9289 short_integer_type_node
= make_signed_type (SHORT_TYPE_SIZE
);
9290 short_unsigned_type_node
= make_unsigned_type (SHORT_TYPE_SIZE
);
9291 integer_type_node
= make_signed_type (INT_TYPE_SIZE
);
9292 unsigned_type_node
= make_unsigned_type (INT_TYPE_SIZE
);
9293 long_integer_type_node
= make_signed_type (LONG_TYPE_SIZE
);
9294 long_unsigned_type_node
= make_unsigned_type (LONG_TYPE_SIZE
);
9295 long_long_integer_type_node
= make_signed_type (LONG_LONG_TYPE_SIZE
);
9296 long_long_unsigned_type_node
= make_unsigned_type (LONG_LONG_TYPE_SIZE
);
9298 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9300 int_n_trees
[i
].signed_type
= make_signed_type (int_n_data
[i
].bitsize
);
9301 int_n_trees
[i
].unsigned_type
= make_unsigned_type (int_n_data
[i
].bitsize
);
9303 if (int_n_enabled_p
[i
])
9305 integer_types
[itk_intN_0
+ i
* 2] = int_n_trees
[i
].signed_type
;
9306 integer_types
[itk_unsigned_intN_0
+ i
* 2] = int_n_trees
[i
].unsigned_type
;
9310 /* Define a boolean type. This type only represents boolean values but
9311 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9312 boolean_type_node
= make_unsigned_type (BOOL_TYPE_SIZE
);
9313 TREE_SET_CODE (boolean_type_node
, BOOLEAN_TYPE
);
9314 TYPE_PRECISION (boolean_type_node
) = 1;
9315 TYPE_MAX_VALUE (boolean_type_node
) = build_int_cst (boolean_type_node
, 1);
9317 /* Define what type to use for size_t. */
9318 if (strcmp (SIZE_TYPE
, "unsigned int") == 0)
9319 size_type_node
= unsigned_type_node
;
9320 else if (strcmp (SIZE_TYPE
, "long unsigned int") == 0)
9321 size_type_node
= long_unsigned_type_node
;
9322 else if (strcmp (SIZE_TYPE
, "long long unsigned int") == 0)
9323 size_type_node
= long_long_unsigned_type_node
;
9324 else if (strcmp (SIZE_TYPE
, "short unsigned int") == 0)
9325 size_type_node
= short_unsigned_type_node
;
9330 size_type_node
= NULL_TREE
;
9331 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9332 if (int_n_enabled_p
[i
])
9334 char name
[50], altname
[50];
9335 sprintf (name
, "__int%d unsigned", int_n_data
[i
].bitsize
);
9336 sprintf (altname
, "__int%d__ unsigned", int_n_data
[i
].bitsize
);
9338 if (strcmp (name
, SIZE_TYPE
) == 0
9339 || strcmp (altname
, SIZE_TYPE
) == 0)
9341 size_type_node
= int_n_trees
[i
].unsigned_type
;
9344 if (size_type_node
== NULL_TREE
)
9348 /* Define what type to use for ptrdiff_t. */
9349 if (strcmp (PTRDIFF_TYPE
, "int") == 0)
9350 ptrdiff_type_node
= integer_type_node
;
9351 else if (strcmp (PTRDIFF_TYPE
, "long int") == 0)
9352 ptrdiff_type_node
= long_integer_type_node
;
9353 else if (strcmp (PTRDIFF_TYPE
, "long long int") == 0)
9354 ptrdiff_type_node
= long_long_integer_type_node
;
9355 else if (strcmp (PTRDIFF_TYPE
, "short int") == 0)
9356 ptrdiff_type_node
= short_integer_type_node
;
9359 ptrdiff_type_node
= NULL_TREE
;
9360 for (int i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9361 if (int_n_enabled_p
[i
])
9363 char name
[50], altname
[50];
9364 sprintf (name
, "__int%d", int_n_data
[i
].bitsize
);
9365 sprintf (altname
, "__int%d__", int_n_data
[i
].bitsize
);
9367 if (strcmp (name
, PTRDIFF_TYPE
) == 0
9368 || strcmp (altname
, PTRDIFF_TYPE
) == 0)
9369 ptrdiff_type_node
= int_n_trees
[i
].signed_type
;
9371 if (ptrdiff_type_node
== NULL_TREE
)
9375 /* Fill in the rest of the sized types. Reuse existing type nodes
9377 intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 0);
9378 intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 0);
9379 intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 0);
9380 intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 0);
9381 intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 0);
9383 unsigned_intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 1);
9384 unsigned_intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 1);
9385 unsigned_intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 1);
9386 unsigned_intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 1);
9387 unsigned_intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 1);
9389 /* Don't call build_qualified type for atomics. That routine does
9390 special processing for atomics, and until they are initialized
9391 it's better not to make that call.
9393 Check to see if there is a target override for atomic types. */
9395 atomicQI_type_node
= build_atomic_base (unsigned_intQI_type_node
,
9396 targetm
.atomic_align_for_mode (QImode
));
9397 atomicHI_type_node
= build_atomic_base (unsigned_intHI_type_node
,
9398 targetm
.atomic_align_for_mode (HImode
));
9399 atomicSI_type_node
= build_atomic_base (unsigned_intSI_type_node
,
9400 targetm
.atomic_align_for_mode (SImode
));
9401 atomicDI_type_node
= build_atomic_base (unsigned_intDI_type_node
,
9402 targetm
.atomic_align_for_mode (DImode
));
9403 atomicTI_type_node
= build_atomic_base (unsigned_intTI_type_node
,
9404 targetm
.atomic_align_for_mode (TImode
));
9406 access_public_node
= get_identifier ("public");
9407 access_protected_node
= get_identifier ("protected");
9408 access_private_node
= get_identifier ("private");
9410 /* Define these next since types below may used them. */
9411 integer_zero_node
= build_int_cst (integer_type_node
, 0);
9412 integer_one_node
= build_int_cst (integer_type_node
, 1);
9413 integer_three_node
= build_int_cst (integer_type_node
, 3);
9414 integer_minus_one_node
= build_int_cst (integer_type_node
, -1);
9416 size_zero_node
= size_int (0);
9417 size_one_node
= size_int (1);
9418 bitsize_zero_node
= bitsize_int (0);
9419 bitsize_one_node
= bitsize_int (1);
9420 bitsize_unit_node
= bitsize_int (BITS_PER_UNIT
);
9422 boolean_false_node
= TYPE_MIN_VALUE (boolean_type_node
);
9423 boolean_true_node
= TYPE_MAX_VALUE (boolean_type_node
);
9425 void_type_node
= make_node (VOID_TYPE
);
9426 layout_type (void_type_node
);
9428 /* We are not going to have real types in C with less than byte alignment,
9429 so we might as well not have any types that claim to have it. */
9430 SET_TYPE_ALIGN (void_type_node
, BITS_PER_UNIT
);
9431 TYPE_USER_ALIGN (void_type_node
) = 0;
9433 void_node
= make_node (VOID_CST
);
9434 TREE_TYPE (void_node
) = void_type_node
;
9436 void_list_node
= build_tree_list (NULL_TREE
, void_type_node
);
9438 null_pointer_node
= build_int_cst (build_pointer_type (void_type_node
), 0);
9439 layout_type (TREE_TYPE (null_pointer_node
));
9441 ptr_type_node
= build_pointer_type (void_type_node
);
9443 = build_pointer_type (build_type_variant (void_type_node
, 1, 0));
9444 for (unsigned i
= 0; i
< ARRAY_SIZE (builtin_structptr_types
); ++i
)
9445 builtin_structptr_types
[i
].node
= builtin_structptr_types
[i
].base
;
9447 pointer_sized_int_node
= build_nonstandard_integer_type (POINTER_SIZE
, 1);
9449 float_type_node
= make_node (REAL_TYPE
);
9450 TYPE_PRECISION (float_type_node
) = FLOAT_TYPE_SIZE
;
9451 layout_type (float_type_node
);
9453 double_type_node
= make_node (REAL_TYPE
);
9454 TYPE_PRECISION (double_type_node
) = DOUBLE_TYPE_SIZE
;
9455 layout_type (double_type_node
);
9457 long_double_type_node
= make_node (REAL_TYPE
);
9458 TYPE_PRECISION (long_double_type_node
) = LONG_DOUBLE_TYPE_SIZE
;
9459 layout_type (long_double_type_node
);
9461 for (i
= 0; i
< NUM_FLOATN_NX_TYPES
; i
++)
9463 int n
= floatn_nx_types
[i
].n
;
9464 bool extended
= floatn_nx_types
[i
].extended
;
9465 scalar_float_mode mode
;
9466 if (!targetm
.floatn_mode (n
, extended
).exists (&mode
))
9468 int precision
= GET_MODE_PRECISION (mode
);
9469 /* Work around the rs6000 KFmode having precision 113 not
9471 const struct real_format
*fmt
= REAL_MODE_FORMAT (mode
);
9472 gcc_assert (fmt
->b
== 2 && fmt
->emin
+ fmt
->emax
== 3);
9473 int min_precision
= fmt
->p
+ ceil_log2 (fmt
->emax
- fmt
->emin
);
9475 gcc_assert (min_precision
== n
);
9476 if (precision
< min_precision
)
9477 precision
= min_precision
;
9478 FLOATN_NX_TYPE_NODE (i
) = make_node (REAL_TYPE
);
9479 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i
)) = precision
;
9480 layout_type (FLOATN_NX_TYPE_NODE (i
));
9481 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i
), mode
);
9483 float128t_type_node
= float128_type_node
;
9485 if (REAL_MODE_FORMAT (BFmode
) == &arm_bfloat_half_format
9486 && targetm
.scalar_mode_supported_p (BFmode
)
9487 && targetm
.libgcc_floating_mode_supported_p (BFmode
))
9489 bfloat16_type_node
= make_node (REAL_TYPE
);
9490 TYPE_PRECISION (bfloat16_type_node
) = GET_MODE_PRECISION (BFmode
);
9491 layout_type (bfloat16_type_node
);
9492 SET_TYPE_MODE (bfloat16_type_node
, BFmode
);
9496 float_ptr_type_node
= build_pointer_type (float_type_node
);
9497 double_ptr_type_node
= build_pointer_type (double_type_node
);
9498 long_double_ptr_type_node
= build_pointer_type (long_double_type_node
);
9499 integer_ptr_type_node
= build_pointer_type (integer_type_node
);
9501 /* Fixed size integer types. */
9502 uint16_type_node
= make_or_reuse_type (16, 1);
9503 uint32_type_node
= make_or_reuse_type (32, 1);
9504 uint64_type_node
= make_or_reuse_type (64, 1);
9505 if (targetm
.scalar_mode_supported_p (TImode
))
9506 uint128_type_node
= make_or_reuse_type (128, 1);
9508 /* Decimal float types. */
9509 if (targetm
.decimal_float_supported_p ())
9511 dfloat32_type_node
= make_node (REAL_TYPE
);
9512 TYPE_PRECISION (dfloat32_type_node
) = DECIMAL32_TYPE_SIZE
;
9513 SET_TYPE_MODE (dfloat32_type_node
, SDmode
);
9514 layout_type (dfloat32_type_node
);
9516 dfloat64_type_node
= make_node (REAL_TYPE
);
9517 TYPE_PRECISION (dfloat64_type_node
) = DECIMAL64_TYPE_SIZE
;
9518 SET_TYPE_MODE (dfloat64_type_node
, DDmode
);
9519 layout_type (dfloat64_type_node
);
9521 dfloat128_type_node
= make_node (REAL_TYPE
);
9522 TYPE_PRECISION (dfloat128_type_node
) = DECIMAL128_TYPE_SIZE
;
9523 SET_TYPE_MODE (dfloat128_type_node
, TDmode
);
9524 layout_type (dfloat128_type_node
);
9527 complex_integer_type_node
= build_complex_type (integer_type_node
, true);
9528 complex_float_type_node
= build_complex_type (float_type_node
, true);
9529 complex_double_type_node
= build_complex_type (double_type_node
, true);
9530 complex_long_double_type_node
= build_complex_type (long_double_type_node
,
9533 for (i
= 0; i
< NUM_FLOATN_NX_TYPES
; i
++)
9535 if (FLOATN_NX_TYPE_NODE (i
) != NULL_TREE
)
9536 COMPLEX_FLOATN_NX_TYPE_NODE (i
)
9537 = build_complex_type (FLOATN_NX_TYPE_NODE (i
));
9540 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9541 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9542 sat_ ## KIND ## _type_node = \
9543 make_sat_signed_ ## KIND ## _type (SIZE); \
9544 sat_unsigned_ ## KIND ## _type_node = \
9545 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9546 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9547 unsigned_ ## KIND ## _type_node = \
9548 make_unsigned_ ## KIND ## _type (SIZE);
9550 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9551 sat_ ## WIDTH ## KIND ## _type_node = \
9552 make_sat_signed_ ## KIND ## _type (SIZE); \
9553 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9554 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9555 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9556 unsigned_ ## WIDTH ## KIND ## _type_node = \
9557 make_unsigned_ ## KIND ## _type (SIZE);
9559 /* Make fixed-point type nodes based on four different widths. */
9560 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9561 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9562 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9563 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9564 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9566 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9567 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9568 NAME ## _type_node = \
9569 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9570 u ## NAME ## _type_node = \
9571 make_or_reuse_unsigned_ ## KIND ## _type \
9572 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9573 sat_ ## NAME ## _type_node = \
9574 make_or_reuse_sat_signed_ ## KIND ## _type \
9575 (GET_MODE_BITSIZE (MODE ## mode)); \
9576 sat_u ## NAME ## _type_node = \
9577 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9578 (GET_MODE_BITSIZE (U ## MODE ## mode));
9580 /* Fixed-point type and mode nodes. */
9581 MAKE_FIXED_TYPE_NODE_FAMILY (fract
, FRACT
)
9582 MAKE_FIXED_TYPE_NODE_FAMILY (accum
, ACCUM
)
9583 MAKE_FIXED_MODE_NODE (fract
, qq
, QQ
)
9584 MAKE_FIXED_MODE_NODE (fract
, hq
, HQ
)
9585 MAKE_FIXED_MODE_NODE (fract
, sq
, SQ
)
9586 MAKE_FIXED_MODE_NODE (fract
, dq
, DQ
)
9587 MAKE_FIXED_MODE_NODE (fract
, tq
, TQ
)
9588 MAKE_FIXED_MODE_NODE (accum
, ha
, HA
)
9589 MAKE_FIXED_MODE_NODE (accum
, sa
, SA
)
9590 MAKE_FIXED_MODE_NODE (accum
, da
, DA
)
9591 MAKE_FIXED_MODE_NODE (accum
, ta
, TA
)
9594 tree t
= targetm
.build_builtin_va_list ();
9596 /* Many back-ends define record types without setting TYPE_NAME.
9597 If we copied the record type here, we'd keep the original
9598 record type without a name. This breaks name mangling. So,
9599 don't copy record types and let c_common_nodes_and_builtins()
9600 declare the type to be __builtin_va_list. */
9601 if (TREE_CODE (t
) != RECORD_TYPE
)
9602 t
= build_variant_type_copy (t
);
9604 va_list_type_node
= t
;
9607 /* SCEV analyzer global shared trees. */
9608 chrec_dont_know
= make_node (SCEV_NOT_KNOWN
);
9609 TREE_TYPE (chrec_dont_know
) = void_type_node
;
9610 chrec_known
= make_node (SCEV_KNOWN
);
9611 TREE_TYPE (chrec_known
) = void_type_node
;
/* Modify DECL for given flags.
   TM_PURE attribute is set only on types, so the function will modify
   DECL's type when ECF_TM_PURE is used.  */

void
set_call_expr_flags (tree decl, int flags)
{
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
                                        NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_COLD)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
                                        NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_RET1)
    DECL_ATTRIBUTES (decl)
      = tree_cons (get_identifier ("fn spec"),
                   build_tree_list (NULL_TREE, build_string (2, "1 ")),
                   DECL_ATTRIBUTES (decl));
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure
     alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
              || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}
/* A subroutine of build_common_builtin_nodes.  Define a builtin function.  */

static void
local_define_builtin (const char *name, tree type, enum built_in_function code,
                      const char *library_name, int ecf_flags)
{
  tree decl;

  decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
                               library_name, NULL_TREE);
  set_call_expr_flags (decl, ecf_flags);

  set_builtin_decl (code, decl, true);
}
9672 /* Call this function after instantiating all builtins that the language
9673 front end cares about. This will build the rest of the builtins
9674 and internal functions that are relied upon by the tree optimizers and
9678 build_common_builtin_nodes (void)
9683 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING
))
9685 ftype
= build_function_type_list (void_type_node
,
9690 local_define_builtin ("__builtin_clear_padding", ftype
,
9691 BUILT_IN_CLEAR_PADDING
,
9692 "__builtin_clear_padding",
9693 ECF_LEAF
| ECF_NOTHROW
);
9696 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
)
9697 || !builtin_decl_explicit_p (BUILT_IN_TRAP
)
9698 || !builtin_decl_explicit_p (BUILT_IN_ABORT
))
9700 ftype
= build_function_type (void_type_node
, void_list_node
);
9701 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
))
9702 local_define_builtin ("__builtin_unreachable", ftype
,
9703 BUILT_IN_UNREACHABLE
,
9704 "__builtin_unreachable",
9705 ECF_NOTHROW
| ECF_LEAF
| ECF_NORETURN
9706 | ECF_CONST
| ECF_COLD
);
9707 if (!builtin_decl_explicit_p (BUILT_IN_ABORT
))
9708 local_define_builtin ("__builtin_abort", ftype
, BUILT_IN_ABORT
,
9710 ECF_LEAF
| ECF_NORETURN
| ECF_CONST
| ECF_COLD
);
9711 if (!builtin_decl_explicit_p (BUILT_IN_TRAP
))
9712 local_define_builtin ("__builtin_trap", ftype
, BUILT_IN_TRAP
,
9714 ECF_NORETURN
| ECF_NOTHROW
| ECF_LEAF
| ECF_COLD
);
9717 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
)
9718 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9720 ftype
= build_function_type_list (ptr_type_node
,
9721 ptr_type_node
, const_ptr_type_node
,
9722 size_type_node
, NULL_TREE
);
9724 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
))
9725 local_define_builtin ("__builtin_memcpy", ftype
, BUILT_IN_MEMCPY
,
9726 "memcpy", ECF_NOTHROW
| ECF_LEAF
);
9727 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9728 local_define_builtin ("__builtin_memmove", ftype
, BUILT_IN_MEMMOVE
,
9729 "memmove", ECF_NOTHROW
| ECF_LEAF
);
9732 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP
))
9734 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
9735 const_ptr_type_node
, size_type_node
,
9737 local_define_builtin ("__builtin_memcmp", ftype
, BUILT_IN_MEMCMP
,
9738 "memcmp", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9741 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET
))
9743 ftype
= build_function_type_list (ptr_type_node
,
9744 ptr_type_node
, integer_type_node
,
9745 size_type_node
, NULL_TREE
);
9746 local_define_builtin ("__builtin_memset", ftype
, BUILT_IN_MEMSET
,
9747 "memset", ECF_NOTHROW
| ECF_LEAF
);
9750 /* If we're checking the stack, `alloca' can throw. */
9751 const int alloca_flags
9752 = ECF_MALLOC
| ECF_LEAF
| (flag_stack_check
? 0 : ECF_NOTHROW
);
9754 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA
))
9756 ftype
= build_function_type_list (ptr_type_node
,
9757 size_type_node
, NULL_TREE
);
9758 local_define_builtin ("__builtin_alloca", ftype
, BUILT_IN_ALLOCA
,
9759 "alloca", alloca_flags
);
9762 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9763 size_type_node
, NULL_TREE
);
9764 local_define_builtin ("__builtin_alloca_with_align", ftype
,
9765 BUILT_IN_ALLOCA_WITH_ALIGN
,
9766 "__builtin_alloca_with_align",
9769 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9770 size_type_node
, size_type_node
, NULL_TREE
);
9771 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype
,
9772 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
,
9773 "__builtin_alloca_with_align_and_max",
9776 ftype
= build_function_type_list (void_type_node
,
9777 ptr_type_node
, ptr_type_node
,
9778 ptr_type_node
, NULL_TREE
);
9779 local_define_builtin ("__builtin_init_trampoline", ftype
,
9780 BUILT_IN_INIT_TRAMPOLINE
,
9781 "__builtin_init_trampoline", ECF_NOTHROW
| ECF_LEAF
);
9782 local_define_builtin ("__builtin_init_heap_trampoline", ftype
,
9783 BUILT_IN_INIT_HEAP_TRAMPOLINE
,
9784 "__builtin_init_heap_trampoline",
9785 ECF_NOTHROW
| ECF_LEAF
);
9786 local_define_builtin ("__builtin_init_descriptor", ftype
,
9787 BUILT_IN_INIT_DESCRIPTOR
,
9788 "__builtin_init_descriptor", ECF_NOTHROW
| ECF_LEAF
);
9790 ftype
= build_function_type_list (ptr_type_node
, ptr_type_node
, NULL_TREE
);
9791 local_define_builtin ("__builtin_adjust_trampoline", ftype
,
9792 BUILT_IN_ADJUST_TRAMPOLINE
,
9793 "__builtin_adjust_trampoline",
9794 ECF_CONST
| ECF_NOTHROW
);
9795 local_define_builtin ("__builtin_adjust_descriptor", ftype
,
9796 BUILT_IN_ADJUST_DESCRIPTOR
,
9797 "__builtin_adjust_descriptor",
9798 ECF_CONST
| ECF_NOTHROW
);
9800 ftype
= build_function_type_list (void_type_node
,
9801 ptr_type_node
, ptr_type_node
, NULL_TREE
);
9802 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE
))
9803 local_define_builtin ("__builtin___clear_cache", ftype
,
9804 BUILT_IN_CLEAR_CACHE
,
9808 local_define_builtin ("__builtin_nonlocal_goto", ftype
,
9809 BUILT_IN_NONLOCAL_GOTO
,
9810 "__builtin_nonlocal_goto",
9811 ECF_NORETURN
| ECF_NOTHROW
);
9813 ftype
= build_function_type_list (void_type_node
,
9814 ptr_type_node
, ptr_type_node
, NULL_TREE
);
9815 local_define_builtin ("__builtin_setjmp_setup", ftype
,
9816 BUILT_IN_SETJMP_SETUP
,
9817 "__builtin_setjmp_setup", ECF_NOTHROW
);
9819 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9820 local_define_builtin ("__builtin_setjmp_receiver", ftype
,
9821 BUILT_IN_SETJMP_RECEIVER
,
9822 "__builtin_setjmp_receiver", ECF_NOTHROW
| ECF_LEAF
);
9824 ftype
= build_function_type_list (ptr_type_node
, NULL_TREE
);
9825 local_define_builtin ("__builtin_stack_save", ftype
, BUILT_IN_STACK_SAVE
,
9826 "__builtin_stack_save", ECF_NOTHROW
| ECF_LEAF
);
9828 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9829 local_define_builtin ("__builtin_stack_restore", ftype
,
9830 BUILT_IN_STACK_RESTORE
,
9831 "__builtin_stack_restore", ECF_NOTHROW
| ECF_LEAF
);
9833 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
9834 const_ptr_type_node
, size_type_node
,
9836 local_define_builtin ("__builtin_memcmp_eq", ftype
, BUILT_IN_MEMCMP_EQ
,
9837 "__builtin_memcmp_eq",
9838 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9840 local_define_builtin ("__builtin_strncmp_eq", ftype
, BUILT_IN_STRNCMP_EQ
,
9841 "__builtin_strncmp_eq",
9842 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9844 local_define_builtin ("__builtin_strcmp_eq", ftype
, BUILT_IN_STRCMP_EQ
,
9845 "__builtin_strcmp_eq",
9846 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9848 /* If there's a possibility that we might use the ARM EABI, build the
9849 alternate __cxa_end_cleanup node used to resume from C++. */
9850 if (targetm
.arm_eabi_unwinder
)
9852 ftype
= build_function_type_list (void_type_node
, NULL_TREE
);
9853 local_define_builtin ("__builtin_cxa_end_cleanup", ftype
,
9854 BUILT_IN_CXA_END_CLEANUP
,
9855 "__cxa_end_cleanup", ECF_NORETURN
| ECF_LEAF
);
9858 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9859 local_define_builtin ("__builtin_unwind_resume", ftype
,
9860 BUILT_IN_UNWIND_RESUME
,
9861 ((targetm_common
.except_unwind_info (&global_options
)
9863 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9866 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS
) == NULL_TREE
)
9868 ftype
= build_function_type_list (ptr_type_node
, integer_type_node
,
9870 local_define_builtin ("__builtin_return_address", ftype
,
9871 BUILT_IN_RETURN_ADDRESS
,
9872 "__builtin_return_address",
9876 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
)
9877 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
9879 ftype
= build_function_type_list (void_type_node
, ptr_type_node
,
9880 ptr_type_node
, NULL_TREE
);
9881 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
))
9882 local_define_builtin ("__cyg_profile_func_enter", ftype
,
9883 BUILT_IN_PROFILE_FUNC_ENTER
,
9884 "__cyg_profile_func_enter", 0);
9885 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
9886 local_define_builtin ("__cyg_profile_func_exit", ftype
,
9887 BUILT_IN_PROFILE_FUNC_EXIT
,
9888 "__cyg_profile_func_exit", 0);
9891 /* The exception object and filter values from the runtime. The argument
9892 must be zero before exception lowering, i.e. from the front end. After
9893 exception lowering, it will be the region number for the exception
9894 landing pad. These functions are PURE instead of CONST to prevent
9895 them from being hoisted past the exception edge that will initialize
9896 its value in the landing pad. */
9897 ftype
= build_function_type_list (ptr_type_node
,
9898 integer_type_node
, NULL_TREE
);
9899 ecf_flags
= ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
;
9900 /* Only use TM_PURE if we have TM language support. */
9901 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1
))
9902 ecf_flags
|= ECF_TM_PURE
;
9903 local_define_builtin ("__builtin_eh_pointer", ftype
, BUILT_IN_EH_POINTER
,
9904 "__builtin_eh_pointer", ecf_flags
);
9906 tmp
= lang_hooks
.types
.type_for_mode (targetm
.eh_return_filter_mode (), 0);
9907 ftype
= build_function_type_list (tmp
, integer_type_node
, NULL_TREE
);
9908 local_define_builtin ("__builtin_eh_filter", ftype
, BUILT_IN_EH_FILTER
,
9909 "__builtin_eh_filter", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9911 ftype
= build_function_type_list (void_type_node
,
9912 integer_type_node
, integer_type_node
,
9914 local_define_builtin ("__builtin_eh_copy_values", ftype
,
9915 BUILT_IN_EH_COPY_VALUES
,
9916 "__builtin_eh_copy_values", ECF_NOTHROW
);
9918 /* Complex multiplication and division. These are handled as builtins
9919 rather than optabs because emit_library_call_value doesn't support
9920 complex. Further, we can do slightly better with folding these
9921 beasties if the real and complex parts of the arguments are separate. */
9925 for (mode
= MIN_MODE_COMPLEX_FLOAT
; mode
<= MAX_MODE_COMPLEX_FLOAT
; ++mode
)
9927 char mode_name_buf
[4], *q
;
9929 enum built_in_function mcode
, dcode
;
9930 tree type
, inner_type
;
9931 const char *prefix
= "__";
9933 if (targetm
.libfunc_gnu_prefix
)
9936 type
= lang_hooks
.types
.type_for_mode ((machine_mode
) mode
, 0);
9939 inner_type
= TREE_TYPE (type
);
9941 ftype
= build_function_type_list (type
, inner_type
, inner_type
,
9942 inner_type
, inner_type
, NULL_TREE
);
9944 mcode
= ((enum built_in_function
)
9945 (BUILT_IN_COMPLEX_MUL_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
9946 dcode
= ((enum built_in_function
)
9947 (BUILT_IN_COMPLEX_DIV_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
9949 for (p
= GET_MODE_NAME (mode
), q
= mode_name_buf
; *p
; p
++, q
++)
9953 /* For -ftrapping-math these should throw from a former
9954 -fnon-call-exception stmt. */
9955 built_in_names
[mcode
] = concat (prefix
, "mul", mode_name_buf
, "3",
9957 local_define_builtin (built_in_names
[mcode
], ftype
, mcode
,
9958 built_in_names
[mcode
],
9959 ECF_CONST
| ECF_LEAF
);
9961 built_in_names
[dcode
] = concat (prefix
, "div", mode_name_buf
, "3",
9963 local_define_builtin (built_in_names
[dcode
], ftype
, dcode
,
9964 built_in_names
[dcode
],
9965 ECF_CONST
| ECF_LEAF
);
9969 init_internal_fns ();
/* HACK.  GROSS.  This is absolutely disgusting.  I wish there was a
   better way.

   If we requested a pointer to a vector, build up the pointers that
   we stripped off while looking for the inner type.  Similarly for
   return values from functions.

   The argument TYPE is the top of the chain, and BOTTOM is the
   new type which we will point to.  */

tree
reconstruct_complex_type (tree type, tree bottom)
{
  tree inner, outer;

  if (TREE_CODE (type) == POINTER_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
                                           TYPE_REF_CAN_ALIAS_ALL (type));
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
                                             TYPE_REF_CAN_ALIAS_ALL (type));
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_array_type (inner, TYPE_DOMAIN (type));
    }
  else if (TREE_CODE (type) == FUNCTION_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_function_type (inner, TYPE_ARG_TYPES (type),
                                   TYPE_NO_NAMED_ARGS_STDARG_P (type));
    }
  else if (TREE_CODE (type) == METHOD_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      /* The build_method_type_directly() routine prepends 'this' to the
         argument list, so we must compensate by getting rid of it.  */
      outer
        = build_method_type_directly
            (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
             inner,
             TREE_CHAIN (TYPE_ARG_TYPES (type)));
    }
  else if (TREE_CODE (type) == OFFSET_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
    }
  else
    return bottom;

  return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
                                            TYPE_QUALS (type));
}
/* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
   the inner type.  */

tree
build_vector_type_for_mode (tree innertype, machine_mode mode)
{
  poly_int64 nunits;
  unsigned int bitsize;

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_VECTOR_BOOL:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UACCUM:
      nunits = GET_MODE_NUNITS (mode);
      break;

    case MODE_INT:
      /* Check that there are no leftover bits.  */
      bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
      gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
      nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
      break;

    default:
      gcc_unreachable ();
    }

  return make_vector_type (innertype, nunits, mode);
}
10067 /* Similarly, but takes the inner type and number of units, which must be
10071 build_vector_type (tree innertype
, poly_int64 nunits
)
10073 return make_vector_type (innertype
, nunits
, VOIDmode
);
10076 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10079 build_truth_vector_type_for_mode (poly_uint64 nunits
, machine_mode mask_mode
)
10081 gcc_assert (mask_mode
!= BLKmode
);
10083 unsigned HOST_WIDE_INT esize
;
10084 if (VECTOR_MODE_P (mask_mode
))
10086 poly_uint64 vsize
= GET_MODE_BITSIZE (mask_mode
);
10087 esize
= vector_element_size (vsize
, nunits
);
10092 tree bool_type
= build_nonstandard_boolean_type (esize
);
10094 return make_vector_type (bool_type
, nunits
, mask_mode
);
10097 /* Build a vector type that holds one boolean result for each element of
10098 vector type VECTYPE. The public interface for this operation is
10102 build_truth_vector_type_for (tree vectype
)
10104 machine_mode vector_mode
= TYPE_MODE (vectype
);
10105 poly_uint64 nunits
= TYPE_VECTOR_SUBPARTS (vectype
);
10107 machine_mode mask_mode
;
10108 if (VECTOR_MODE_P (vector_mode
)
10109 && targetm
.vectorize
.get_mask_mode (vector_mode
).exists (&mask_mode
))
10110 return build_truth_vector_type_for_mode (nunits
, mask_mode
);
10112 poly_uint64 vsize
= tree_to_poly_uint64 (TYPE_SIZE (vectype
));
10113 unsigned HOST_WIDE_INT esize
= vector_element_size (vsize
, nunits
);
10114 tree bool_type
= build_nonstandard_boolean_type (esize
);
10116 return make_vector_type (bool_type
, nunits
, VOIDmode
);
10119 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10123 build_opaque_vector_type (tree innertype
, poly_int64 nunits
)
10125 tree t
= make_vector_type (innertype
, nunits
, VOIDmode
);
10127 /* We always build the non-opaque variant before the opaque one,
10128 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10129 cand
= TYPE_NEXT_VARIANT (t
);
10131 && TYPE_VECTOR_OPAQUE (cand
)
10132 && check_qualified_type (cand
, t
, TYPE_QUALS (t
)))
10134 /* Othewise build a variant type and make sure to queue it after
10135 the non-opaque type. */
10136 cand
= build_distinct_type_copy (t
);
10137 TYPE_VECTOR_OPAQUE (cand
) = true;
10138 TYPE_CANONICAL (cand
) = TYPE_CANONICAL (t
);
10139 TYPE_NEXT_VARIANT (cand
) = TYPE_NEXT_VARIANT (t
);
10140 TYPE_NEXT_VARIANT (t
) = cand
;
10141 TYPE_MAIN_VARIANT (cand
) = TYPE_MAIN_VARIANT (t
);
/* Return the value of element I of VECTOR_CST T as a wide_int.  */

static poly_wide_int
vector_cst_int_elt (const_tree t, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
  if (i < encoded_nelts)
    return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  */
  unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
  unsigned int pattern = i % npatterns;
  unsigned int count = i / npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!VECTOR_CST_STEPPED_P (t))
    return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));

  /* Otherwise work out the value from the last two encoded elements.  */
  tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
  tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
  poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
  return wi::to_poly_wide (v2) + (count - 2) * diff;
}
/* Return the value of element I of VECTOR_CST T.  */

tree
vector_cst_elt (const_tree t, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
  if (i < encoded_nelts)
    return VECTOR_CST_ENCODED_ELT (t, i);

  /* If there are no steps, the final encoded value is the right one.  */
  if (!VECTOR_CST_STEPPED_P (t))
    {
      /* Identify the pattern that contains element I and work out the index
         of the last encoded element for that pattern.  */
      unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
      unsigned int pattern = i % npatterns;
      unsigned int final_i = encoded_nelts - npatterns + pattern;
      return VECTOR_CST_ENCODED_ELT (t, final_i);
    }

  /* Otherwise work out the value from the last two encoded elements.  */
  return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
                           vector_cst_int_elt (t, i));
}
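/* Worked example (hypothetical constant, for illustration): a stepped
   VECTOR_CST encoding the 8-element series { 1, 10, 2, 11, 3, 12, 4, 13 }
   uses npatterns == 2 with three encoded elements per pattern, i.e.
   { 1, 10, 2, 11, 3, 12 }.  Requesting element i == 7 gives pattern == 1,
   count == 3 and final_i == 5, so the value is computed from the last two
   encoded elements of that pattern as 12 + (3 - 2) * (12 - 11) == 13,
   matching the series.  */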
10199 /* Given an initializer INIT, return TRUE if INIT is zero or some
10200 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10201 null, set *NONZERO if and only if INIT is known not to be all
10202 zeros. The combination of return value of false and *NONZERO
10203 false implies that INIT may but need not be all zeros. Other
10204 combinations indicate definitive answers. */
10207 initializer_zerop (const_tree init
, bool *nonzero
/* = NULL */)
10213 /* Conservatively clear NONZERO and set it only if INIT is definitely
10219 unsigned HOST_WIDE_INT off
= 0;
10221 switch (TREE_CODE (init
))
10224 if (integer_zerop (init
))
10231 /* ??? Note that this is not correct for C4X float formats. There,
10232 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10233 negative exponent. */
10234 if (real_zerop (init
)
10235 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init
)))
10242 if (fixed_zerop (init
))
10249 if (integer_zerop (init
)
10250 || (real_zerop (init
)
10251 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init
)))
10252 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init
)))))
10259 if (VECTOR_CST_NPATTERNS (init
) == 1
10260 && VECTOR_CST_DUPLICATE_P (init
)
10261 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init
, 0)))
10269 if (TREE_CLOBBER_P (init
))
10272 unsigned HOST_WIDE_INT idx
;
10275 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init
), idx
, elt
)
10276 if (!initializer_zerop (elt
, nonzero
))
10284 tree arg
= TREE_OPERAND (init
, 0);
10285 if (TREE_CODE (arg
) != ADDR_EXPR
)
10287 tree offset
= TREE_OPERAND (init
, 1);
10288 if (TREE_CODE (offset
) != INTEGER_CST
10289 || !tree_fits_uhwi_p (offset
))
10291 off
= tree_to_uhwi (offset
);
10294 arg
= TREE_OPERAND (arg
, 0);
10295 if (TREE_CODE (arg
) != STRING_CST
)
10299 /* Fall through. */
10303 gcc_assert (off
<= INT_MAX
);
10306 int n
= TREE_STRING_LENGTH (init
);
10310 /* We need to loop through all elements to handle cases like
10311 "\0" and "\0foobar". */
10312 for (i
= 0; i
< n
; ++i
)
10313 if (TREE_STRING_POINTER (init
)[i
] != '\0')
10327 /* Return true if EXPR is an initializer expression in which every element
10328 is a constant that is numerically equal to 0 or 1. The elements do not
10329 need to be equal to each other. */
10332 initializer_each_zero_or_onep (const_tree expr
)
10334 STRIP_ANY_LOCATION_WRAPPER (expr
);
10336 switch (TREE_CODE (expr
))
10339 return integer_zerop (expr
) || integer_onep (expr
);
10342 return real_zerop (expr
) || real_onep (expr
);
10346 unsigned HOST_WIDE_INT nelts
= vector_cst_encoded_nelts (expr
);
10347 if (VECTOR_CST_STEPPED_P (expr
)
10348 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr
)).is_constant (&nelts
))
10351 for (unsigned int i
= 0; i
< nelts
; ++i
)
10353 tree elt
= vector_cst_elt (expr
, i
);
10354 if (!initializer_each_zero_or_onep (elt
))
10366 /* Check if vector VEC consists of all the equal elements and
10367 that the number of elements corresponds to the type of VEC.
10368 The function returns first element of the vector
10369 or NULL_TREE if the vector is not uniform. */
10371 uniform_vector_p (const_tree vec
)
10374 unsigned HOST_WIDE_INT i
, nelts
;
10376 if (vec
== NULL_TREE
)
10379 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec
)));
10381 if (TREE_CODE (vec
) == VEC_DUPLICATE_EXPR
)
10382 return TREE_OPERAND (vec
, 0);
10384 else if (TREE_CODE (vec
) == VECTOR_CST
)
10386 if (VECTOR_CST_NPATTERNS (vec
) == 1 && VECTOR_CST_DUPLICATE_P (vec
))
10387 return VECTOR_CST_ENCODED_ELT (vec
, 0);
10391 else if (TREE_CODE (vec
) == CONSTRUCTOR
10392 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec
)).is_constant (&nelts
))
10394 first
= error_mark_node
;
10396 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec
), i
, t
)
10403 if (!operand_equal_p (first
, t
, 0))
10409 if (TREE_CODE (first
) == CONSTRUCTOR
|| TREE_CODE (first
) == VECTOR_CST
)
10410 return uniform_vector_p (first
);
10417 /* If the argument is INTEGER_CST, return it. If the argument is vector
10418 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10420 Look through location wrappers. */
10423 uniform_integer_cst_p (tree t
)
10425 STRIP_ANY_LOCATION_WRAPPER (t
);
10427 if (TREE_CODE (t
) == INTEGER_CST
)
10430 if (VECTOR_TYPE_P (TREE_TYPE (t
)))
10432 t
= uniform_vector_p (t
);
10433 if (t
&& TREE_CODE (t
) == INTEGER_CST
)
10440 /* Checks to see if T is a constant or a constant vector and if each element E
10441 adheres to ~E + 1 == pow2 then return ~E otherwise NULL_TREE. */
10444 bitmask_inv_cst_vector_p (tree t
)
10447 tree_code code
= TREE_CODE (t
);
10448 tree type
= TREE_TYPE (t
);
10450 if (!INTEGRAL_TYPE_P (type
)
10451 && !VECTOR_INTEGER_TYPE_P (type
))
10454 unsigned HOST_WIDE_INT nelts
= 1;
10456 unsigned int idx
= 0;
10457 bool uniform
= uniform_integer_cst_p (t
);
10458 tree newtype
= unsigned_type_for (type
);
10459 tree_vector_builder builder
;
10460 if (code
== INTEGER_CST
)
10464 if (!VECTOR_CST_NELTS (t
).is_constant (&nelts
))
10467 cst
= vector_cst_elt (t
, 0);
10468 builder
.new_vector (newtype
, nelts
, 1);
10471 tree ty
= unsigned_type_for (TREE_TYPE (cst
));
10476 cst
= vector_cst_elt (t
, idx
);
10477 wide_int icst
= wi::to_wide (cst
);
10478 wide_int inv
= wi::bit_not (icst
);
10479 icst
= wi::add (1, inv
);
10480 if (wi::popcount (icst
) != 1)
10483 tree newcst
= wide_int_to_tree (ty
, inv
);
10486 return build_uniform_cst (newtype
, newcst
);
10488 builder
.quick_push (newcst
);
10490 while (++idx
< nelts
);
10492 return builder
.build ();
10495 /* If VECTOR_CST T has a single nonzero element, return the index of that
10496 element, otherwise return -1. */
10499 single_nonzero_element (const_tree t
)
10501 unsigned HOST_WIDE_INT nelts
;
10502 unsigned int repeat_nelts
;
10503 if (VECTOR_CST_NELTS (t
).is_constant (&nelts
))
10504 repeat_nelts
= nelts
;
10505 else if (VECTOR_CST_NELTS_PER_PATTERN (t
) == 2)
10507 nelts
= vector_cst_encoded_nelts (t
);
10508 repeat_nelts
= VECTOR_CST_NPATTERNS (t
);
10514 for (unsigned int i
= 0; i
< nelts
; ++i
)
10516 tree elt
= vector_cst_elt (t
, i
);
10517 if (!integer_zerop (elt
) && !real_zerop (elt
))
10519 if (res
>= 0 || i
>= repeat_nelts
)
/* Build an empty statement at location LOC.  */

tree
build_empty_stmt (location_t loc)
{
  tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
  SET_EXPR_LOCATION (t, loc);
  return t;
}

/* Build an OMP clause with code CODE.  LOC is the location of the
   clause.  */

tree
build_omp_clause (location_t loc, enum omp_clause_code code)
{
  tree t;
  int size, length;

  length = omp_clause_num_ops[code];
  size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));

  record_node_allocation_statistics (OMP_CLAUSE, size);

  t = (tree) ggc_internal_alloc (size);
  memset (t, 0, size);
  TREE_SET_CODE (t, OMP_CLAUSE);
  OMP_CLAUSE_SET_CODE (t, code);
  OMP_CLAUSE_LOCATION (t) = loc;

  return t;
}
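/* Illustrative sketch (hypothetical front-end usage, not from this file):

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = var;
     OMP_CLAUSE_CHAIN (c) = list;

   The allocation above reserves omp_clause_num_ops[code] operand slots,
   which is what accessors such as OMP_CLAUSE_DECL index into.  */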
/* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
   includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
   Except for the CODE and operand count field, other storage for the
   object is initialized to zeros.  */

tree
build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
{
  tree t;
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);

  gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
  gcc_assert (len >= 1);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, code);

  /* Can't use TREE_OPERAND to store the length because if checking is
     enabled, it will try to check the length before we store it.  :-P  */
  t->exp.operands[0] = build_int_cst (sizetype, len);

  return t;
}
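/* A sketch of the resulting layout for calls (inferred from build_call_1
   below, not restated from tree.h): a CALL_EXPR built as
   build_vl_exp (CALL_EXPR, nargs + 3) keeps the total operand count in
   operand 0, the callee and the static chain in operands 1 and 2, and the
   call arguments after that, so the number of actual arguments is the
   stored count minus those three bookkeeping slots.  */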
/* Helper function for build_call_* functions; build a CALL_EXPR with
   indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
   the argument slots.  */

static tree
build_call_1 (tree return_type, tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  TREE_TYPE (t) = return_type;
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}
10605 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10606 FN and a null static chain slot. NARGS is the number of call arguments
10607 which are specified as "..." arguments. */
10610 build_call_nary (tree return_type
, tree fn
, int nargs
, ...)
10614 va_start (args
, nargs
);
10615 ret
= build_call_valist (return_type
, fn
, nargs
, args
);
10620 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10621 FN and a null static chain slot. NARGS is the number of call arguments
10622 which are specified as a va_list ARGS. */
10625 build_call_valist (tree return_type
, tree fn
, int nargs
, va_list args
)
10630 t
= build_call_1 (return_type
, fn
, nargs
);
10631 for (i
= 0; i
< nargs
; i
++)
10632 CALL_EXPR_ARG (t
, i
) = va_arg (args
, tree
);
10633 process_call_operands (t
);
10637 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10638 FN and a null static chain slot. NARGS is the number of call arguments
10639 which are specified as a tree array ARGS. */
10642 build_call_array_loc (location_t loc
, tree return_type
, tree fn
,
10643 int nargs
, const tree
*args
)
10648 t
= build_call_1 (return_type
, fn
, nargs
);
10649 for (i
= 0; i
< nargs
; i
++)
10650 CALL_EXPR_ARG (t
, i
) = args
[i
];
10651 process_call_operands (t
);
10652 SET_EXPR_LOCATION (t
, loc
);
10656 /* Like build_call_array, but takes a vec. */
10659 build_call_vec (tree return_type
, tree fn
, const vec
<tree
, va_gc
> *args
)
10664 ret
= build_call_1 (return_type
, fn
, vec_safe_length (args
));
10665 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
10666 CALL_EXPR_ARG (ret
, ix
) = t
;
10667 process_call_operands (ret
);
10671 /* Conveniently construct a function call expression. FNDECL names the
10672 function to be called and N arguments are passed in the array
10676 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
10678 tree fntype
= TREE_TYPE (fndecl
);
10679 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10681 return fold_build_call_array_loc (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
10684 /* Conveniently construct a function call expression. FNDECL names the
10685 function to be called and the arguments are passed in the vector
10689 build_call_expr_loc_vec (location_t loc
, tree fndecl
, vec
<tree
, va_gc
> *vec
)
10691 return build_call_expr_loc_array (loc
, fndecl
, vec_safe_length (vec
),
10692 vec_safe_address (vec
));
10696 /* Conveniently construct a function call expression. FNDECL names the
10697 function to be called, N is the number of arguments, and the "..."
10698 parameters are the argument expressions. */
10701 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
10704 tree
*argarray
= XALLOCAVEC (tree
, n
);
10708 for (i
= 0; i
< n
; i
++)
10709 argarray
[i
] = va_arg (ap
, tree
);
10711 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
10714 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10715 varargs macros aren't supported by all bootstrap compilers. */
10718 build_call_expr (tree fndecl
, int n
, ...)
10721 tree
*argarray
= XALLOCAVEC (tree
, n
);
10725 for (i
= 0; i
< n
; i
++)
10726 argarray
[i
] = va_arg (ap
, tree
);
10728 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
10731 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10732 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10733 It will get gimplified later into an ordinary internal function. */
10736 build_call_expr_internal_loc_array (location_t loc
, internal_fn ifn
,
10737 tree type
, int n
, const tree
*args
)
10739 tree t
= build_call_1 (type
, NULL_TREE
, n
);
10740 for (int i
= 0; i
< n
; ++i
)
10741 CALL_EXPR_ARG (t
, i
) = args
[i
];
10742 SET_EXPR_LOCATION (t
, loc
);
10743 CALL_EXPR_IFN (t
) = ifn
;
10744 process_call_operands (t
);
10748 /* Build internal call expression. This is just like CALL_EXPR, except
10749 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10750 internal function. */
10753 build_call_expr_internal_loc (location_t loc
, enum internal_fn ifn
,
10754 tree type
, int n
, ...)
10757 tree
*argarray
= XALLOCAVEC (tree
, n
);
10761 for (i
= 0; i
< n
; i
++)
10762 argarray
[i
] = va_arg (ap
, tree
);
10764 return build_call_expr_internal_loc_array (loc
, ifn
, type
, n
, argarray
);
10767 /* Return a function call to FN, if the target is guaranteed to support it,
10770 N is the number of arguments, passed in the "...", and TYPE is the
10771 type of the return value. */
10774 maybe_build_call_expr_loc (location_t loc
, combined_fn fn
, tree type
,
10778 tree
*argarray
= XALLOCAVEC (tree
, n
);
10782 for (i
= 0; i
< n
; i
++)
10783 argarray
[i
] = va_arg (ap
, tree
);
10785 if (internal_fn_p (fn
))
10787 internal_fn ifn
= as_internal_fn (fn
);
10788 if (direct_internal_fn_p (ifn
))
10790 tree_pair types
= direct_internal_fn_types (ifn
, type
, argarray
);
10791 if (!direct_internal_fn_supported_p (ifn
, types
,
10792 OPTIMIZE_FOR_BOTH
))
10795 return build_call_expr_internal_loc_array (loc
, ifn
, type
, n
, argarray
);
10799 tree fndecl
= builtin_decl_implicit (as_builtin_fn (fn
));
10802 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
/* Return a function call to the appropriate builtin alloca variant.

   SIZE is the size to be allocated.  ALIGN, if non-zero, is the requested
   alignment of the allocated area.  MAX_SIZE, if non-negative, is an upper
   bound for SIZE in case it is not a fixed value.  */

tree
build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
{
  if (max_size >= 0)
    {
      tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
      return
        build_call_expr (t, 3, size, size_int (align), size_int (max_size));
    }
  else if (align > 0)
    {
      tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
      return build_call_expr (t, 2, size, size_int (align));
    }
  else
    {
      tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
      return build_call_expr (t, 1, size);
    }
}
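/* Illustrative sketch (hypothetical arguments, not from this file):

     build_alloca_call_expr (size, 0, -1)
       -> a CALL_EXPR of __builtin_alloca (size)
     build_alloca_call_expr (size, align, -1)   with align > 0
       -> a CALL_EXPR of __builtin_alloca_with_align (size, align)

   i.e. which builtin variant is used depends only on whether an alignment
   and/or a maximum size was supplied.  */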
10833 /* The built-in decl to use to mark code points believed to be unreachable.
10834 Typically __builtin_unreachable, but __builtin_trap if
10835 -fsanitize=unreachable -fsanitize-trap=unreachable. If only
10836 -fsanitize=unreachable, we rely on sanopt to replace calls with the
10837 appropriate ubsan function. When building a call directly, use
10838 {gimple_},build_builtin_unreachable instead. */
10841 builtin_decl_unreachable ()
10843 enum built_in_function fncode
= BUILT_IN_UNREACHABLE
;
10845 if (sanitize_flags_p (SANITIZE_UNREACHABLE
)
10846 ? (flag_sanitize_trap
& SANITIZE_UNREACHABLE
)
10847 : flag_unreachable_traps
)
10848 fncode
= BUILT_IN_TRAP
;
10849 /* For non-trapping sanitize, we will rewrite __builtin_unreachable () later,
10850 in the sanopt pass. */
10852 return builtin_decl_explicit (fncode
);
/* Build a call to __builtin_unreachable, possibly rewritten by
   -fsanitize=unreachable.  Use this rather than the above when practical.  */

tree
build_builtin_unreachable (location_t loc)
{
  tree data = NULL_TREE;
  tree fn = sanitize_unreachable_fn (&data, loc);
  return build_call_expr_loc (loc, fn, data != NULL_TREE, data);
}
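/* Illustrative sketch: a front end that has just emitted a call known not to
   return could follow it with

     tree stmt = build_builtin_unreachable (input_location);

   and append `stmt' to its statement list; under -fsanitize=unreachable the
   callee is chosen by sanitize_unreachable_fn as described above.  */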
/* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
   if SIZE == -1) and return a tree node representing char* pointer to
   it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)).  When STR is nonnull
   the STRING_CST value is the LEN bytes at STR (the representation
   of the string, which may be wide).  Otherwise it's all zeros.  */

tree
build_string_literal (unsigned len, const char *str /* = NULL */,
                      tree eltype /* = char_type_node */,
                      unsigned HOST_WIDE_INT size /* = -1 */)
{
  tree t = build_string (len, str);
  /* Set the maximum valid index based on the string length or SIZE.  */
  unsigned HOST_WIDE_INT maxidx
    = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
  tree index = build_index_type (size_int (maxidx));
  eltype = build_type_variant (eltype, 1, 0);
  tree type = build_array_type (eltype, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  type = build_pointer_type (eltype);
  t = build1 (ADDR_EXPR, type,
              build4 (ARRAY_REF, eltype,
                      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
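/* Illustrative sketch: the address of the constant narrow string "hi\n",
   including its terminating NUL, is obtained with

     tree str = build_string_literal (4, "hi\n");

   using the default ELTYPE of char_type_node and SIZE of -1, so the literal
   gets type char[4] and the result is an ADDR_EXPR pointing to its first
   (const char) element.  */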
/* Return true if T (assumed to be a DECL) must be assigned a memory
   location.  */

bool
needs_to_live_in_memory (const_tree t)
{
  return (TREE_ADDRESSABLE (t)
          || is_global_var (t)
          || (TREE_CODE (t) == RESULT_DECL
              && !DECL_BY_REFERENCE (t)
              && aggregate_value_p (t, current_function_decl)));
}
/* Return the value of the constant X, sign-extended to a HOST_WIDE_INT.  */

HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
        val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
        val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }

  return val;
}
/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is unsigned iff UNSIGNEDP is true, or itself
   if TYPE is already an integer type of signedness UNSIGNEDP.
   If TYPE is a floating-point type, return an integer type with the same
   bitsize and with the signedness given by UNSIGNEDP; this is useful
   when doing bit-level operations on a floating-point value.  */

tree
signed_or_unsigned_type_for (int unsignedp, tree type)
{
  if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
    return type;

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      tree inner = TREE_TYPE (type);
      tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
      if (!inner2)
        return NULL_TREE;
      if (inner == inner2)
        return type;
      return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
    }

  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree inner = TREE_TYPE (type);
      tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
      if (!inner2)
        return NULL_TREE;
      if (inner == inner2)
        return type;
      return build_complex_type (inner2);
    }

  unsigned int bits;
  if (INTEGRAL_TYPE_P (type)
      || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    bits = TYPE_PRECISION (type);
  else if (TREE_CODE (type) == REAL_TYPE)
    bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
  else
    return NULL_TREE;

  return build_nonstandard_integer_type (bits, unsignedp);
}
/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is unsigned, or itself if TYPE is already an
   unsigned integer type.  If TYPE is a floating-point type, return an
   unsigned integer type with the same bitsize as TYPE.  */

tree
unsigned_type_for (tree type)
{
  return signed_or_unsigned_type_for (1, type);
}

/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is signed, or itself if TYPE is already a
   signed integer type.  If TYPE is a floating-point type, return a
   signed integer type with the same bitsize as TYPE.  */

tree
signed_type_for (tree type)
{
  return signed_or_unsigned_type_for (0, type);
}
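/* Illustrative sketch: unsigned_type_for (integer_type_node) yields the
   unsigned integer type of `int' precision via
   build_nonstandard_integer_type, and unsigned_type_for (ptr_type_node)
   yields an unsigned integer type of pointer precision.  A common pattern is

     tree u = unsigned_type_for (TREE_TYPE (op));

   to switch an operand to its unsigned counterpart before performing
   wrapping arithmetic; `op' is a hypothetical operand used only for this
   example.  */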
/* - For VECTOR_TYPEs:
     - The truth type must be a VECTOR_BOOLEAN_TYPE.
     - The number of elements must match (known_eq).
     - targetm.vectorize.get_mask_mode exists, and exactly
       the same mode as the truth type.
   - Otherwise, the truth type must be a BOOLEAN_TYPE
     or useless_type_conversion_p to BOOLEAN_TYPE.  */

bool
is_truth_type_for (tree type, tree truth_type)
{
  machine_mode mask_mode = TYPE_MODE (truth_type);
  machine_mode vmode = TYPE_MODE (type);
  machine_mode tmask_mode;

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if (VECTOR_BOOLEAN_TYPE_P (truth_type)
          && known_eq (TYPE_VECTOR_SUBPARTS (type),
                       TYPE_VECTOR_SUBPARTS (truth_type))
          && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
          && tmask_mode == mask_mode)
        return true;

      return false;
    }

  return useless_type_conversion_p (boolean_type_node, truth_type);
}
/* If TYPE is a vector type, return a signed integer vector type with the
   same width and number of subparts.  Otherwise return boolean_type_node.  */

tree
truth_type_for (tree type)
{
  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if (VECTOR_BOOLEAN_TYPE_P (type))
        return type;
      return build_truth_vector_type_for (type);
    }
  else
    return boolean_type_node;
}
/* Returns the largest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
upper_bound_in_type (tree outer, tree inner)
{
  unsigned int det = 0;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);
  unsigned prec;

  /* Compute a unique number for every combination.  */
  det |= (oprec > iprec) ? 4 : 0;
  det |= TYPE_UNSIGNED (outer) ? 2 : 0;
  det |= TYPE_UNSIGNED (inner) ? 1 : 0;

  /* Determine the exponent to use.  */
  switch (det)
    {
    case 0:
    case 1:
      /* oprec <= iprec, outer: signed, inner: don't care.  */
      prec = oprec - 1;
      break;
    case 2:
    case 3:
      /* oprec <= iprec, outer: unsigned, inner: don't care.  */
      prec = oprec;
      break;
    case 4:
      /* oprec > iprec, outer: signed, inner: signed.  */
      prec = iprec - 1;
      break;
    case 5:
      /* oprec > iprec, outer: signed, inner: unsigned.  */
      prec = iprec;
      break;
    case 6:
      /* oprec > iprec, outer: unsigned, inner: signed.  */
      prec = oprec;
      break;
    case 7:
      /* oprec > iprec, outer: unsigned, inner: unsigned.  */
      prec = iprec;
      break;
    default:
      gcc_unreachable ();
    }

  return wide_int_to_tree (outer,
                           wi::mask (prec, false, TYPE_PRECISION (outer)));
}
/* Returns the smallest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
lower_bound_in_type (tree outer, tree inner)
{
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);

  /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
     and obtain zero.  */
  if (TYPE_UNSIGNED (outer)
      /* If we are widening something of an unsigned type, OUTER type
         contains all values of INNER type.  In particular, both INNER
         and OUTER types have zero in common.  */
      || (oprec > iprec && TYPE_UNSIGNED (inner)))
    return build_int_cst (outer, 0);
  else
    {
      /* If we are widening a signed type to another signed type, we
         want to obtain -2^(iprec-1).  If we are keeping the
         precision or narrowing to a signed type, we want to obtain
         -2^(oprec-1).  */
      unsigned prec = oprec > iprec ? iprec : oprec;
      return wide_int_to_tree (outer,
                               wi::mask (prec - 1, true,
                                         TYPE_PRECISION (outer)));
    }
}
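/* Illustrative sketch of the two bound functions, using hypothetical 8- and
   16-bit integer types int8/int16/uint16:

     upper_bound_in_type (uint16, int8)  ->  0xffff   (det == 6, prec == 16)
     lower_bound_in_type (uint16, int8)  ->  0        (OUTER is unsigned)
     upper_bound_in_type (int8, int16)   ->  0x7f     (det == 0, prec == 7)
     lower_bound_in_type (int8, int16)   -> -0x80

   i.e. casting a negative 8-bit value to uint16 can reach the full unsigned
   range, while narrowing to int8 is limited to int8's own signed range.  */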
/* Return nonzero if two operands that are suitable for PHI nodes are
   necessarily equal.  Specifically, both ARG0 and ARG1 must be either
   SSA_NAME or invariant.  Note that this is strictly an optimization.
   That is, callers of this function can directly call operand_equal_p
   and get the same result, only slower.  */

bool
operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
{
  if (arg0 == arg1)
    return true;
  if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
    return false;

  return operand_equal_p (arg0, arg1, 0);
}

/* Returns the number of zeros at the end of the binary representation of X.  */

tree
num_ending_zeros (const_tree x)
{
  return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
}
11158 #define WALK_SUBTREE(NODE) \
11161 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11167 /* This is a subroutine of walk_tree that walks field of TYPE that are to
11168 be walked whenever a type is seen in the tree. Rest of operands and return
11169 value are as for walk_tree. */
11172 walk_type_fields (tree type
, walk_tree_fn func
, void *data
,
11173 hash_set
<tree
> *pset
, walk_tree_lh lh
)
11175 tree result
= NULL_TREE
;
11177 switch (TREE_CODE (type
))
11180 case REFERENCE_TYPE
:
11182 /* We have to worry about mutually recursive pointers. These can't
11183 be written in C. They can in Ada. It's pathological, but
11184 there's an ACATS test (c38102a) that checks it. Deal with this
11185 by checking if we're pointing to another pointer, that one
11186 points to another pointer, that one does too, and we have no htab.
11187 If so, get a hash table. We check three levels deep to avoid
11188 the cost of the hash table if we don't need one. */
11189 if (POINTER_TYPE_P (TREE_TYPE (type
))
11190 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type
)))
11191 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type
))))
11194 result
= walk_tree_without_duplicates (&TREE_TYPE (type
),
11205 WALK_SUBTREE (TREE_TYPE (type
));
11209 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type
));
11211 /* Fall through. */
11213 case FUNCTION_TYPE
:
11214 WALK_SUBTREE (TREE_TYPE (type
));
11218 /* We never want to walk into default arguments. */
11219 for (arg
= TYPE_ARG_TYPES (type
); arg
; arg
= TREE_CHAIN (arg
))
11220 WALK_SUBTREE (TREE_VALUE (arg
));
11225 /* Don't follow this nodes's type if a pointer for fear that
11226 we'll have infinite recursion. If we have a PSET, then we
11229 || (!POINTER_TYPE_P (TREE_TYPE (type
))
11230 && TREE_CODE (TREE_TYPE (type
)) != OFFSET_TYPE
))
11231 WALK_SUBTREE (TREE_TYPE (type
));
11232 WALK_SUBTREE (TYPE_DOMAIN (type
));
11236 WALK_SUBTREE (TREE_TYPE (type
));
11237 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type
));
11247 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11248 called with the DATA and the address of each sub-tree. If FUNC returns a
11249 non-NULL value, the traversal is stopped, and the value returned by FUNC
11250 is returned. If PSET is non-NULL it is used to record the nodes visited,
11251 and to avoid visiting a node more than once. */
11254 walk_tree_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11255 hash_set
<tree
> *pset
, walk_tree_lh lh
)
11257 enum tree_code code
;
11261 #define WALK_SUBTREE_TAIL(NODE) \
11265 goto tail_recurse; \
11270 /* Skip empty subtrees. */
11274 /* Don't walk the same tree twice, if the user has requested
11275 that we avoid doing so. */
11276 if (pset
&& pset
->add (*tp
))
11279 /* Call the function. */
11281 result
= (*func
) (tp
, &walk_subtrees
, data
);
11283 /* If we found something, return it. */
11287 code
= TREE_CODE (*tp
);
11289 /* Even if we didn't, FUNC may have decided that there was nothing
11290 interesting below this point in the tree. */
11291 if (!walk_subtrees
)
11293 /* But we still need to check our siblings. */
11294 if (code
== TREE_LIST
)
11295 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11296 else if (code
== OMP_CLAUSE
)
11297 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11304 result
= (*lh
) (tp
, &walk_subtrees
, func
, data
, pset
);
11305 if (result
|| !walk_subtrees
)
11312 case IDENTIFIER_NODE
:
11318 case PLACEHOLDER_EXPR
:
11322 /* None of these have subtrees other than those already walked
11327 WALK_SUBTREE (TREE_VALUE (*tp
));
11328 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11332 int len
= TREE_VEC_LENGTH (*tp
);
11337 /* Walk all elements but the first. */
11339 WALK_SUBTREE (TREE_VEC_ELT (*tp
, len
));
11341 /* Now walk the first one as a tail call. */
11342 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp
, 0));
11347 unsigned len
= vector_cst_encoded_nelts (*tp
);
11350 /* Walk all elements but the first. */
11352 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp
, len
));
11353 /* Now walk the first one as a tail call. */
11354 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp
, 0));
11358 WALK_SUBTREE (TREE_REALPART (*tp
));
11359 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp
));
11363 unsigned HOST_WIDE_INT idx
;
11364 constructor_elt
*ce
;
11366 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp
), idx
, &ce
);
11368 WALK_SUBTREE (ce
->value
);
11373 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, 0));
11378 for (decl
= BIND_EXPR_VARS (*tp
); decl
; decl
= DECL_CHAIN (decl
))
11380 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11381 into declarations that are just mentioned, rather than
11382 declared; they don't really belong to this part of the tree.
11383 And, we can see cycles: the initializer for a declaration
11384 can refer to the declaration itself. */
11385 WALK_SUBTREE (DECL_INITIAL (decl
));
11386 WALK_SUBTREE (DECL_SIZE (decl
));
11387 WALK_SUBTREE (DECL_SIZE_UNIT (decl
));
11389 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp
));
11392 case STATEMENT_LIST
:
11394 tree_stmt_iterator i
;
11395 for (i
= tsi_start (*tp
); !tsi_end_p (i
); tsi_next (&i
))
11396 WALK_SUBTREE (*tsi_stmt_ptr (i
));
11402 int len
= omp_clause_num_ops
[OMP_CLAUSE_CODE (*tp
)];
11403 for (int i
= 0; i
< len
; i
++)
11404 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11405 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11412 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11413 But, we only want to walk once. */
11414 len
= (TREE_OPERAND (*tp
, 3) == TREE_OPERAND (*tp
, 1)) ? 2 : 3;
11415 for (i
= 0; i
< len
; ++i
)
11416 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11417 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
));
11421 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11422 defining. We only want to walk into these fields of a type in this
11423 case and not in the general case of a mere reference to the type.
11425 The criterion is as follows: if the field can be an expression, it
11426 must be walked only here. This should be in keeping with the fields
11427 that are directly gimplified in gimplify_type_sizes in order for the
11428 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11429 variable-sized types.
11431 Note that DECLs get walked as part of processing the BIND_EXPR. */
11432 if (TREE_CODE (DECL_EXPR_DECL (*tp
)) == TYPE_DECL
)
11434 /* Call the function for the decl so e.g. copy_tree_body_r can
11435 replace it with the remapped one. */
11436 result
= (*func
) (&DECL_EXPR_DECL (*tp
), &walk_subtrees
, data
);
11437 if (result
|| !walk_subtrees
)
11440 tree
*type_p
= &TREE_TYPE (DECL_EXPR_DECL (*tp
));
11441 if (TREE_CODE (*type_p
) == ERROR_MARK
)
11444 /* Call the function for the type. See if it returns anything or
11445 doesn't want us to continue. If we are to continue, walk both
11446 the normal fields and those for the declaration case. */
11447 result
= (*func
) (type_p
, &walk_subtrees
, data
);
11448 if (result
|| !walk_subtrees
)
11451 /* But do not walk a pointed-to type since it may itself need to
11452 be walked in the declaration case if it isn't anonymous. */
11453 if (!POINTER_TYPE_P (*type_p
))
11455 result
= walk_type_fields (*type_p
, func
, data
, pset
, lh
);
11460 /* If this is a record type, also walk the fields. */
11461 if (RECORD_OR_UNION_TYPE_P (*type_p
))
11465 for (field
= TYPE_FIELDS (*type_p
); field
;
11466 field
= DECL_CHAIN (field
))
11468 /* We'd like to look at the type of the field, but we can
11469 easily get infinite recursion. So assume it's pointed
11470 to elsewhere in the tree. Also, ignore things that
11472 if (TREE_CODE (field
) != FIELD_DECL
)
11475 WALK_SUBTREE (DECL_FIELD_OFFSET (field
));
11476 WALK_SUBTREE (DECL_SIZE (field
));
11477 WALK_SUBTREE (DECL_SIZE_UNIT (field
));
11478 if (TREE_CODE (*type_p
) == QUAL_UNION_TYPE
)
11479 WALK_SUBTREE (DECL_QUALIFIER (field
));
11483 /* Same for scalar types. */
11484 else if (TREE_CODE (*type_p
) == BOOLEAN_TYPE
11485 || TREE_CODE (*type_p
) == ENUMERAL_TYPE
11486 || TREE_CODE (*type_p
) == INTEGER_TYPE
11487 || TREE_CODE (*type_p
) == FIXED_POINT_TYPE
11488 || TREE_CODE (*type_p
) == REAL_TYPE
)
11490 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p
));
11491 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p
));
11494 WALK_SUBTREE (TYPE_SIZE (*type_p
));
11495 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p
));
11500 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
11504 /* Walk over all the sub-trees of this operand. */
11505 len
= TREE_OPERAND_LENGTH (*tp
);
11507 /* Go through the subtrees. We need to do this in forward order so
11508 that the scope of a FOR_EXPR is handled properly. */
11511 for (i
= 0; i
< len
- 1; ++i
)
11512 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11513 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
- 1));
11516 /* If this is a type, walk the needed fields in the type. */
11517 else if (TYPE_P (*tp
))
11518 return walk_type_fields (*tp
, func
, data
, pset
, lh
);
11522 /* We didn't find what we were looking for. */
11525 #undef WALK_SUBTREE_TAIL
11527 #undef WALK_SUBTREE
/* Like walk_tree, but does not walk duplicate nodes more than once.  */

tree
walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
                                walk_tree_lh lh)
{
  tree result;

  hash_set<tree> pset;
  result = walk_tree_1 (tp, func, data, &pset, lh);
  return result;
}
tree
tree_block (tree t)
{
  const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));

  if (IS_EXPR_CODE_CLASS (c))
    return LOCATION_BLOCK (t->exp.locus);
  gcc_unreachable ();
  return NULL;
}

void
tree_set_block (tree t, tree b)
{
  const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));

  if (IS_EXPR_CODE_CLASS (c))
    t->exp.locus = set_block (t->exp.locus, b);
  else
    gcc_unreachable ();
}
/* Create a nameless artificial label and put it in the current
   function context.  The label has a location of LOC.  Returns the
   newly created label.  */

tree
create_artificial_label (location_t loc)
{
  tree lab = build_decl (loc,
                         LABEL_DECL, NULL_TREE, void_type_node);

  DECL_ARTIFICIAL (lab) = 1;
  DECL_IGNORED_P (lab) = 1;
  DECL_CONTEXT (lab) = current_function_decl;
  return lab;
}
/* Given a tree, try to return a useful variable name that we can use
   to prefix a temporary that is being assigned the value of the tree.
   I.e. given  <temp> = &A, return A.  */

const char *
get_name (tree t)
{
  tree stripped_decl;

  stripped_decl = t;
  STRIP_NOPS (stripped_decl);
  if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
    return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
  else if (TREE_CODE (stripped_decl) == SSA_NAME)
    {
      tree name = SSA_NAME_IDENTIFIER (stripped_decl);
      if (!name)
        return NULL;
      return IDENTIFIER_POINTER (name);
    }
  else
    {
      switch (TREE_CODE (stripped_decl))
        {
        case ADDR_EXPR:
          return get_name (TREE_OPERAND (stripped_decl, 0));
        default:
          return NULL;
        }
    }
}
/* Return true if TYPE has a variable argument list.  */

bool
stdarg_p (const_tree fntype)
{
  function_args_iterator args_iter;
  tree n = NULL_TREE, t;

  if (!fntype)
    return false;

  if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
    return true;

  FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
    {
      n = t;
    }

  return n != NULL_TREE && n != void_type_node;
}

/* Return true if TYPE has a prototype.  */

bool
prototype_p (const_tree fntype)
{
  tree t;

  gcc_assert (fntype != NULL_TREE);

  if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype))
    return true;

  t = TYPE_ARG_TYPES (fntype);
  return (t != NULL_TREE);
}
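/* Illustrative sketch: for the C declarations

     int f (int, ...);    int g (void);    int h ();

   stdarg_p is true only for f's type, while prototype_p is true for f and g
   but false for the unprototyped (pre-C23) h, whose TYPE_ARG_TYPES is
   NULL_TREE.  */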
11653 /* If BLOCK is inlined from an __attribute__((__artificial__))
11654 routine, return pointer to location from where it has been
11657 block_nonartificial_location (tree block
)
11659 location_t
*ret
= NULL
;
11661 while (block
&& TREE_CODE (block
) == BLOCK
11662 && BLOCK_ABSTRACT_ORIGIN (block
))
11664 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11665 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11667 /* If AO is an artificial inline, point RET to the
11668 call site locus at which it has been inlined and continue
11669 the loop, in case AO's caller is also an artificial
11671 if (DECL_DECLARED_INLINE_P (ao
)
11672 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao
)))
11673 ret
= &BLOCK_SOURCE_LOCATION (block
);
11677 else if (TREE_CODE (ao
) != BLOCK
)
11680 block
= BLOCK_SUPERCONTEXT (block
);
11686 /* If EXP is inlined from an __attribute__((__artificial__))
11687 function, return the location of the original call expression. */
11690 tree_nonartificial_location (tree exp
)
11692 location_t
*loc
= block_nonartificial_location (TREE_BLOCK (exp
));
11697 return EXPR_LOCATION (exp
);
11700 /* Return the location into which EXP has been inlined. Analogous
11701 to tree_nonartificial_location() above but not limited to artificial
11702 functions declared inline. If SYSTEM_HEADER is true, return
11703 the macro expansion point of the location if it's in a system header */
11706 tree_inlined_location (tree exp
, bool system_header
/* = true */)
11708 location_t loc
= UNKNOWN_LOCATION
;
11710 tree block
= TREE_BLOCK (exp
);
11712 while (block
&& TREE_CODE (block
) == BLOCK
11713 && BLOCK_ABSTRACT_ORIGIN (block
))
11715 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11716 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11717 loc
= BLOCK_SOURCE_LOCATION (block
);
11718 else if (TREE_CODE (ao
) != BLOCK
)
11721 block
= BLOCK_SUPERCONTEXT (block
);
11724 if (loc
== UNKNOWN_LOCATION
)
11726 loc
= EXPR_LOCATION (exp
);
11728 /* Only consider macro expansion when the block traversal failed
11729 to find a location. Otherwise it's not relevant. */
11730 return expansion_point_location_if_in_system_header (loc
);
11736 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11739 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11742 cl_option_hasher::hash (tree x
)
11744 const_tree
const t
= x
;
11746 if (TREE_CODE (t
) == OPTIMIZATION_NODE
)
11747 return cl_optimization_hash (TREE_OPTIMIZATION (t
));
11748 else if (TREE_CODE (t
) == TARGET_OPTION_NODE
)
11749 return cl_target_option_hash (TREE_TARGET_OPTION (t
));
11751 gcc_unreachable ();
11754 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11755 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11759 cl_option_hasher::equal (tree x
, tree y
)
11761 const_tree
const xt
= x
;
11762 const_tree
const yt
= y
;
11764 if (TREE_CODE (xt
) != TREE_CODE (yt
))
11767 if (TREE_CODE (xt
) == OPTIMIZATION_NODE
)
11768 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt
),
11769 TREE_OPTIMIZATION (yt
));
11770 else if (TREE_CODE (xt
) == TARGET_OPTION_NODE
)
11771 return cl_target_option_eq (TREE_TARGET_OPTION (xt
),
11772 TREE_TARGET_OPTION (yt
));
11774 gcc_unreachable ();
11777 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11780 build_optimization_node (struct gcc_options
*opts
,
11781 struct gcc_options
*opts_set
)
11785 /* Use the cache of optimization nodes. */
11787 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node
),
11790 tree
*slot
= cl_option_hash_table
->find_slot (cl_optimization_node
, INSERT
);
11794 /* Insert this one into the hash table. */
11795 t
= cl_optimization_node
;
11798 /* Make a new node for next time round. */
11799 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
11805 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11808 build_target_option_node (struct gcc_options
*opts
,
11809 struct gcc_options
*opts_set
)
11813 /* Use the cache of optimization nodes. */
11815 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node
),
11818 tree
*slot
= cl_option_hash_table
->find_slot (cl_target_option_node
, INSERT
);
11822 /* Insert this one into the hash table. */
11823 t
= cl_target_option_node
;
11826 /* Make a new node for next time round. */
11827 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
11833 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11834 so that they aren't saved during PCH writing. */
11837 prepare_target_option_nodes_for_pch (void)
11839 hash_table
<cl_option_hasher
>::iterator iter
= cl_option_hash_table
->begin ();
11840 for (; iter
!= cl_option_hash_table
->end (); ++iter
)
11841 if (TREE_CODE (*iter
) == TARGET_OPTION_NODE
)
11842 TREE_TARGET_GLOBALS (*iter
) = NULL
;
11845 /* Determine the "ultimate origin" of a block. */
11848 block_ultimate_origin (const_tree block
)
11850 tree origin
= BLOCK_ABSTRACT_ORIGIN (block
);
11852 if (origin
== NULL_TREE
)
11856 gcc_checking_assert ((DECL_P (origin
)
11857 && DECL_ORIGIN (origin
) == origin
)
11858 || BLOCK_ORIGIN (origin
) == origin
);
/* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
   no instruction.  */

bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Do not strip casts into or out of differing address spaces.  */
  if (POINTER_TYPE_P (outer_type)
      && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
    {
      if (!POINTER_TYPE_P (inner_type)
          || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
              != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
        return false;
    }
  else if (POINTER_TYPE_P (inner_type)
           && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
    {
      /* We already know that outer_type is not a pointer with
         a non-generic address space.  */
      return false;
    }

  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
          || POINTER_TYPE_P (inner_type)
          || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}

/* Return true iff conversion in EXP generates no instruction.  Mark
   it inline so that we fully inline into the stripping functions even
   though we have two uses of this function.  */

static inline bool
tree_nop_conversion (const_tree exp)
{
  tree outer_type, inner_type;

  if (location_wrapper_p (exp))
    return true;
  if (!CONVERT_EXPR_P (exp)
      && TREE_CODE (exp) != NON_LVALUE_EXPR)
    return false;

  outer_type = TREE_TYPE (exp);
  inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
  if (!inner_type || inner_type == error_mark_node)
    return false;

  return tree_nop_conversion_p (outer_type, inner_type);
}

/* Return true iff conversion in EXP generates no instruction.  Don't
   consider conversions changing the signedness.  */

static bool
tree_sign_nop_conversion (const_tree exp)
{
  tree outer_type, inner_type;

  if (!tree_nop_conversion (exp))
    return false;

  outer_type = TREE_TYPE (exp);
  inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

  return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
          && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
}

/* Strip conversions from EXP according to tree_nop_conversion and
   return the resulting expression.  */

tree
tree_strip_nop_conversions (tree exp)
{
  while (tree_nop_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}

/* Strip conversions from EXP according to tree_sign_nop_conversion
   and return the resulting expression.  */

tree
tree_strip_sign_nop_conversions (tree exp)
{
  while (tree_sign_nop_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}
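/* Illustrative sketch: given GENERIC like (unsigned int) (int) u, where `u'
   is an unsigned int operand wrapped in `exp', tree_strip_nop_conversions
   (exp) strips both casts and returns `u', because each conversion preserves
   precision, whereas tree_strip_sign_nop_conversions (exp) returns the
   expression unchanged, because the outermost conversion goes from a signed
   to an unsigned type.  `exp' and `u' are hypothetical trees used only for
   this example.  */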
/* Avoid any floating point extensions from EXP.  */

tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /* For a floating point constant, look up the narrowest type that can hold
     it properly and handle it like (type)(narrowest_type)constant.
     This way we can optimize for instance a=a*2.0 where "a" is float
     but 2.0 is a double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
          && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
        type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
               > TYPE_PRECISION (double_type_node)
               && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
        type = double_type_node;
      if (type)
        return build_real_truncate (type, orig);
    }

  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  if (!FLOAT_TYPE_P (subt))
    return exp;

  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  return strip_float_extensions (sub);
}
/* Strip out all handled components that produce invariant
   offsets.  */

const_tree
strip_invariant_refs (const_tree op)
{
  while (handled_component_p (op))
    {
      switch (TREE_CODE (op))
        {
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          if (!is_gimple_constant (TREE_OPERAND (op, 1))
              || TREE_OPERAND (op, 2) != NULL_TREE
              || TREE_OPERAND (op, 3) != NULL_TREE)
            return NULL;
          break;

        case COMPONENT_REF:
          if (TREE_OPERAND (op, 2) != NULL_TREE)
            return NULL;
          break;

        default:;
        }
      op = TREE_OPERAND (op, 0);
    }

  return op;
}
static GTY(()) tree gcc_eh_personality_decl;

/* Return the GCC personality function decl.  */

tree
lhd_gcc_personality (void)
{
  if (!gcc_eh_personality_decl)
    gcc_eh_personality_decl = build_personality_function ("gcc");
  return gcc_eh_personality_decl;
}
12053 /* TARGET is a call target of GIMPLE call statement
12054 (obtained by gimple_call_fn). Return true if it is
12055 OBJ_TYPE_REF representing an virtual call of C++ method.
12056 (As opposed to OBJ_TYPE_REF representing objc calls
12057 through a cast where middle-end devirtualization machinery
12058 can't apply.) FOR_DUMP_P is true when being called from
12059 the dump routines. */
12062 virtual_method_call_p (const_tree target
, bool for_dump_p
)
12064 if (TREE_CODE (target
) != OBJ_TYPE_REF
)
12066 tree t
= TREE_TYPE (target
);
12067 gcc_checking_assert (TREE_CODE (t
) == POINTER_TYPE
);
12069 if (TREE_CODE (t
) == FUNCTION_TYPE
)
12071 gcc_checking_assert (TREE_CODE (t
) == METHOD_TYPE
);
12072 /* If we do not have BINFO associated, it means that type was built
12073 without devirtualization enabled. Do not consider this a virtual
12075 if (!TYPE_BINFO (obj_type_ref_class (target
, for_dump_p
)))
12080 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12083 lookup_binfo_at_offset (tree binfo
, tree type
, HOST_WIDE_INT pos
)
12086 tree base_binfo
, b
;
12088 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
12089 if (pos
== tree_to_shwi (BINFO_OFFSET (base_binfo
))
12090 && types_same_for_odr (TREE_TYPE (base_binfo
), type
))
12092 else if ((b
= lookup_binfo_at_offset (base_binfo
, type
, pos
)) != NULL
)
12097 /* Try to find a base info of BINFO that would have its field decl at offset
12098 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12099 found, return, otherwise return NULL_TREE. */
12102 get_binfo_at_offset (tree binfo
, poly_int64 offset
, tree expected_type
)
12104 tree type
= BINFO_TYPE (binfo
);
12108 HOST_WIDE_INT pos
, size
;
12112 if (types_same_for_odr (type
, expected_type
))
12114 if (maybe_lt (offset
, 0))
12117 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
12119 if (TREE_CODE (fld
) != FIELD_DECL
|| !DECL_ARTIFICIAL (fld
))
12122 pos
= int_bit_position (fld
);
12123 size
= tree_to_uhwi (DECL_SIZE (fld
));
12124 if (known_in_range_p (offset
, pos
, size
))
12127 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
12130 /* Offset 0 indicates the primary base, whose vtable contents are
12131 represented in the binfo for the derived class. */
12132 else if (maybe_ne (offset
, 0))
12134 tree found_binfo
= NULL
, base_binfo
;
12135 /* Offsets in BINFO are in bytes relative to the whole structure
12136 while POS is in bits relative to the containing field. */
12137 int binfo_offset
= (tree_to_shwi (BINFO_OFFSET (binfo
)) + pos
12140 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
12141 if (tree_to_shwi (BINFO_OFFSET (base_binfo
)) == binfo_offset
12142 && types_same_for_odr (TREE_TYPE (base_binfo
), TREE_TYPE (fld
)))
12144 found_binfo
= base_binfo
;
12148 binfo
= found_binfo
;
12150 binfo
= lookup_binfo_at_offset (binfo
, TREE_TYPE (fld
),
12154 type
= TREE_TYPE (fld
);
/* Returns true if X is a typedef decl.  */

bool
is_typedef_decl (const_tree x)
{
  return (x && TREE_CODE (x) == TYPE_DECL
          && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
}

/* Returns true iff TYPE is a type variant created for a typedef.  */

bool
typedef_variant_p (const_tree type)
{
  return is_typedef_decl (TYPE_NAME (type));
}
12176 /* PR 84195: Replace control characters in "unescaped" with their
12177 escaped equivalents. Allow newlines if -fmessage-length has
12178 been set to a non-zero value. This is done here, rather than
12179 where the attribute is recorded as the message length can
12180 change between these two locations. */
12183 escaped_string::escape (const char *unescaped
)
12186 size_t i
, new_i
, len
;
12191 m_str
= const_cast<char *> (unescaped
);
12194 if (unescaped
== NULL
|| *unescaped
== 0)
12197 len
= strlen (unescaped
);
12201 for (i
= 0; i
< len
; i
++)
12203 char c
= unescaped
[i
];
12208 escaped
[new_i
++] = c
;
12212 if (c
!= '\n' || !pp_is_wrapping_line (global_dc
->printer
))
12214 if (escaped
== NULL
)
12216 /* We only allocate space for a new string if we
12217 actually encounter a control character that
12218 needs replacing. */
12219 escaped
= (char *) xmalloc (len
* 2 + 1);
12220 strncpy (escaped
, unescaped
, i
);
12224 escaped
[new_i
++] = '\\';
12228 case '\a': escaped
[new_i
++] = 'a'; break;
12229 case '\b': escaped
[new_i
++] = 'b'; break;
12230 case '\f': escaped
[new_i
++] = 'f'; break;
12231 case '\n': escaped
[new_i
++] = 'n'; break;
12232 case '\r': escaped
[new_i
++] = 'r'; break;
12233 case '\t': escaped
[new_i
++] = 't'; break;
12234 case '\v': escaped
[new_i
++] = 'v'; break;
12235 default: escaped
[new_i
++] = '?'; break;
12239 escaped
[new_i
++] = c
;
12244 escaped
[new_i
] = 0;
12250 /* Warn about a use of an identifier which was marked deprecated. Returns
12251 whether a warning was given. */
12254 warn_deprecated_use (tree node
, tree attr
)
12256 escaped_string msg
;
12258 if (node
== 0 || !warn_deprecated_decl
)
12264 attr
= DECL_ATTRIBUTES (node
);
12265 else if (TYPE_P (node
))
12267 tree decl
= TYPE_STUB_DECL (node
);
12269 attr
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
12270 else if ((decl
= TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node
)))
12273 node
= TREE_TYPE (decl
);
12274 attr
= TYPE_ATTRIBUTES (node
);
12280 attr
= lookup_attribute ("deprecated", attr
);
12283 msg
.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
12288 auto_diagnostic_group d
;
12290 w
= warning (OPT_Wdeprecated_declarations
,
12291 "%qD is deprecated: %s", node
, (const char *) msg
);
12293 w
= warning (OPT_Wdeprecated_declarations
,
12294 "%qD is deprecated", node
);
12296 inform (DECL_SOURCE_LOCATION (node
), "declared here");
12298 else if (TYPE_P (node
))
12300 tree what
= NULL_TREE
;
12301 tree decl
= TYPE_STUB_DECL (node
);
12303 if (TYPE_NAME (node
))
12305 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
12306 what
= TYPE_NAME (node
);
12307 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
12308 && DECL_NAME (TYPE_NAME (node
)))
12309 what
= DECL_NAME (TYPE_NAME (node
));
12312 auto_diagnostic_group d
;
12316 w
= warning (OPT_Wdeprecated_declarations
,
12317 "%qE is deprecated: %s", what
, (const char *) msg
);
12319 w
= warning (OPT_Wdeprecated_declarations
,
12320 "%qE is deprecated", what
);
12325 w
= warning (OPT_Wdeprecated_declarations
,
12326 "type is deprecated: %s", (const char *) msg
);
12328 w
= warning (OPT_Wdeprecated_declarations
,
12329 "type is deprecated");
12333 inform (DECL_SOURCE_LOCATION (decl
), "declared here");
12339 /* Error out with an identifier which was marked 'unavailable'. */
12341 error_unavailable_use (tree node
, tree attr
)
12343 escaped_string msg
;
12351 attr
= DECL_ATTRIBUTES (node
);
12352 else if (TYPE_P (node
))
12354 tree decl
= TYPE_STUB_DECL (node
);
12356 attr
= lookup_attribute ("unavailable",
12357 TYPE_ATTRIBUTES (TREE_TYPE (decl
)));
12362 attr
= lookup_attribute ("unavailable", attr
);
12365 msg
.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
12369 auto_diagnostic_group d
;
12371 error ("%qD is unavailable: %s", node
, (const char *) msg
);
12373 error ("%qD is unavailable", node
);
12374 inform (DECL_SOURCE_LOCATION (node
), "declared here");
12376 else if (TYPE_P (node
))
12378 tree what
= NULL_TREE
;
12379 tree decl
= TYPE_STUB_DECL (node
);
12381 if (TYPE_NAME (node
))
12383 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
12384 what
= TYPE_NAME (node
);
12385 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
12386 && DECL_NAME (TYPE_NAME (node
)))
12387 what
= DECL_NAME (TYPE_NAME (node
));
12390 auto_diagnostic_group d
;
12394 error ("%qE is unavailable: %s", what
, (const char *) msg
);
12396 error ("%qE is unavailable", what
);
12401 error ("type is unavailable: %s", (const char *) msg
);
12403 error ("type is unavailable");
12407 inform (DECL_SOURCE_LOCATION (decl
), "declared here");
/* Return true if REF has a COMPONENT_REF with a bit-field field declaration
   somewhere in it.  */

bool
contains_bitfld_component_ref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
12428 /* Try to determine whether a TRY_CATCH expression can fall through.
12429 This is a subroutine of block_may_fallthru. */
12432 try_catch_may_fallthru (const_tree stmt
)
12434 tree_stmt_iterator i
;
12436 /* If the TRY block can fall through, the whole TRY_CATCH can
12438 if (block_may_fallthru (TREE_OPERAND (stmt
, 0)))
12441 i
= tsi_start (TREE_OPERAND (stmt
, 1));
12442 switch (TREE_CODE (tsi_stmt (i
)))
12445 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12446 catch expression and a body. The whole TRY_CATCH may fall
12447 through iff any of the catch bodies falls through. */
12448 for (; !tsi_end_p (i
); tsi_next (&i
))
12450 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i
))))
12455 case EH_FILTER_EXPR
:
12456 /* The exception filter expression only matters if there is an
12457 exception. If the exception does not match EH_FILTER_TYPES,
12458 we will execute EH_FILTER_FAILURE, and we will fall through
12459 if that falls through. If the exception does match
12460 EH_FILTER_TYPES, the stack unwinder will continue up the
12461 stack, so we will not fall through. We don't know whether we
12462 will throw an exception which matches EH_FILTER_TYPES or not,
12463 so we just ignore EH_FILTER_TYPES and assume that we might
12464 throw an exception which doesn't match. */
12465 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i
)));
12468 /* This case represents statements to be executed when an
12469 exception occurs. Those statements are implicitly followed
12470 by a RESX statement to resume execution after the exception.
12471 So in this case the TRY_CATCH never falls through. */
12476 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12477 need not be 100% accurate; simply be conservative and return true if we
12478 don't know. This is used only to avoid stupidly generating extra code.
12479 If we're wrong, we'll just delete the extra code later. */
12482 block_may_fallthru (const_tree block
)
12484 /* This CONST_CAST is okay because expr_last returns its argument
12485 unmodified and we assign it to a const_tree. */
12486 const_tree stmt
= expr_last (CONST_CAST_TREE (block
));
12488 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
12492 /* Easy cases. If the last statement of the block implies
12493 control transfer, then we can't fall through. */
12497 /* If there is a default: label or case labels cover all possible
12498 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12499 to some case label in all cases and all we care is whether the
12500 SWITCH_BODY falls through. */
12501 if (SWITCH_ALL_CASES_P (stmt
))
12502 return block_may_fallthru (SWITCH_BODY (stmt
));
12506 if (block_may_fallthru (COND_EXPR_THEN (stmt
)))
12508 return block_may_fallthru (COND_EXPR_ELSE (stmt
));
12511 return block_may_fallthru (BIND_EXPR_BODY (stmt
));
12513 case TRY_CATCH_EXPR
:
12514 return try_catch_may_fallthru (stmt
);
12516 case TRY_FINALLY_EXPR
:
12517 /* The finally clause is always executed after the try clause,
12518 so if it does not fall through, then the try-finally will not
12519 fall through. Otherwise, if the try clause does not fall
12520 through, then when the finally clause falls through it will
12521 resume execution wherever the try clause was going. So the
12522 whole try-finally will only fall through if both the try
12523 clause and the finally clause fall through. */
12524 return (block_may_fallthru (TREE_OPERAND (stmt
, 0))
12525 && block_may_fallthru (TREE_OPERAND (stmt
, 1)));
12528 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12531 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
12532 stmt
= TREE_OPERAND (stmt
, 1);
12538 /* Functions that do not return do not fall through. */
12539 return (call_expr_flags (stmt
) & ECF_NORETURN
) == 0;
12541 case CLEANUP_POINT_EXPR
:
12542 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12545 return block_may_fallthru (TREE_OPERAND (stmt
, 1));
12551 return lang_hooks
.block_may_fallthru (stmt
);
/* True if we are using EH to handle cleanups.  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}

/* Query whether EH is used for cleanups.  */

bool
using_eh_for_cleanups_p (void)
{
  return using_eh_for_cleanups_flag;
}

/* Wrapper for tree_code_name to ensure that tree code is valid.  */

const char *
get_tree_code_name (enum tree_code code)
{
  const char *invalid = "<invalid tree code>";

  /* The tree_code enum promotes to signed, but we could be getting
     invalid values, so force an unsigned comparison.  */
  if (unsigned (code) >= MAX_TREE_CODES)
    {
      if ((unsigned) code == 0xa5a5)
        return "ggc_freed";
      else
        return invalid;
    }

  return tree_code_name[code];
}
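/* Illustrative sketch: dump and debugging code typically prints a node's code
   with

     fprintf (stderr, "%s\n", get_tree_code_name (TREE_CODE (node)));

   and the 0xa5a5 pattern identifies a node whose memory has been freed and
   poisoned by the garbage collector.  `node' is a hypothetical tree used only
   for this example.  */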
/* Drops the TREE_OVERFLOW flag from T.  */

tree
drop_tree_overflow (tree t)
{
  gcc_checking_assert (TREE_OVERFLOW (t));

  /* For tree codes with a sharing machinery re-build the result.  */
  if (poly_int_tree_p (t))
    return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));

  /* For VECTOR_CST, remove the overflow bits from the encoded elements
     and canonicalize the result.  */
  if (TREE_CODE (t) == VECTOR_CST)
    {
      tree_vector_builder builder;
      builder.new_unary_operation (TREE_TYPE (t), t, true);
      unsigned int count = builder.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
        {
          tree elt = VECTOR_CST_ELT (t, i);
          if (TREE_OVERFLOW (elt))
            elt = drop_tree_overflow (elt);
          builder.quick_push (elt);
        }
      return builder.build ();
    }

  /* Otherwise, as all tcc_constants are possibly shared, copy the node
     and drop the flag.  */
  t = copy_node (t);
  TREE_OVERFLOW (t) = 0;

  /* For constants that contain nested constants, drop the flag
     from those as well.  */
  if (TREE_CODE (t) == COMPLEX_CST)
    {
      if (TREE_OVERFLOW (TREE_REALPART (t)))
        TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
      if (TREE_OVERFLOW (TREE_IMAGPART (t)))
        TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
    }

  return t;
}
/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if ((TREE_CODE (t) == MEM_REF
       || TREE_CODE (t) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);

  return t;
}
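/* Illustrative sketch: for a reference such as array[i].fld[j],
   get_base_address returns the VAR_DECL `array'; for a MEM_REF whose address
   operand is an ADDR_EXPR, e.g. MEM[&obj + 8], it looks through the
   ADDR_EXPR and returns `obj' itself:

     tree base = get_base_address (ref);

   `ref', `array' and `obj' are hypothetical trees used only for this
   example.  */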
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
         sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
        aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
                             size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}

/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type),
                                           exp);

  /* Otherwise, return a zero of the appropriate type.  */
  tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
  return (idxtype == error_mark_node
          ? integer_zero_node : build_int_cst (idxtype, 0));
}

/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type),
                                           exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
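/* Illustrative sketch: for an ARRAY_REF into a C object declared as
   `int a[10]', array_ref_low_bound yields 0 and array_ref_up_bound yields 9,
   both as INTEGER_CSTs of the index type, while array_ref_element_size
   yields size_int (4) on a target with 4-byte int.  */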
12730 /* Returns true if REF is an array reference, component reference,
12731 or memory reference to an array whose actual size might be larger
12732 than its upper bound implies, there are multiple cases:
12733 A. a ref to a flexible array member at the end of a structure;
12734 B. a ref to an array with a different type against the original decl;
12737 short a[16] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 };
12738 (*((char(*)[16])&a[0]))[i+8]
12740 C. a ref to an array that was passed as a parameter;
12743 int test (uint8_t *p, uint32_t t[1][1], int n) {
12744 for (int i = 0; i < 4; i++, p++)
12747 FIXME, the name of this routine need to be changed to be more accurate. */
12749 array_at_struct_end_p (tree ref
)
12751 /* the TYPE for this array referece. */
12752 tree atype
= NULL_TREE
;
12753 /* the FIELD_DECL for the array field in the containing structure. */
12754 tree afield_decl
= NULL_TREE
;
12756 if (TREE_CODE (ref
) == ARRAY_REF
12757 || TREE_CODE (ref
) == ARRAY_RANGE_REF
)
12759 atype
= TREE_TYPE (TREE_OPERAND (ref
, 0));
12760 ref
= TREE_OPERAND (ref
, 0);
12762 else if (TREE_CODE (ref
) == COMPONENT_REF
12763 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 1))) == ARRAY_TYPE
)
12765 atype
= TREE_TYPE (TREE_OPERAND (ref
, 1));
12766 afield_decl
= TREE_OPERAND (ref
, 1);
12768 else if (TREE_CODE (ref
) == MEM_REF
)
12770 tree arg
= TREE_OPERAND (ref
, 0);
12771 if (TREE_CODE (arg
) == ADDR_EXPR
)
12772 arg
= TREE_OPERAND (arg
, 0);
12773 tree argtype
= TREE_TYPE (arg
);
12774 if (TREE_CODE (argtype
) == RECORD_TYPE
)
12776 if (tree fld
= last_field (argtype
))
12778 atype
= TREE_TYPE (fld
);
12780 if (TREE_CODE (atype
) != ARRAY_TYPE
)
12782 if (VAR_P (arg
) && DECL_SIZE (fld
))
12794 if (TREE_CODE (ref
) == STRING_CST
)
12797 tree ref_to_array
= ref
;
12798 while (handled_component_p (ref
))
12800 /* If the reference chain contains a component reference to a
12801 non-union type and there follows another field the reference
12802 is not at the end of a structure. */
12803 if (TREE_CODE (ref
) == COMPONENT_REF
)
12805 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 0))) == RECORD_TYPE
)
12807 tree nextf
= DECL_CHAIN (TREE_OPERAND (ref
, 1));
12808 while (nextf
&& TREE_CODE (nextf
) != FIELD_DECL
)
12809 nextf
= DECL_CHAIN (nextf
);
12814 /* If we have a multi-dimensional array we do not consider
12815 a non-innermost dimension as flex array if the whole
12816 multi-dimensional array is at struct end.
12817 Same for an array of aggregates with a trailing array
12819 else if (TREE_CODE (ref
) == ARRAY_REF
)
12821 else if (TREE_CODE (ref
) == ARRAY_RANGE_REF
)
12823 /* If we view an underlying object as sth else then what we
12824 gathered up to now is what we have to rely on. */
12825 else if (TREE_CODE (ref
) == VIEW_CONVERT_EXPR
)
12828 gcc_unreachable ();
12830 ref
= TREE_OPERAND (ref
, 0);
12833 gcc_assert (!afield_decl
12834 || (afield_decl
&& TREE_CODE (afield_decl
) == FIELD_DECL
));
12836 /* The array now is at struct end. Treat flexible array member as
12837 always subject to extend, even into just padding constrained by
12838 an underlying decl. */
12839 if (! TYPE_SIZE (atype
)
12840 || ! TYPE_DOMAIN (atype
)
12841 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype
)))
12842 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
12844 /* If the reference is based on a declared entity, the size of the array
12845 is constrained by its given domain. (Do not trust commons PR/69368). */
12846 ref
= get_base_address (ref
);
12849 && !(flag_unconstrained_commons
12850 && VAR_P (ref
) && DECL_COMMON (ref
))
12851 && DECL_SIZE_UNIT (ref
)
12852 && TREE_CODE (DECL_SIZE_UNIT (ref
)) == INTEGER_CST
)
12854 /* If the object itself is the array it is not at struct end. */
12855 if (DECL_P (ref_to_array
))
12858 /* Check whether the array domain covers all of the available
12861 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype
))) != INTEGER_CST
12862 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype
))) != INTEGER_CST
12863 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype
))) != INTEGER_CST
)
12864 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
12865 if (! get_addr_base_and_unit_offset (ref_to_array
, &offset
))
12866 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
12868 /* If at least one extra element fits it is a flexarray. */
12869 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype
)))
12870 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype
)))
12872 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype
))),
12873 wi::to_offset (DECL_SIZE_UNIT (ref
)) - offset
))
12874 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
12879 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);
  location_t loc = EXPR_LOCATION (exp);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
         sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
        aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
                             size_int (DECL_OFFSET_ALIGN (field)
                                       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
12912 /* Given the initializer INIT, return the initializer for the field
12913 DECL if it exists, otherwise null. Used to obtain the initializer
12914 for a flexible array member and determine its size. */
12917 get_initializer_for (tree init
, tree decl
)
12921 tree fld
, fld_init
;
12922 unsigned HOST_WIDE_INT i
;
12923 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), i
, fld
, fld_init
)
12928 if (TREE_CODE (fld
) == CONSTRUCTOR
)
12930 fld_init
= get_initializer_for (fld_init
, decl
);
/* Determines the size of the member referenced by the COMPONENT_REF
   REF, using its initializer expression if necessary in order to
   determine the size of an initialized flexible array member.
   If non-null, set *SAM when REF refers to an interior zero-length
   array or a trailing one-element array.
   Returns the size as sizetype (which might be zero for an object
   with an uninitialized flexible array member) or null if the size
   cannot be determined.  */

tree
component_ref_size (tree ref, special_array_member *sam /* = NULL */)
{
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);

  special_array_member sambuf;
  if (!sam)
    sam = &sambuf;
  *sam = special_array_member::none;

  /* The object/argument referenced by the COMPONENT_REF and its type.  */
  tree arg = TREE_OPERAND (ref, 0);
  tree argtype = TREE_TYPE (arg);
  /* The referenced member.  */
  tree member = TREE_OPERAND (ref, 1);

  tree memsize = DECL_SIZE_UNIT (member);
  if (memsize)
    {
      tree memtype = TREE_TYPE (member);
      if (TREE_CODE (memtype) != ARRAY_TYPE)
	/* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
	   to the type of a class with a virtual base which doesn't
	   reflect the size of the virtual's members (see pr97595).
	   If that's the case fail for now and implement something
	   more robust in the future.  */
	return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
		? memsize : NULL_TREE);

      bool trailing = array_at_struct_end_p (ref);
      bool zero_length = integer_zerop (memsize);
      if (!trailing && !zero_length)
	/* MEMBER is either an interior array or is an array with
	   more than one element.  */
	return memsize;

      if (zero_length)
	{
	  if (trailing)
	    *sam = special_array_member::trail_0;
	  else
	    {
	      *sam = special_array_member::int_0;
	      memsize = NULL_TREE;
	    }
	}

      if (!zero_length)
	if (tree dom = TYPE_DOMAIN (memtype))
	  if (tree min = TYPE_MIN_VALUE (dom))
	    if (tree max = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (min) == INTEGER_CST
		  && TREE_CODE (max) == INTEGER_CST)
		{
		  offset_int minidx = wi::to_offset (min);
		  offset_int maxidx = wi::to_offset (max);
		  offset_int neltsm1 = maxidx - minidx;
		  if (neltsm1 > 0)
		    /* MEMBER is an array with more than one element.  */
		    return memsize;

		  if (neltsm1 == 0)
		    *sam = special_array_member::trail_1;
		}

      /* For a reference to a zero- or one-element array member of a union
	 use the size of the union instead of the size of the member.  */
      if (TREE_CODE (argtype) == UNION_TYPE)
	memsize = TYPE_SIZE_UNIT (argtype);
    }

  /* MEMBER is either a bona fide flexible array member, or a zero-length
     array member, or an array of length one treated as such.  */

  /* If the reference is to a declared object and the member a true
     flexible array, try to determine its size from its initializer.  */
  poly_int64 baseoff = 0;
  tree base = get_addr_base_and_unit_offset (ref, &baseoff);
  if (!base || !VAR_P (base))
    {
      if (*sam != special_array_member::int_0)
	return NULL_TREE;

      if (TREE_CODE (arg) != COMPONENT_REF)
	return NULL_TREE;

      base = arg;
      while (TREE_CODE (base) == COMPONENT_REF)
	base = TREE_OPERAND (base, 0);
      baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
    }

  /* BASE is the declared object of which MEMBER is either a member
     or that is cast to ARGTYPE (e.g., a char buffer used to store
     an ARGTYPE object).  */
  tree basetype = TREE_TYPE (base);

  /* Determine the base type of the referenced object.  If it's
     the same as ARGTYPE and MEMBER has a known size, return it.  */
  tree bt = basetype;
  if (*sam != special_array_member::int_0)
    while (TREE_CODE (bt) == ARRAY_TYPE)
      bt = TREE_TYPE (bt);
  bool typematch = useless_type_conversion_p (argtype, bt);
  if (memsize && typematch)
    return memsize;

  memsize = NULL_TREE;

  if (typematch)
    /* MEMBER is a true flexible array member.  Compute its size from
       the initializer of the BASE object if it has one.  */
    if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
      if (init != error_mark_node)
	{
	  init = get_initializer_for (init, member);
	  if (init)
	    {
	      memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
	      if (tree refsize = TYPE_SIZE_UNIT (argtype))
		{
		  /* Use the larger of the initializer size and the tail
		     padding in the enclosing struct.  */
		  poly_int64 rsz = tree_to_poly_int64 (refsize);
		  rsz -= baseoff;
		  if (known_lt (tree_to_poly_int64 (memsize), rsz))
		    memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
		}

	      baseoff = 0;
	    }
	}

  if (!memsize)
    {
      if (typematch)
	{
	  if (DECL_P (base)
	      && DECL_EXTERNAL (base)
	      && bt == basetype
	      && *sam != special_array_member::int_0)
	    /* The size of a flexible array member of an extern struct
	       with no initializer cannot be determined (it's defined
	       in another translation unit and can have an initializer
	       with an arbitrary number of elements).  */
	    return NULL_TREE;

	  /* Use the size of the base struct or, for interior zero-length
	     arrays, the size of the enclosing type.  */
	  memsize = TYPE_SIZE_UNIT (bt);
	}
      else if (DECL_P (base))
	/* Use the size of the BASE object (possibly an array of some
	   other type such as char used to store the struct).  */
	memsize = DECL_SIZE_UNIT (base);
      else
	return NULL_TREE;
    }

  /* If the flexible array member has a known size use the greater
     of it and the tail padding in the enclosing struct.
     Otherwise, when the size of the flexible array member is unknown
     and the referenced object is not a struct, use the size of its
     type when known.  This detects sizes of array buffers when cast
     to struct types with flexible array members.  */
  if (memsize)
    {
      if (!tree_fits_poly_int64_p (memsize))
	return NULL_TREE;

      poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
      if (known_lt (baseoff, memsz64))
	{
	  memsz64 -= baseoff;
	  return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
	}
      return size_zero_node;
    }

  /* Return "don't know" for an external non-array object since its
     flexible array member can be initialized to have any number of
     elements.  Otherwise, return zero because the flexible array
     member has no elements.  */
  return (DECL_P (base)
	  && DECL_EXTERNAL (base)
	  && (basetype == argtype
	      || TREE_CODE (basetype) != ARRAY_TYPE)
	  ? NULL_TREE : size_zero_node);
}
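/* As an illustration (a hypothetical example, not from the surrounding
   sources): given

     struct S { int n; int a[]; };
     struct S s = { 1, { 2, 3, 4 } };

   the member s.a is a true flexible array member whose size is taken
   from its initializer, so component_ref_size would return 12 bytes
   (three ints, assuming a 4-byte int), whereas for

     extern struct S x;

   with no initializer the size cannot be determined and NULL_TREE is
   returned.  */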
/* Return the machine mode of T.  For vectors, returns the mode of the
   inner type.  The main use case is to feed the result to HONOR_NANS,
   avoiding the BLKmode that a direct TYPE_MODE (T) might return.  */

scalar_mode
element_mode (const_tree t)
{
  if (!TYPE_P (t))
    t = TREE_TYPE (t);
  if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    t = TREE_TYPE (t);
  return as_a <scalar_mode> (TYPE_MODE (t));
}
/* Vector types need to re-check the target flags each time we report
   the machine mode.  We need to do this because attribute target can
   change the result of vector_mode_supported_p and have_regs_of_mode
   on a per-function basis.  Thus the TYPE_MODE of a VECTOR_TYPE can
   change on a per-function basis.  */
/* ??? Possibly a better solution is to run through all the types
   referenced by a function and re-compute the TYPE_MODE once, rather
   than make the TYPE_MODE macro call a function.  */

machine_mode
vector_type_mode (const_tree t)
{
  machine_mode mode;

  gcc_assert (TREE_CODE (t) == VECTOR_TYPE);

  mode = t->type_common.mode;
  if (VECTOR_MODE_P (mode)
      && (!targetm.vector_mode_supported_p (mode)
	  || !have_regs_of_mode[mode]))
    {
      scalar_int_mode innermode;

      /* For integers, try mapping it to a same-sized scalar mode.  */
      if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
	{
	  poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
			     * GET_MODE_BITSIZE (innermode));
	  scalar_int_mode mode;
	  if (int_mode_for_size (size, 0).exists (&mode)
	      && have_regs_of_mode[mode])
	    return mode;
	}

      return BLKmode;
    }

  return mode;
}
/* Return the size in bits of each element of vector type TYPE.  */

unsigned int
vector_element_bits (const_tree type)
{
  gcc_checking_assert (VECTOR_TYPE_P (type));
  if (VECTOR_BOOLEAN_TYPE_P (type))
    return TYPE_PRECISION (TREE_TYPE (type));
  return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
}

/* Calculate the size in bits of each element of vector type TYPE
   and return the result as a tree of type bitsizetype.  */

tree
vector_element_bits_tree (const_tree type)
{
  gcc_checking_assert (VECTOR_TYPE_P (type));
  if (VECTOR_BOOLEAN_TYPE_P (type))
    return bitsize_int (vector_element_bits (type));
  return TYPE_SIZE (TREE_TYPE (type));
}
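/* For example, for a vector of four floats element_mode returns SFmode
   rather than the vector's own mode, and vector_element_bits returns 32;
   for a vector boolean type the element size is instead given by the
   precision of the boolean element.  */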
13214 /* Verify that basic properties of T match TV and thus T can be a variant of
13215 TV. TV should be the more specified variant (i.e. the main variant). */
13218 verify_type_variant (const_tree t
, tree tv
)
13220 /* Type variant can differ by:
13222 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13223 ENCODE_QUAL_ADDR_SPACE.
13224 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13225 in this case some values may not be set in the variant types
13226 (see TYPE_COMPLETE_P checks).
13227 - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
13228 - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
13229 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13230 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13231 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13232 this is necessary to make it possible to merge types form different TUs
13233 - arrays, pointers and references may have TREE_TYPE that is a variant
13234 of TREE_TYPE of their main variants.
13235 - aggregates may have new TYPE_FIELDS list that list variants of
13236 the main variant TYPE_FIELDS.
13237 - vector types may differ by TYPE_VECTOR_OPAQUE
13240 /* Convenience macro for matching individual fields. */
13241 #define verify_variant_match(flag) \
13243 if (flag (tv) != flag (t)) \
13245 error ("type variant differs by %s", #flag); \
13251 /* tree_base checks. */
13253 verify_variant_match (TREE_CODE
);
13254 /* FIXME: Ada builds non-artificial variants of artificial types. */
13256 if (TYPE_ARTIFICIAL (tv
))
13257 verify_variant_match (TYPE_ARTIFICIAL
);
13259 if (POINTER_TYPE_P (tv
))
13260 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL
);
13261 /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build. */
13262 verify_variant_match (TYPE_UNSIGNED
);
13263 verify_variant_match (TYPE_PACKED
);
13264 if (TREE_CODE (t
) == REFERENCE_TYPE
)
13265 verify_variant_match (TYPE_REF_IS_RVALUE
);
13266 if (AGGREGATE_TYPE_P (t
))
13267 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER
);
13269 verify_variant_match (TYPE_SATURATING
);
13270 /* FIXME: This check trigger during libstdc++ build. */
13272 if (RECORD_OR_UNION_TYPE_P (t
) && COMPLETE_TYPE_P (t
))
13273 verify_variant_match (TYPE_FINAL_P
);
13276 /* tree_type_common checks. */
13278 if (COMPLETE_TYPE_P (t
))
13280 verify_variant_match (TYPE_MODE
);
13281 if (TREE_CODE (TYPE_SIZE (t
)) != PLACEHOLDER_EXPR
13282 && TREE_CODE (TYPE_SIZE (tv
)) != PLACEHOLDER_EXPR
)
13283 verify_variant_match (TYPE_SIZE
);
13284 if (TREE_CODE (TYPE_SIZE_UNIT (t
)) != PLACEHOLDER_EXPR
13285 && TREE_CODE (TYPE_SIZE_UNIT (tv
)) != PLACEHOLDER_EXPR
13286 && TYPE_SIZE_UNIT (t
) != TYPE_SIZE_UNIT (tv
))
13288 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t
),
13289 TYPE_SIZE_UNIT (tv
), 0));
13290 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13292 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13293 debug_tree (TYPE_SIZE_UNIT (tv
));
13294 error ("type%'s %<TYPE_SIZE_UNIT%>");
13295 debug_tree (TYPE_SIZE_UNIT (t
));
13298 verify_variant_match (TYPE_NEEDS_CONSTRUCTING
);
13300 verify_variant_match (TYPE_PRECISION
);
13301 if (RECORD_OR_UNION_TYPE_P (t
))
13302 verify_variant_match (TYPE_TRANSPARENT_AGGR
);
13303 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13304 verify_variant_match (TYPE_NONALIASED_COMPONENT
);
13305 /* During LTO we merge variant lists from diferent translation units
13306 that may differ BY TYPE_CONTEXT that in turn may point
13307 to TRANSLATION_UNIT_DECL.
13308 Ada also builds variants of types with different TYPE_CONTEXT. */
13310 if (!in_lto_p
|| !TYPE_FILE_SCOPE_P (t
))
13311 verify_variant_match (TYPE_CONTEXT
);
13313 if (TREE_CODE (t
) == ARRAY_TYPE
|| TREE_CODE (t
) == INTEGER_TYPE
)
13314 verify_variant_match (TYPE_STRING_FLAG
);
13315 if (TREE_CODE (t
) == RECORD_TYPE
|| TREE_CODE (t
) == UNION_TYPE
)
13316 verify_variant_match (TYPE_CXX_ODR_P
);
13317 if (TYPE_ALIAS_SET_KNOWN_P (t
))
13319 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13324 /* tree_type_non_common checks. */
13326 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13327 and dangle the pointer from time to time. */
13328 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_VFIELD (t
) != TYPE_VFIELD (tv
)
13329 && (in_lto_p
|| !TYPE_VFIELD (tv
)
13330 || TREE_CODE (TYPE_VFIELD (tv
)) != TREE_LIST
))
13332 error ("type variant has different %<TYPE_VFIELD%>");
13336 if ((TREE_CODE (t
) == ENUMERAL_TYPE
&& COMPLETE_TYPE_P (t
))
13337 || TREE_CODE (t
) == INTEGER_TYPE
13338 || TREE_CODE (t
) == BOOLEAN_TYPE
13339 || TREE_CODE (t
) == REAL_TYPE
13340 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13342 verify_variant_match (TYPE_MAX_VALUE
);
13343 verify_variant_match (TYPE_MIN_VALUE
);
13345 if (TREE_CODE (t
) == METHOD_TYPE
)
13346 verify_variant_match (TYPE_METHOD_BASETYPE
);
13347 if (TREE_CODE (t
) == OFFSET_TYPE
)
13348 verify_variant_match (TYPE_OFFSET_BASETYPE
);
13349 if (TREE_CODE (t
) == ARRAY_TYPE
)
13350 verify_variant_match (TYPE_ARRAY_MAX_SIZE
);
13351 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13352 or even type's main variant. This is needed to make bootstrap pass
13353 and the bug seems new in GCC 5.
13354 C++ FE should be updated to make this consistent and we should check
13355 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13356 is a match with main variant.
13358 Also disable the check for Java for now because of parser hack that builds
13359 first an dummy BINFO and then sometimes replace it by real BINFO in some
13361 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_BINFO (t
) && TYPE_BINFO (tv
)
13362 && TYPE_BINFO (t
) != TYPE_BINFO (tv
)
13363 /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
13364 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
13365 at LTO time only. */
13366 && (in_lto_p
&& odr_type_p (t
)))
13368 error ("type variant has different %<TYPE_BINFO%>");
13370 error ("type variant%'s %<TYPE_BINFO%>");
13371 debug_tree (TYPE_BINFO (tv
));
13372 error ("type%'s %<TYPE_BINFO%>");
13373 debug_tree (TYPE_BINFO (t
));
13377 /* Check various uses of TYPE_VALUES_RAW. */
13378 if (TREE_CODE (t
) == ENUMERAL_TYPE
13379 && TYPE_VALUES (t
))
13380 verify_variant_match (TYPE_VALUES
);
13381 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13382 verify_variant_match (TYPE_DOMAIN
);
13383 /* Permit incomplete variants of complete type. While FEs may complete
13384 all variants, this does not happen for C++ templates in all cases. */
13385 else if (RECORD_OR_UNION_TYPE_P (t
)
13386 && COMPLETE_TYPE_P (t
)
13387 && TYPE_FIELDS (t
) != TYPE_FIELDS (tv
))
13391 /* Fortran builds qualified variants as new records with items of
13392 qualified type. Verify that they looks same. */
13393 for (f1
= TYPE_FIELDS (t
), f2
= TYPE_FIELDS (tv
);
13395 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
13396 if (TREE_CODE (f1
) != FIELD_DECL
|| TREE_CODE (f2
) != FIELD_DECL
13397 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1
))
13398 != TYPE_MAIN_VARIANT (TREE_TYPE (f2
))
13399 /* FIXME: gfc_nonrestricted_type builds all types as variants
13400 with exception of pointer types. It deeply copies the type
13401 which means that we may end up with a variant type
13402 referring non-variant pointer. We may change it to
13403 produce types as variants, too, like
13404 objc_get_protocol_qualified_type does. */
13405 && !POINTER_TYPE_P (TREE_TYPE (f1
)))
13406 || DECL_FIELD_OFFSET (f1
) != DECL_FIELD_OFFSET (f2
)
13407 || DECL_FIELD_BIT_OFFSET (f1
) != DECL_FIELD_BIT_OFFSET (f2
))
13411 error ("type variant has different %<TYPE_FIELDS%>");
13413 error ("first mismatch is field");
13415 error ("and field");
13420 else if ((TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
))
13421 verify_variant_match (TYPE_ARG_TYPES
);
13422 /* For C++ the qualified variant of array type is really an array type
13423 of qualified TREE_TYPE.
13424 objc builds variants of pointer where pointer to type is a variant, too
13425 in objc_get_protocol_qualified_type. */
13426 if (TREE_TYPE (t
) != TREE_TYPE (tv
)
13427 && ((TREE_CODE (t
) != ARRAY_TYPE
13428 && !POINTER_TYPE_P (t
))
13429 || TYPE_MAIN_VARIANT (TREE_TYPE (t
))
13430 != TYPE_MAIN_VARIANT (TREE_TYPE (tv
))))
13432 error ("type variant has different %<TREE_TYPE%>");
13434 error ("type variant%'s %<TREE_TYPE%>");
13435 debug_tree (TREE_TYPE (tv
));
13436 error ("type%'s %<TREE_TYPE%>");
13437 debug_tree (TREE_TYPE (t
));
13440 if (type_with_alias_set_p (t
)
13441 && !gimple_canonical_types_compatible_p (t
, tv
, false))
13443 error ("type is not compatible with its variant");
13445 error ("type variant%'s %<TREE_TYPE%>");
13446 debug_tree (TREE_TYPE (tv
));
13447 error ("type%'s %<TREE_TYPE%>");
13448 debug_tree (TREE_TYPE (t
));
13452 #undef verify_variant_match
/* The TYPE_CANONICAL merging machinery.  It should closely resemble
   the middle-end types_compatible_p function.  It needs to avoid
   claiming types are different for types that should be treated
   the same with respect to TBAA.  Canonical types are also used
   for IL consistency checks via the useless_type_conversion_p
   predicate which does not handle all type kinds itself but falls
   back to pointer-comparison of TYPE_CANONICAL for aggregates
   for example.  */

/* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
   type calculation because we need to allow inter-operability between signed
   and unsigned variants.  */

bool
type_with_interoperable_signedness (const_tree type)
{
  /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with
     both signed char and unsigned char.  Similarly the Fortran FE builds
     C_SIZE_T as a signed type, while C defines it as unsigned.  */

  return tree_code_for_canonical_type_merging (TREE_CODE (type))
	   == INTEGER_TYPE
	 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
	     || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
}
13482 /* Return true iff T1 and T2 are structurally identical for what
13484 This function is used both by lto.cc canonical type merging and by the
13485 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13486 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13487 only for LTO because only in these cases TYPE_CANONICAL equivalence
13488 correspond to one defined by gimple_canonical_types_compatible_p. */
13491 gimple_canonical_types_compatible_p (const_tree t1
, const_tree t2
,
13492 bool trust_type_canonical
)
13494 /* Type variants should be same as the main variant. When not doing sanity
13495 checking to verify this fact, go to main variants and save some work. */
13496 if (trust_type_canonical
)
13498 t1
= TYPE_MAIN_VARIANT (t1
);
13499 t2
= TYPE_MAIN_VARIANT (t2
);
13502 /* Check first for the obvious case of pointer identity. */
13506 /* Check that we have two types to compare. */
13507 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
13510 /* We consider complete types always compatible with incomplete type.
13511 This does not make sense for canonical type calculation and thus we
13512 need to ensure that we are never called on it.
13514 FIXME: For more correctness the function probably should have three modes
13515 1) mode assuming that types are complete mathcing their structure
13516 2) mode allowing incomplete types but producing equivalence classes
13517 and thus ignoring all info from complete types
13518 3) mode allowing incomplete types to match complete but checking
13519 compatibility between complete types.
13521 1 and 2 can be used for canonical type calculation. 3 is the real
13522 definition of type compatibility that can be used i.e. for warnings during
13523 declaration merging. */
13525 gcc_assert (!trust_type_canonical
13526 || (type_with_alias_set_p (t1
) && type_with_alias_set_p (t2
)));
13528 /* If the types have been previously registered and found equal
13531 if (TYPE_CANONICAL (t1
) && TYPE_CANONICAL (t2
)
13532 && trust_type_canonical
)
13534 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13535 they are always NULL, but they are set to non-NULL for types
13536 constructed by build_pointer_type and variants. In this case the
13537 TYPE_CANONICAL is more fine grained than the equivalnce we test (where
13538 all pointers are considered equal. Be sure to not return false
13540 gcc_checking_assert (canonical_type_used_p (t1
)
13541 && canonical_type_used_p (t2
));
13542 return TYPE_CANONICAL (t1
) == TYPE_CANONICAL (t2
);
13545 /* For types where we do ODR based TBAA the canonical type is always
13546 set correctly, so we know that types are different if their
13547 canonical types does not match. */
13548 if (trust_type_canonical
13549 && (odr_type_p (t1
) && odr_based_tbaa_p (t1
))
13550 != (odr_type_p (t2
) && odr_based_tbaa_p (t2
)))
13553 /* Can't be the same type if the types don't have the same code. */
13554 enum tree_code code
= tree_code_for_canonical_type_merging (TREE_CODE (t1
));
13555 if (code
!= tree_code_for_canonical_type_merging (TREE_CODE (t2
)))
13558 /* Qualifiers do not matter for canonical type comparison purposes. */
13560 /* Void types and nullptr types are always the same. */
13561 if (TREE_CODE (t1
) == VOID_TYPE
13562 || TREE_CODE (t1
) == NULLPTR_TYPE
)
13565 /* Can't be the same type if they have different mode. */
13566 if (TYPE_MODE (t1
) != TYPE_MODE (t2
))
13569 /* Non-aggregate types can be handled cheaply. */
13570 if (INTEGRAL_TYPE_P (t1
)
13571 || SCALAR_FLOAT_TYPE_P (t1
)
13572 || FIXED_POINT_TYPE_P (t1
)
13573 || TREE_CODE (t1
) == VECTOR_TYPE
13574 || TREE_CODE (t1
) == COMPLEX_TYPE
13575 || TREE_CODE (t1
) == OFFSET_TYPE
13576 || POINTER_TYPE_P (t1
))
13578 /* Can't be the same type if they have different recision. */
13579 if (TYPE_PRECISION (t1
) != TYPE_PRECISION (t2
))
13582 /* In some cases the signed and unsigned types are required to be
13584 if (TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
)
13585 && !type_with_interoperable_signedness (t1
))
13588 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13589 interoperable with "signed char". Unless all frontends are revisited
13590 to agree on these types, we must ignore the flag completely. */
13592 /* Fortran standard define C_PTR type that is compatible with every
13593 C pointer. For this reason we need to glob all pointers into one.
13594 Still pointers in different address spaces are not compatible. */
13595 if (POINTER_TYPE_P (t1
))
13597 if (TYPE_ADDR_SPACE (TREE_TYPE (t1
))
13598 != TYPE_ADDR_SPACE (TREE_TYPE (t2
)))
13602 /* Tail-recurse to components. */
13603 if (TREE_CODE (t1
) == VECTOR_TYPE
13604 || TREE_CODE (t1
) == COMPLEX_TYPE
)
13605 return gimple_canonical_types_compatible_p (TREE_TYPE (t1
),
13607 trust_type_canonical
);
13612 /* Do type-specific comparisons. */
13613 switch (TREE_CODE (t1
))
13616 /* Array types are the same if the element types are the same and
13617 the number of elements are the same. */
13618 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
13619 trust_type_canonical
)
13620 || TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
)
13621 || TYPE_REVERSE_STORAGE_ORDER (t1
) != TYPE_REVERSE_STORAGE_ORDER (t2
)
13622 || TYPE_NONALIASED_COMPONENT (t1
) != TYPE_NONALIASED_COMPONENT (t2
))
13626 tree i1
= TYPE_DOMAIN (t1
);
13627 tree i2
= TYPE_DOMAIN (t2
);
13629 /* For an incomplete external array, the type domain can be
13630 NULL_TREE. Check this condition also. */
13631 if (i1
== NULL_TREE
&& i2
== NULL_TREE
)
13633 else if (i1
== NULL_TREE
|| i2
== NULL_TREE
)
13637 tree min1
= TYPE_MIN_VALUE (i1
);
13638 tree min2
= TYPE_MIN_VALUE (i2
);
13639 tree max1
= TYPE_MAX_VALUE (i1
);
13640 tree max2
= TYPE_MAX_VALUE (i2
);
13642 /* The minimum/maximum values have to be the same. */
13645 && ((TREE_CODE (min1
) == PLACEHOLDER_EXPR
13646 && TREE_CODE (min2
) == PLACEHOLDER_EXPR
)
13647 || operand_equal_p (min1
, min2
, 0))))
13650 && ((TREE_CODE (max1
) == PLACEHOLDER_EXPR
13651 && TREE_CODE (max2
) == PLACEHOLDER_EXPR
)
13652 || operand_equal_p (max1
, max2
, 0)))))
13660 case FUNCTION_TYPE
:
13661 /* Function types are the same if the return type and arguments types
13663 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
13664 trust_type_canonical
))
13667 if (TYPE_ARG_TYPES (t1
) == TYPE_ARG_TYPES (t2
)
13668 && (TYPE_NO_NAMED_ARGS_STDARG_P (t1
)
13669 == TYPE_NO_NAMED_ARGS_STDARG_P (t2
)))
13673 tree parms1
, parms2
;
13675 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
13677 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
))
13679 if (!gimple_canonical_types_compatible_p
13680 (TREE_VALUE (parms1
), TREE_VALUE (parms2
),
13681 trust_type_canonical
))
13685 if (parms1
|| parms2
)
13693 case QUAL_UNION_TYPE
:
13697 /* Don't try to compare variants of an incomplete type, before
13698 TYPE_FIELDS has been copied around. */
13699 if (!COMPLETE_TYPE_P (t1
) && !COMPLETE_TYPE_P (t2
))
13703 if (TYPE_REVERSE_STORAGE_ORDER (t1
) != TYPE_REVERSE_STORAGE_ORDER (t2
))
13706 /* For aggregate types, all the fields must be the same. */
13707 for (f1
= TYPE_FIELDS (t1
), f2
= TYPE_FIELDS (t2
);
13709 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
13711 /* Skip non-fields and zero-sized fields. */
13712 while (f1
&& (TREE_CODE (f1
) != FIELD_DECL
13714 && integer_zerop (DECL_SIZE (f1
)))))
13715 f1
= TREE_CHAIN (f1
);
13716 while (f2
&& (TREE_CODE (f2
) != FIELD_DECL
13718 && integer_zerop (DECL_SIZE (f2
)))))
13719 f2
= TREE_CHAIN (f2
);
13722 /* The fields must have the same name, offset and type. */
13723 if (DECL_NONADDRESSABLE_P (f1
) != DECL_NONADDRESSABLE_P (f2
)
13724 || !gimple_compare_field_offset (f1
, f2
)
13725 || !gimple_canonical_types_compatible_p
13726 (TREE_TYPE (f1
), TREE_TYPE (f2
),
13727 trust_type_canonical
))
13731 /* If one aggregate has more fields than the other, they
13732 are not the same. */
13740 /* Consider all types with language specific trees in them mutually
13741 compatible. This is executed only from verify_type and false
13742 positives can be tolerated. */
13743 gcc_assert (!in_lto_p
);
13748 /* For OPAQUE_TYPE T, it should have only size and alignment information
13749 and its mode should be of class MODE_OPAQUE. This function verifies
13750 these properties of T match TV which is the main variant of T and TC
13751 which is the canonical of T. */
13754 verify_opaque_type (const_tree t
, tree tv
, tree tc
)
13756 gcc_assert (OPAQUE_TYPE_P (t
));
13757 gcc_assert (tv
&& tv
== TYPE_MAIN_VARIANT (tv
));
13758 gcc_assert (tc
&& tc
== TYPE_CANONICAL (tc
));
13760 /* For an opaque type T1, check if some of its properties match
13761 the corresponding ones of the other opaque type T2, emit some
13762 error messages for those inconsistent ones. */
13763 auto check_properties_for_opaque_type
= [](const_tree t1
, tree t2
,
13764 const char *kind_msg
)
13766 if (!OPAQUE_TYPE_P (t2
))
13768 error ("type %s is not an opaque type", kind_msg
);
13772 if (!OPAQUE_MODE_P (TYPE_MODE (t2
)))
13774 error ("type %s is not with opaque mode", kind_msg
);
13778 if (TYPE_MODE (t1
) != TYPE_MODE (t2
))
13780 error ("type %s differs by %<TYPE_MODE%>", kind_msg
);
13784 poly_uint64 t1_size
= tree_to_poly_uint64 (TYPE_SIZE (t1
));
13785 poly_uint64 t2_size
= tree_to_poly_uint64 (TYPE_SIZE (t2
));
13786 if (maybe_ne (t1_size
, t2_size
))
13788 error ("type %s differs by %<TYPE_SIZE%>", kind_msg
);
13792 if (TYPE_ALIGN (t1
) != TYPE_ALIGN (t2
))
13794 error ("type %s differs by %<TYPE_ALIGN%>", kind_msg
);
13798 if (TYPE_USER_ALIGN (t1
) != TYPE_USER_ALIGN (t2
))
13800 error ("type %s differs by %<TYPE_USER_ALIGN%>", kind_msg
);
13807 check_properties_for_opaque_type (t
, tv
, "variant");
13810 check_properties_for_opaque_type (t
, tc
, "canonical");
13813 /* Verify type T. */
13816 verify_type (const_tree t
)
13818 bool error_found
= false;
13819 tree mv
= TYPE_MAIN_VARIANT (t
);
13820 tree ct
= TYPE_CANONICAL (t
);
13822 if (OPAQUE_TYPE_P (t
))
13824 verify_opaque_type (t
, mv
, ct
);
13830 error ("main variant is not defined");
13831 error_found
= true;
13833 else if (mv
!= TYPE_MAIN_VARIANT (mv
))
13835 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13837 error_found
= true;
13839 else if (t
!= mv
&& !verify_type_variant (t
, mv
))
13840 error_found
= true;
13844 else if (TYPE_CANONICAL (ct
) != ct
)
13846 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13848 error_found
= true;
13850 /* Method and function types cannot be used to address memory and thus
13851 TYPE_CANONICAL really matters only for determining useless conversions.
13853 FIXME: C++ FE produce declarations of builtin functions that are not
13854 compatible with main variants. */
13855 else if (TREE_CODE (t
) == FUNCTION_TYPE
)
13858 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13859 with variably sized arrays because their sizes possibly
13860 gimplified to different variables. */
13861 && !variably_modified_type_p (ct
, NULL
)
13862 && !gimple_canonical_types_compatible_p (t
, ct
, false)
13863 && COMPLETE_TYPE_P (t
))
13865 error ("%<TYPE_CANONICAL%> is not compatible");
13867 error_found
= true;
13870 if (COMPLETE_TYPE_P (t
) && TYPE_CANONICAL (t
)
13871 && TYPE_MODE (t
) != TYPE_MODE (TYPE_CANONICAL (t
)))
13873 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
13875 error_found
= true;
13877 if (TYPE_MAIN_VARIANT (t
) == t
&& ct
&& TYPE_MAIN_VARIANT (ct
) != ct
)
13879 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
13881 debug_tree (TYPE_MAIN_VARIANT (ct
));
13882 error_found
= true;
13886 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13887 if (RECORD_OR_UNION_TYPE_P (t
))
13889 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13890 and danagle the pointer from time to time. */
13891 if (TYPE_VFIELD (t
)
13892 && TREE_CODE (TYPE_VFIELD (t
)) != FIELD_DECL
13893 && TREE_CODE (TYPE_VFIELD (t
)) != TREE_LIST
)
13895 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
13896 debug_tree (TYPE_VFIELD (t
));
13897 error_found
= true;
13900 else if (TREE_CODE (t
) == POINTER_TYPE
)
13902 if (TYPE_NEXT_PTR_TO (t
)
13903 && TREE_CODE (TYPE_NEXT_PTR_TO (t
)) != POINTER_TYPE
)
13905 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
13906 debug_tree (TYPE_NEXT_PTR_TO (t
));
13907 error_found
= true;
13910 else if (TREE_CODE (t
) == REFERENCE_TYPE
)
13912 if (TYPE_NEXT_REF_TO (t
)
13913 && TREE_CODE (TYPE_NEXT_REF_TO (t
)) != REFERENCE_TYPE
)
13915 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
13916 debug_tree (TYPE_NEXT_REF_TO (t
));
13917 error_found
= true;
13920 else if (INTEGRAL_TYPE_P (t
) || TREE_CODE (t
) == REAL_TYPE
13921 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13923 /* FIXME: The following check should pass:
13924 useless_type_conversion_p (const_cast <tree> (t),
13925 TREE_TYPE (TYPE_MIN_VALUE (t))
13926 but does not for C sizetypes in LTO. */
13929 /* Check various uses of TYPE_MAXVAL_RAW. */
13930 if (RECORD_OR_UNION_TYPE_P (t
))
13932 if (!TYPE_BINFO (t
))
13934 else if (TREE_CODE (TYPE_BINFO (t
)) != TREE_BINFO
)
13936 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
13937 debug_tree (TYPE_BINFO (t
));
13938 error_found
= true;
13940 else if (TREE_TYPE (TYPE_BINFO (t
)) != TYPE_MAIN_VARIANT (t
))
13942 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
13943 debug_tree (TREE_TYPE (TYPE_BINFO (t
)));
13944 error_found
= true;
13947 else if (TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
)
13949 if (TYPE_METHOD_BASETYPE (t
)
13950 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != RECORD_TYPE
13951 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != UNION_TYPE
)
13953 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
13954 debug_tree (TYPE_METHOD_BASETYPE (t
));
13955 error_found
= true;
13958 else if (TREE_CODE (t
) == OFFSET_TYPE
)
13960 if (TYPE_OFFSET_BASETYPE (t
)
13961 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != RECORD_TYPE
13962 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != UNION_TYPE
)
13964 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
13965 debug_tree (TYPE_OFFSET_BASETYPE (t
));
13966 error_found
= true;
13969 else if (INTEGRAL_TYPE_P (t
) || TREE_CODE (t
) == REAL_TYPE
13970 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13972 /* FIXME: The following check should pass:
13973 useless_type_conversion_p (const_cast <tree> (t),
13974 TREE_TYPE (TYPE_MAX_VALUE (t))
13975 but does not for C sizetypes in LTO. */
13977 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13979 if (TYPE_ARRAY_MAX_SIZE (t
)
13980 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t
)) != INTEGER_CST
)
13982 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
13983 debug_tree (TYPE_ARRAY_MAX_SIZE (t
));
13984 error_found
= true;
13987 else if (TYPE_MAX_VALUE_RAW (t
))
13989 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
13990 debug_tree (TYPE_MAX_VALUE_RAW (t
));
13991 error_found
= true;
13994 if (TYPE_LANG_SLOT_1 (t
) && in_lto_p
)
13996 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
13997 debug_tree (TYPE_LANG_SLOT_1 (t
));
13998 error_found
= true;
14001 /* Check various uses of TYPE_VALUES_RAW. */
14002 if (TREE_CODE (t
) == ENUMERAL_TYPE
)
14003 for (tree l
= TYPE_VALUES (t
); l
; l
= TREE_CHAIN (l
))
14005 tree value
= TREE_VALUE (l
);
14006 tree name
= TREE_PURPOSE (l
);
14008 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
14009 CONST_DECL of ENUMERAL TYPE. */
14010 if (TREE_CODE (value
) != INTEGER_CST
&& TREE_CODE (value
) != CONST_DECL
)
14012 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14013 debug_tree (value
);
14015 error_found
= true;
14017 if (TREE_CODE (TREE_TYPE (value
)) != INTEGER_TYPE
14018 && TREE_CODE (TREE_TYPE (value
)) != BOOLEAN_TYPE
14019 && !useless_type_conversion_p (const_cast <tree
> (t
), TREE_TYPE (value
)))
14021 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14023 debug_tree (value
);
14025 error_found
= true;
14027 if (TREE_CODE (name
) != IDENTIFIER_NODE
)
14029 error ("enum value name is not %<IDENTIFIER_NODE%>");
14030 debug_tree (value
);
14032 error_found
= true;
14035 else if (TREE_CODE (t
) == ARRAY_TYPE
)
14037 if (TYPE_DOMAIN (t
) && TREE_CODE (TYPE_DOMAIN (t
)) != INTEGER_TYPE
)
14039 error ("array %<TYPE_DOMAIN%> is not integer type");
14040 debug_tree (TYPE_DOMAIN (t
));
14041 error_found
= true;
14044 else if (RECORD_OR_UNION_TYPE_P (t
))
14046 if (TYPE_FIELDS (t
) && !COMPLETE_TYPE_P (t
) && in_lto_p
)
14048 error ("%<TYPE_FIELDS%> defined in incomplete type");
14049 error_found
= true;
14051 for (tree fld
= TYPE_FIELDS (t
); fld
; fld
= TREE_CHAIN (fld
))
14053 /* TODO: verify properties of decls. */
14054 if (TREE_CODE (fld
) == FIELD_DECL
)
14056 else if (TREE_CODE (fld
) == TYPE_DECL
)
14058 else if (TREE_CODE (fld
) == CONST_DECL
)
14060 else if (VAR_P (fld
))
14062 else if (TREE_CODE (fld
) == TEMPLATE_DECL
)
14064 else if (TREE_CODE (fld
) == USING_DECL
)
14066 else if (TREE_CODE (fld
) == FUNCTION_DECL
)
14070 error ("wrong tree in %<TYPE_FIELDS%> list");
14072 error_found
= true;
14076 else if (TREE_CODE (t
) == INTEGER_TYPE
14077 || TREE_CODE (t
) == BOOLEAN_TYPE
14078 || TREE_CODE (t
) == OFFSET_TYPE
14079 || TREE_CODE (t
) == REFERENCE_TYPE
14080 || TREE_CODE (t
) == NULLPTR_TYPE
14081 || TREE_CODE (t
) == POINTER_TYPE
)
14083 if (TYPE_CACHED_VALUES_P (t
) != (TYPE_CACHED_VALUES (t
) != NULL
))
14085 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14087 TYPE_CACHED_VALUES_P (t
), (void *)TYPE_CACHED_VALUES (t
));
14088 error_found
= true;
14090 else if (TYPE_CACHED_VALUES_P (t
) && TREE_CODE (TYPE_CACHED_VALUES (t
)) != TREE_VEC
)
14092 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14093 debug_tree (TYPE_CACHED_VALUES (t
));
14094 error_found
= true;
14096 /* Verify just enough of cache to ensure that no one copied it to new type.
14097 All copying should go by copy_node that should clear it. */
14098 else if (TYPE_CACHED_VALUES_P (t
))
14101 for (i
= 0; i
< TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t
)); i
++)
14102 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)
14103 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)) != t
)
14105 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14106 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
));
14107 error_found
= true;
14112 else if (TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
)
14113 for (tree l
= TYPE_ARG_TYPES (t
); l
; l
= TREE_CHAIN (l
))
14115 /* C++ FE uses TREE_PURPOSE to store initial values. */
14116 if (TREE_PURPOSE (l
) && in_lto_p
)
14118 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14120 error_found
= true;
14122 if (!TYPE_P (TREE_VALUE (l
)))
14124 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14126 error_found
= true;
14129 else if (!is_lang_specific (t
) && TYPE_VALUES_RAW (t
))
14131 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14132 debug_tree (TYPE_VALUES_RAW (t
));
14133 error_found
= true;
14135 if (TREE_CODE (t
) != INTEGER_TYPE
14136 && TREE_CODE (t
) != BOOLEAN_TYPE
14137 && TREE_CODE (t
) != OFFSET_TYPE
14138 && TREE_CODE (t
) != REFERENCE_TYPE
14139 && TREE_CODE (t
) != NULLPTR_TYPE
14140 && TREE_CODE (t
) != POINTER_TYPE
14141 && TYPE_CACHED_VALUES_P (t
))
14143 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14144 error_found
= true;
14147 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14148 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
14150 if (TREE_CODE (t
) == METHOD_TYPE
14151 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t
)) != TYPE_METHOD_BASETYPE (t
))
14153 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14154 error_found
= true;
14159 debug_tree (const_cast <tree
> (t
));
14160 internal_error ("%qs failed", __func__
);
/* Return 1 if ARG interpreted as signed in its precision is known to be
   always positive or 2 if ARG is known to be always negative, or 3 if
   ARG may be positive or negative.  */

int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  int cnt = 0;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      wide_int w = wi::sext (wi::to_wide (arg), prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  value_range r;
  while (!get_global_range_query ()->range_of_expr (r, arg)
	 || r.kind () != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
	return 2;
    }
  else
    {
      if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
	return 2;
    }
  return 3;
}
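/* For example, if value-range information shows that an SSA name lies
   in [0, 99], get_range_pos_neg returns 1; for a range of [-5, -1] it
   returns 2; and when nothing useful is known about the sign it
   returns 3.  */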
/* Return true if ARG is marked with the nonnull attribute in the
   current function signature.  */

bool
nonnull_arg_p (const_tree arg)
{
  tree t, attrs, fntype;
  unsigned HOST_WIDE_INT arg_num;

  gcc_assert (TREE_CODE (arg) == PARM_DECL
	      && (POINTER_TYPE_P (TREE_TYPE (arg))
		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));

  /* The static chain decl is always non null.  */
  if (arg == cfun->static_chain_decl)
    return true;

  /* THIS argument of method is always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
      && arg == DECL_ARGUMENTS (cfun->decl)
      && flag_delete_null_pointer_checks)
    return true;

  /* Values passed by reference are always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
      && flag_delete_null_pointer_checks)
    return true;

  fntype = TREE_TYPE (cfun->decl);
  for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);

      /* If "nonnull" wasn't specified, we know nothing about the argument.  */
      if (attrs == NULL_TREE)
	return false;

      /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
      if (TREE_VALUE (attrs) == NULL_TREE)
	return true;

      /* Get the position number for ARG in the function signature.  */
      for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
	   t;
	   t = DECL_CHAIN (t), arg_num++)
	{
	  if (t == arg)
	    break;
	}

      gcc_assert (t == arg);

      /* Now see if ARG_NUM is mentioned in the nonnull list.  */
      for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	{
	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
	    return true;
	}
    }

  return false;
}
/* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
   information.  */

location_t
set_block (location_t loc, tree block)
{
  location_t pure_loc = get_pure_location (loc);
  source_range src_range = get_range_from_loc (line_table, loc);
  unsigned discriminator = get_discriminator_from_loc (line_table, loc);
  return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block,
				discriminator);
}

location_t
set_source_range (tree expr, location_t start, location_t finish)
{
  source_range src_range;
  src_range.m_start = start;
  src_range.m_finish = finish;
  return set_source_range (expr, src_range);
}

location_t
set_source_range (tree expr, source_range src_range)
{
  if (!EXPR_P (expr))
    return UNKNOWN_LOCATION;

  location_t expr_location = EXPR_LOCATION (expr);
  location_t pure_loc = get_pure_location (expr_location);
  unsigned discriminator = get_discriminator_from_loc (expr_location);
  location_t adhoc = COMBINE_LOCATION_DATA (line_table,
					    pure_loc,
					    src_range,
					    NULL,
					    discriminator);
  SET_EXPR_LOCATION (expr, adhoc);
  return adhoc;
}
/* Return EXPR, potentially wrapped with a location wrapper node carrying
   LOC, if !CAN_HAVE_LOCATION_P (expr).

   NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
   VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.

   Wrapper nodes can be identified using location_wrapper_p.  */

tree
maybe_wrap_with_location (tree expr, location_t loc)
{
  if (expr == NULL)
    return NULL;
  if (loc == UNKNOWN_LOCATION)
    return expr;
  if (CAN_HAVE_LOCATION_P (expr))
    return expr;
  /* We should only be adding wrappers for constants and for decls,
     or for some exceptional tree nodes (e.g. BASELINK in the C++ FE).  */
  gcc_assert (CONSTANT_CLASS_P (expr)
	      || DECL_P (expr)
	      || EXCEPTIONAL_CLASS_P (expr));

  /* For now, don't add wrappers to exceptional tree nodes, to minimize
     any impact of the wrapper nodes.  */
  if (EXCEPTIONAL_CLASS_P (expr) || error_operand_p (expr))
    return expr;

  /* Compiler-generated temporary variables don't need a wrapper.  */
  if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
    return expr;

  /* If any auto_suppress_location_wrappers are active, don't create
     wrappers.  */
  if (suppress_location_wrappers > 0)
    return expr;

  tree_code code
    = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
	|| (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
       ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
  tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
  /* Mark this node as being a wrapper.  */
  EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
  return wrapper;
}

int suppress_location_wrappers;
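/* For example, wrapping an INTEGER_CST in a location wrapper produces a
   NON_LVALUE_EXPR node, whereas a STRING_CST or a decl is wrapped in a
   VIEW_CONVERT_EXPR; in both cases EXPR_LOCATION_WRAPPER_P is set on the
   wrapper so that location_wrapper_p recognizes it.  */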
/* Return the name of combined function FN, for debugging purposes.  */

const char *
combined_fn_name (combined_fn fn)
{
  if (builtin_fn_p (fn))
    {
      tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
      return IDENTIFIER_POINTER (DECL_NAME (fndecl));
    }
  else
    return internal_fn_name (as_internal_fn (fn));
}
/* Return a bitmap with a bit set corresponding to each argument in
   a function call type FNTYPE declared with attribute nonnull,
   or null if none of the function's arguments are nonnull.  The caller
   must free the bitmap.  */

bitmap
get_nonnull_args (const_tree fntype)
{
  if (fntype == NULL_TREE)
    return NULL;

  bitmap argmap = NULL;
  if (TREE_CODE (fntype) == METHOD_TYPE)
    {
      /* The this pointer in C++ non-static member functions is
	 implicitly nonnull whether or not it's declared as such.  */
      argmap = BITMAP_ALLOC (NULL);
      bitmap_set_bit (argmap, 0);
    }

  tree attrs = TYPE_ATTRIBUTES (fntype);
  if (!attrs)
    return argmap;

  /* A function declaration can specify multiple attribute nonnull,
     each with zero or more arguments.  The loop below creates a bitmap
     representing a union of all the arguments.  An empty (but non-null)
     bitmap means that all arguments have been declared nonnull.  */
  for ( ; attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);
      if (!attrs)
	break;

      if (!argmap)
	argmap = BITMAP_ALLOC (NULL);

      if (!TREE_VALUE (attrs))
	{
	  /* Clear the bitmap in case a previous attribute nonnull
	     set it and this one overrides it for all arguments.  */
	  bitmap_clear (argmap);
	  return argmap;
	}

      /* Iterate over the indices of the format arguments declared nonnull
	 and set a bit for each.  */
      for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
	{
	  unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
	  bitmap_set_bit (argmap, val);
	}
    }

  return argmap;
}
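/* As an illustration, for a hypothetical declaration

     __attribute__ ((nonnull (1, 3))) void f (void *, void *, void *);

   the returned bitmap has bits 0 and 2 set (the attribute arguments are
   1-based), while

     __attribute__ ((nonnull)) void g (void *, void *);

   yields an empty but non-null bitmap, meaning every argument is declared
   nonnull.  */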
/* Returns true if TYPE is a type where it and all of its subobjects
   (recursively) are of structure, union, or array type.  */

bool
is_empty_type (const_tree type)
{
  if (RECORD_OR_UNION_TYPE_P (type))
    {
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL
	    && !DECL_PADDING_P (field)
	    && !is_empty_type (TREE_TYPE (field)))
	  return false;
      return true;
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    return (integer_minus_onep (array_type_nelts (type))
	    || TYPE_DOMAIN (type) == NULL_TREE
	    || is_empty_type (TREE_TYPE (type)));
  return false;
}

/* Implement TARGET_EMPTY_RECORD_P.  Return true if TYPE is an empty type
   that shouldn't be passed via stack.  */

bool
default_is_empty_record (const_tree type)
{
  if (!abi_version_at_least (12))
    return false;

  if (type == error_mark_node)
    return false;

  if (TREE_ADDRESSABLE (type))
    return false;

  return is_empty_type (TYPE_MAIN_VARIANT (type));
}
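/* For example, "struct E {};" and "struct D { struct E e[4]; };" are
   empty types for this purpose, while "struct N { char c; };" is not,
   since it contains a non-padding scalar field.  */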
/* Determine whether TYPE is a structure with a flexible array member,
   or a union containing such a structure (possibly recursively).  */

bool
flexible_array_type_p (const_tree type)
{
  tree x, last;
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      last = NULL_TREE;
      for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
	if (TREE_CODE (x) == FIELD_DECL)
	  last = x;
      if (last == NULL_TREE)
	return false;
      if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
	  && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
	  && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
	  && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
	return true;
      return false;
    case UNION_TYPE:
      for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
	{
	  if (TREE_CODE (x) == FIELD_DECL
	      && flexible_array_type_p (TREE_TYPE (x)))
	    return true;
	}
      return false;
    default:
      return false;
    }
}
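/* As an illustration:

     struct A { int n; char data[]; };   // has a flexible array member
     union U { struct A a; int i; };     // union containing one

   flexible_array_type_p returns true for both struct A and union U, and
   false for a structure whose last member is an array of known size.  */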
/* Like int_size_in_bytes, but handle empty records specially.  */

HOST_WIDE_INT
arg_int_size_in_bytes (const_tree type)
{
  return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
}

/* Like size_in_bytes, but handle empty records specially.  */

tree
arg_size_in_bytes (const_tree type)
{
  return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
}
/* Return true if an expression with CODE has to have the same result type as
   its first operand.  */

bool
expr_type_first_operand_type_p (tree_code code)
{
  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
      return true;

    default:
      return false;
    }
}
/* Return a typenode for the "standard" C type with a given name.  */
tree
get_typenode_from_name (const char *name)
{
  if (name == NULL || *name == '\0')
    return NULL_TREE;

  if (strcmp (name, "char") == 0)
    return char_type_node;
  if (strcmp (name, "unsigned char") == 0)
    return unsigned_char_type_node;
  if (strcmp (name, "signed char") == 0)
    return signed_char_type_node;

  if (strcmp (name, "short int") == 0)
    return short_integer_type_node;
  if (strcmp (name, "short unsigned int") == 0)
    return short_unsigned_type_node;

  if (strcmp (name, "int") == 0)
    return integer_type_node;
  if (strcmp (name, "unsigned int") == 0)
    return unsigned_type_node;

  if (strcmp (name, "long int") == 0)
    return long_integer_type_node;
  if (strcmp (name, "long unsigned int") == 0)
    return long_unsigned_type_node;

  if (strcmp (name, "long long int") == 0)
    return long_long_integer_type_node;
  if (strcmp (name, "long long unsigned int") == 0)
    return long_long_unsigned_type_node;

  gcc_unreachable ();
}

/* List of pointer types used to declare builtins before we have seen their
   real declarations.

   Keep the size up to date in tree.h !  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};

/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
/* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
   parameter default to false and that weeds out error_mark_node.  */

bool
verify_type_context (location_t loc, type_context_kind context,
		     const_tree type, bool silent_p)
{
  if (type == error_mark_node)
    return true;

  gcc_assert (TYPE_P (type));
  return (!targetm.verify_type_context
	  || targetm.verify_type_context (loc, context, type, silent_p));
}
/* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
   delete operators.  Return false if they may or may not name such
   a pair and, when nonnull, set *PCERTAIN to true if they certainly
   do not.  */

bool
valid_new_delete_pair_p (tree new_asm, tree delete_asm,
			 bool *pcertain /* = NULL */)
{
  bool certain;
  if (!pcertain)
    pcertain = &certain;

  const char *new_name = IDENTIFIER_POINTER (new_asm);
  const char *delete_name = IDENTIFIER_POINTER (delete_asm);
  unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
  unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);

  /* The following failures are due to invalid names so they're not
     considered certain mismatches.  */
  *pcertain = false;

  if (new_len < 5 || delete_len < 6)
    return false;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (new_len < 4 || delete_len < 5)
    return false;

  /* The following failures are due to names of user-defined operators
     so they're also not considered certain mismatches.  */

  /* *_len is now just the length after initial underscores.  */
  if (new_name[0] != 'Z' || new_name[1] != 'n')
    return false;
  if (delete_name[0] != 'Z' || delete_name[1] != 'd')
    return false;

  /* The following failures are certain mismatches.  */
  *pcertain = true;

  /* _Znw must match _Zdl, _Zna must match _Zda.  */
  if ((new_name[2] != 'w' || delete_name[2] != 'l')
      && (new_name[2] != 'a' || delete_name[2] != 'a'))
    return false;
  /* 'j', 'm' and 'y' correspond to size_t.  */
  if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
    return false;
  if (delete_name[3] != 'P' || delete_name[4] != 'v')
    return false;
  if (new_len == 4
      || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
    {
      /* _ZnXY or _ZnXYRKSt9nothrow_t matches
	 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t.  */
      if (delete_len == 5)
	return true;
      if (delete_len == 6 && delete_name[5] == new_name[3])
	return true;
      if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
	return true;
    }
  else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
	   || (new_len == 33
	       && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
    {
      /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
	 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
	 _ZdXPvSt11align_val_tRKSt9nothrow_t.  */
      if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
	return true;
      if (delete_len == 21
	  && delete_name[5] == new_name[3]
	  && !memcmp (delete_name + 6, "St11align_val_t", 15))
	return true;
      if (delete_len == 34
	  && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
	return true;
    }

  /* The negative result is conservative.  */
  *pcertain = false;
  return false;
}
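/* For example, "_Znwm" (operator new (size_t)) forms a valid pair with
   "_ZdlPv" (operator delete (void*)) and with the sized form "_ZdlPvm",
   and the array form "_Znam" pairs with "_ZdaPv"; mixing the scalar and
   array forms, such as "_Znwm" with "_ZdaPv", is reported as a certain
   mismatch.  */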
/* Return the zero-based number corresponding to the argument being
   deallocated if FNDECL is a deallocation function or an out-of-bounds
   value if it isn't.  */

unsigned
fndecl_dealloc_argno (tree fndecl)
{
  /* A call to operator delete isn't recognized as one to a built-in.  */
  if (DECL_IS_OPERATOR_DELETE_P (fndecl))
    {
      if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
	return 0;

      /* Avoid placement delete that's not been inlined.  */
      tree fname = DECL_ASSEMBLER_NAME (fndecl);
      if (id_equal (fname, "_ZdlPvS_")	    // ordinary form
	  || id_equal (fname, "_ZdaPvS_"))  // array form
	return UINT_MAX;
      return 0;
    }

  /* TODO: Handle user-defined functions with attribute malloc?  Handle
     known non-built-ins like fopen?  */
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_FREE:
	case BUILT_IN_REALLOC:
	  return 0;
	default:
	  break;
	}
      return UINT_MAX;
    }

  tree attrs = DECL_ATTRIBUTES (fndecl);
  if (!attrs)
    return UINT_MAX;

  for (tree atfree = attrs;
       (atfree = lookup_attribute ("*dealloc", atfree));
       atfree = TREE_CHAIN (atfree))
    {
      tree alloc = TREE_VALUE (atfree);
      if (!alloc)
	continue;

      tree pos = TREE_CHAIN (alloc);
      if (!pos)
	return 0;

      pos = TREE_VALUE (pos);
      return TREE_INT_CST_LOW (pos) - 1;
    }

  return UINT_MAX;
}
14830 nonstring, return a decl for that array or pointer and set *REF
14831 to the referenced enclosing object or pointer. Otherwise return
14835 get_attr_nonstring_decl (tree expr
, tree
*ref
)
14838 tree var
= NULL_TREE
;
14839 if (TREE_CODE (decl
) == SSA_NAME
)
14841 gimple
*def
= SSA_NAME_DEF_STMT (decl
);
14843 if (is_gimple_assign (def
))
14845 tree_code code
= gimple_assign_rhs_code (def
);
14846 if (code
== ADDR_EXPR
14847 || code
== COMPONENT_REF
14848 || code
== VAR_DECL
)
14849 decl
= gimple_assign_rhs1 (def
);
14852 var
= SSA_NAME_VAR (decl
);
14855 if (TREE_CODE (decl
) == ADDR_EXPR
)
14856 decl
= TREE_OPERAND (decl
, 0);
14858 /* To simplify calling code, store the referenced DECL regardless of
14859 the attribute determined below, but avoid storing the SSA_NAME_VAR
14860 obtained above (it's not useful for dataflow purposes). */
14864 /* Use the SSA_NAME_VAR that was determined above to see if it's
14865 declared nonstring. Otherwise drill down into the referenced
14869 else if (TREE_CODE (decl
) == ARRAY_REF
)
14870 decl
= TREE_OPERAND (decl
, 0);
14871 else if (TREE_CODE (decl
) == COMPONENT_REF
)
14872 decl
= TREE_OPERAND (decl
, 1);
14873 else if (TREE_CODE (decl
) == MEM_REF
)
14874 return get_attr_nonstring_decl (TREE_OPERAND (decl
, 0), ref
);
14877 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl
)))
/* Return the length of the attribute name strings if the ARGLIST chain
   names more than one clone, or -1 otherwise.  */

int
get_target_clone_attr_len (tree arglist)
{
  tree arg;
  int str_len_sum = 0;
  int argnum = 0;

  for (arg = arglist; arg; arg = TREE_CHAIN (arg))
    {
      const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
      size_t len = strlen (str);
      str_len_sum += len + 1;
      for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
	argnum++;
      argnum++;
    }
  if (argnum <= 1)
    return -1;
  return str_len_sum;
}

void
tree_cc_finalize (void)
{
  clear_nonstandard_integer_type_cache ();
}
#if CHECKING_P

namespace selftest {

/* Selftests for tree.  */

/* Verify that integer constants are sane.  */

static void
test_integer_constants ()
{
  ASSERT_TRUE (integer_type_node != NULL);
  ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);

  tree type = integer_type_node;

  tree zero = build_zero_cst (type);
  ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
  ASSERT_EQ (type, TREE_TYPE (zero));

  tree one = build_int_cst (type, 1);
  ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
  ASSERT_EQ (type, TREE_TYPE (one));
}

/* Verify identifiers.  */

static void
test_identifiers ()
{
  tree identifier = get_identifier ("foo");
  ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
  ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
}

/* Verify LABEL_DECL.  */

static void
test_labels ()
{
  tree identifier = get_identifier ("err");
  tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
				identifier, void_type_node);
  ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
  ASSERT_FALSE (FORCED_LABEL (label_decl));
}
14960 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14961 are given by VALS. */
14964 build_vector (tree type
, const vec
<tree
> &vals MEM_STAT_DECL
)
14966 gcc_assert (known_eq (vals
.length (), TYPE_VECTOR_SUBPARTS (type
)));
14967 tree_vector_builder
builder (type
, vals
.length (), 1);
14968 builder
.splice (vals
);
14969 return builder
.build ();
14972 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14975 check_vector_cst (const vec
<tree
> &expected
, tree actual
)
14977 ASSERT_KNOWN_EQ (expected
.length (),
14978 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual
)));
14979 for (unsigned int i
= 0; i
< expected
.length (); ++i
)
14980 ASSERT_EQ (wi::to_wide (expected
[i
]),
14981 wi::to_wide (vector_cst_elt (actual
, i
)));
14984 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14985 and that its elements match EXPECTED. */
14988 check_vector_cst_duplicate (const vec
<tree
> &expected
, tree actual
,
14989 unsigned int npatterns
)
14991 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
14992 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual
));
14993 ASSERT_EQ (npatterns
, vector_cst_encoded_nelts (actual
));
14994 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual
));
14995 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual
));
14996 check_vector_cst (expected
, actual
);
/* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
   and NPATTERNS background elements, and that its elements match
   EXPECTED.  */

static void
check_vector_cst_fill (const vec<tree> &expected, tree actual,
                       unsigned int npatterns)
{
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
/* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
   and that its elements match EXPECTED.  */

static void
check_vector_cst_stepped (const vec<tree> &expected, tree actual,
                          unsigned int npatterns)
{
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}
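
/* A summary sketch, added for exposition and not part of the original
   selftests: the three checkers above differ only in the expected
   VECTOR_CST_NELTS_PER_PATTERN -- 1 for a duplicated value, 2 for a
   "fill" (per-pattern foreground then background element), 3 for a
   stepped series -- and in every case the number of explicitly encoded
   elements is npatterns * nelts-per-pattern.  The helper name is invented
   for the example.  */

static ATTRIBUTE_UNUSED const char *
describe_vector_cst_encoding (tree t)
{
  gcc_assert (TREE_CODE (t) == VECTOR_CST);
  gcc_assert (vector_cst_encoded_nelts (t)
              == (VECTOR_CST_NPATTERNS (t)
                  * VECTOR_CST_NELTS_PER_PATTERN (t)));
  if (VECTOR_CST_DUPLICATE_P (t))
    return "duplicate";
  if (VECTOR_CST_STEPPED_P (t))
    return "stepped";
  return "per-pattern fill";
}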
/* Test the creation of VECTOR_CSTs.  */

static void
test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
{
  auto_vec<tree, 8> elements (8);
  elements.quick_grow (8);
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* Test a simple linear series with a base of 0 and a step of 1:
     { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
  for (unsigned int i = 0; i < 8; ++i)
    elements[i] = build_int_cst (element_type, i);
  tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try the same with the first element replaced by 100:
     { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
  elements[0] = build_int_cst (element_type, 100);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a series that wraps around.
     { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a downward series:
     { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, 80 - i);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try two interleaved series with different bases and steps:
     { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
  elements[1] = build_int_cst (element_type, 53);
  for (unsigned int i = 2; i < 8; i += 2)
    {
      elements[i] = build_int_cst (element_type, 70 - i * 2);
      elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
    }
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a duplicated value:
     { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = elements[0];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 1);

  /* Try an interleaved duplicated value:
     { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
  elements[1] = build_int_cst (element_type, 55);
  for (unsigned int i = 2; i < 8; ++i)
    elements[i] = elements[i - 2];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 2);

  /* Try a duplicated value with 2 exceptions:
     { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
  elements[0] = build_int_cst (element_type, 41);
  elements[1] = build_int_cst (element_type, 97);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 2);

  /* Try with and without a step:
     { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
  for (unsigned int i = 3; i < 8; i += 2)
    elements[i] = build_int_cst (element_type, i * 7);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a fully-general constant:
     { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
  elements[5] = build_int_cst (element_type, 9990);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 4);
}
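
/* A worked example, added for exposition and not part of the original
   selftests: for the first series above, { 0, 1, ..., 7 }, the canonical
   form is a single pattern with three encoded elements { 0, 1, 2 }; every
   later element is implied by the step between the second and third.  The
   sketch below builds that encoding directly instead of going through
   build_vector; the function name is invented for the example.  */

static ATTRIBUTE_UNUSED void
example_stepped_encoding ()
{
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* One pattern, three elements per pattern: base0 = 0, base1 = 1,
     step = 1.  */
  tree_vector_builder builder (vector_type, 1, 3);
  for (unsigned int i = 0; i < 3; ++i)
    builder.quick_push (build_int_cst (element_type, i));
  tree vector = builder.build ();

  ASSERT_EQ (1, VECTOR_CST_NPATTERNS (vector));
  ASSERT_EQ (3, vector_cst_encoded_nelts (vector));
  for (unsigned int i = 0; i < 8; ++i)
    ASSERT_EQ (i, tree_to_uhwi (vector_cst_elt (vector, i)));
}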
/* Verify that STRIP_NOPS (NODE) is EXPECTED.
   Helper function for test_location_wrappers, to deal with STRIP_NOPS
   modifying its argument in-place.  */

static void
check_strip_nops (tree node, tree expected)
{
  STRIP_NOPS (node);
  ASSERT_EQ (expected, node);
}
/* Verify location wrappers.  */

static void
test_location_wrappers ()
{
  location_t loc = BUILTINS_LOCATION;

  ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));

  /* Wrapping a constant.  */
  tree int_cst = build_int_cst (integer_type_node, 42);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
  ASSERT_FALSE (location_wrapper_p (int_cst));

  tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
  ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));

  /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
  ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));

  /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
  tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
  ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
  ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));

  /* Wrapping a STRING_CST.  */
  tree string_cst = build_string (4, "foo");
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
  ASSERT_FALSE (location_wrapper_p (string_cst));

  tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
  ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
  ASSERT_EQ (string_cst,
             tree_strip_any_location_wrapper (wrapped_string_cst));

  /* Wrapping a variable.  */
  tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                             get_identifier ("some_int_var"),
                             integer_type_node);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
  ASSERT_FALSE (location_wrapper_p (int_var));

  tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
  ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));

  /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
     wrapper.  */
  tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
  ASSERT_FALSE (location_wrapper_p (r_cast));
  ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));

  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}
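
/* An illustrative sketch, added for exposition and not part of the original
   selftests: the usual consumer-side pattern for the wrappers exercised
   above.  Predicates such as integer_zerop already look through location
   wrappers (as test_predicates below verifies), but TREE_CODE does not, so
   code that inspects tree codes strips the wrapper first while keeping the
   original tree around for its EXPR_LOCATION.  The function name is
   invented for the example.  */

static ATTRIBUTE_UNUSED bool
example_is_literal_zero (tree expr)
{
  tree stripped = tree_strip_any_location_wrapper (expr);
  return TREE_CODE (stripped) == INTEGER_CST && integer_zerop (stripped);
}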
/* Test various tree predicates.  Verify that location wrappers don't
   affect the results.  */

static void
test_predicates ()
{
  /* Build various constants and wrappers around them.  */

  location_t loc = BUILTINS_LOCATION;

  tree i_0 = build_int_cst (integer_type_node, 0);
  tree wr_i_0 = maybe_wrap_with_location (i_0, loc);

  tree i_1 = build_int_cst (integer_type_node, 1);
  tree wr_i_1 = maybe_wrap_with_location (i_1, loc);

  tree i_m1 = build_int_cst (integer_type_node, -1);
  tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);

  tree f_0 = build_real_from_int_cst (float_type_node, i_0);
  tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
  tree f_1 = build_real_from_int_cst (float_type_node, i_1);
  tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
  tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
  tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);

  tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
  tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
  tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);

  tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
  tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
  tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);

  /* TODO: vector constants.  */
  /* Test integer_onep.  */
  ASSERT_FALSE (integer_onep (i_0));
  ASSERT_FALSE (integer_onep (wr_i_0));
  ASSERT_TRUE (integer_onep (i_1));
  ASSERT_TRUE (integer_onep (wr_i_1));
  ASSERT_FALSE (integer_onep (i_m1));
  ASSERT_FALSE (integer_onep (wr_i_m1));
  ASSERT_FALSE (integer_onep (f_0));
  ASSERT_FALSE (integer_onep (wr_f_0));
  ASSERT_FALSE (integer_onep (f_1));
  ASSERT_FALSE (integer_onep (wr_f_1));
  ASSERT_FALSE (integer_onep (f_m1));
  ASSERT_FALSE (integer_onep (wr_f_m1));
  ASSERT_FALSE (integer_onep (c_i_0));
  ASSERT_TRUE (integer_onep (c_i_1));
  ASSERT_FALSE (integer_onep (c_i_m1));
  ASSERT_FALSE (integer_onep (c_f_0));
  ASSERT_FALSE (integer_onep (c_f_1));
  ASSERT_FALSE (integer_onep (c_f_m1));

  /* Test integer_zerop.  */
  ASSERT_TRUE (integer_zerop (i_0));
  ASSERT_TRUE (integer_zerop (wr_i_0));
  ASSERT_FALSE (integer_zerop (i_1));
  ASSERT_FALSE (integer_zerop (wr_i_1));
  ASSERT_FALSE (integer_zerop (i_m1));
  ASSERT_FALSE (integer_zerop (wr_i_m1));
  ASSERT_FALSE (integer_zerop (f_0));
  ASSERT_FALSE (integer_zerop (wr_f_0));
  ASSERT_FALSE (integer_zerop (f_1));
  ASSERT_FALSE (integer_zerop (wr_f_1));
  ASSERT_FALSE (integer_zerop (f_m1));
  ASSERT_FALSE (integer_zerop (wr_f_m1));
  ASSERT_TRUE (integer_zerop (c_i_0));
  ASSERT_FALSE (integer_zerop (c_i_1));
  ASSERT_FALSE (integer_zerop (c_i_m1));
  ASSERT_FALSE (integer_zerop (c_f_0));
  ASSERT_FALSE (integer_zerop (c_f_1));
  ASSERT_FALSE (integer_zerop (c_f_m1));

  /* Test integer_all_onesp.  */
  ASSERT_FALSE (integer_all_onesp (i_0));
  ASSERT_FALSE (integer_all_onesp (wr_i_0));
  ASSERT_FALSE (integer_all_onesp (i_1));
  ASSERT_FALSE (integer_all_onesp (wr_i_1));
  ASSERT_TRUE (integer_all_onesp (i_m1));
  ASSERT_TRUE (integer_all_onesp (wr_i_m1));
  ASSERT_FALSE (integer_all_onesp (f_0));
  ASSERT_FALSE (integer_all_onesp (wr_f_0));
  ASSERT_FALSE (integer_all_onesp (f_1));
  ASSERT_FALSE (integer_all_onesp (wr_f_1));
  ASSERT_FALSE (integer_all_onesp (f_m1));
  ASSERT_FALSE (integer_all_onesp (wr_f_m1));
  ASSERT_FALSE (integer_all_onesp (c_i_0));
  ASSERT_FALSE (integer_all_onesp (c_i_1));
  ASSERT_FALSE (integer_all_onesp (c_i_m1));
  ASSERT_FALSE (integer_all_onesp (c_f_0));
  ASSERT_FALSE (integer_all_onesp (c_f_1));
  ASSERT_FALSE (integer_all_onesp (c_f_m1));
  /* Test integer_minus_onep.  */
  ASSERT_FALSE (integer_minus_onep (i_0));
  ASSERT_FALSE (integer_minus_onep (wr_i_0));
  ASSERT_FALSE (integer_minus_onep (i_1));
  ASSERT_FALSE (integer_minus_onep (wr_i_1));
  ASSERT_TRUE (integer_minus_onep (i_m1));
  ASSERT_TRUE (integer_minus_onep (wr_i_m1));
  ASSERT_FALSE (integer_minus_onep (f_0));
  ASSERT_FALSE (integer_minus_onep (wr_f_0));
  ASSERT_FALSE (integer_minus_onep (f_1));
  ASSERT_FALSE (integer_minus_onep (wr_f_1));
  ASSERT_FALSE (integer_minus_onep (f_m1));
  ASSERT_FALSE (integer_minus_onep (wr_f_m1));
  ASSERT_FALSE (integer_minus_onep (c_i_0));
  ASSERT_FALSE (integer_minus_onep (c_i_1));
  ASSERT_TRUE (integer_minus_onep (c_i_m1));
  ASSERT_FALSE (integer_minus_onep (c_f_0));
  ASSERT_FALSE (integer_minus_onep (c_f_1));
  ASSERT_FALSE (integer_minus_onep (c_f_m1));

  /* Test integer_each_onep.  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));
  /* Test real_zerop.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));

  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));
  /* Test zerop.  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));
  /* Test tree_expr_nonnegative_p.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));

  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}
/* Check that string escaping works correctly.  */

static void
test_escaped_strings (void)
{
  int saved_cutoff;
  escaped_string msg;

  msg.escape (NULL);
  /* ASSERT_STREQ does not accept NULL as a valid test
     result, so we have to use ASSERT_EQ instead.  */
  ASSERT_EQ (NULL, (const char *) msg);

  msg.escape ("");
  ASSERT_STREQ ("", (const char *) msg);

  msg.escape ("foobar");
  ASSERT_STREQ ("foobar", (const char *) msg);

  /* Ensure that we have -fmessage-length set to 0.  */
  saved_cutoff = pp_line_cutoff (global_dc->printer);
  pp_line_cutoff (global_dc->printer) = 0;

  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);

  /* Now repeat the tests with -fmessage-length set to 5.  */
  pp_line_cutoff (global_dc->printer) = 5;

  /* Note that the newline is not translated into an escape.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);

  /* Restore the original message length setting.  */
  pp_line_cutoff (global_dc->printer) = saved_cutoff;
}
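
/* An illustrative sketch, added for exposition and not part of the original
   selftests: the typical way escaped_string is used outside of a test --
   escape a user-provided string once, hand the result to a diagnostic, and
   let the object free its copy when it goes out of scope.  The function
   name, parameters and message text are invented for the example.  */

static ATTRIBUTE_UNUSED void
example_escaped_string_use (tree decl, const char *user_text)
{
  escaped_string text;
  text.escape (user_text);
  warning_at (DECL_SOURCE_LOCATION (decl), 0,
              "ignoring attribute argument %qs", (const char *) text);
}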
/* Run all of the selftests within this file.  */

void
tree_cc_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}

} // namespace selftest

#endif /* CHECKING_P */

#include "gt-tree.h"