/* Language-independent node constructors for parse phase of GNU compiler.
   Copyright (C) 1987-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains the low level primitives for operating on tree nodes,
   including allocation, list operations, interning of identifiers,
   construction of data type nodes and statement nodes,
   and construction of type conversion nodes.  It also contains
   tables indexed by tree code that describe how to take apart
   nodes of that code.

   It is intended to be language-independent but can occasionally
   call language-dependent routines.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "toplev.h"  /* get_random_seed */
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "langhooks-def.h"
#include "tree-diagnostic.h"
#include "print-tree.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-vector-builder.h"
#include "gimple-fold.h"
#include "escaped_string.h"
#include "gimple-range.h"
#include "gomp-constants.h"
/* Names of tree components.
   Used for printing out the tree and error messages.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries.  */

const char *const tree_code_class_strings[] =
/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);
/* Statistics-gathering stuff.  */

static uint64_t tree_code_counts[MAX_TREE_CODES];
uint64_t tree_node_counts[(int) all_kinds];
uint64_t tree_node_sizes[(int) all_kinds];
/* Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  */
static GTY(()) unsigned next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;
/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash
{
  unsigned long hash;
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000
struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
/* Class and variable for making sure that there is a single POLY_INT_CST
   for a given value.  */
struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  typedef std::pair<tree, const poly_wide_int *> compare_type;
  static hashval_t hash (tree t);
  static bool equal (tree x, const compare_type &y);
};

static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatedly.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
/* General tree->tree mapping structure for use in hash tables.  */

hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;

hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);

tree global_trees[TI_MAX];
tree integer_types[itk_none];

bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees[NUM_INT_N_ENTS];

bool tree_contains_struct[MAX_TREE_CODES][64];
/* Number of operands for each OMP clause.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  1, /* OMP_CLAUSE_AFFINITY  */
  2, /* OMP_CLAUSE_ALIGNED  */
  3, /* OMP_CLAUSE_ALLOCATE  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_ENTER  */
  1, /* OMP_CLAUSE_LINK  */
  1, /* OMP_CLAUSE_DETACH  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_HAS_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_DOACROSS  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_SELF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  2, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE_FILTER  */
  1, /* OMP_CLAUSE_INDIRECT  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  0, /* OMP_CLAUSE_IF_PRESENT  */
  0, /* OMP_CLAUSE_FINALIZE  */
  0, /* OMP_CLAUSE_NOHOST  */
};

const char * const omp_clause_code_name[] =
/* Unless specific to OpenACC, we tend to internally maintain OpenMP-centric
   clause names, but for use in diagnostics etc. would like to use the "user"
   name.  */

const char *
user_omp_clause_code_name (tree clause, bool oacc)
{
  /* For OpenACC, the 'OMP_CLAUSE_MAP_KIND' of an 'OMP_CLAUSE_MAP' is used to
     distinguish clauses as seen by the user.  See also where front ends do
     'build_omp_clause' with 'OMP_CLAUSE_MAP'.  */
  if (oacc && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP)
    switch (OMP_CLAUSE_MAP_KIND (clause))
      {
      case GOMP_MAP_FORCE_ALLOC:
      case GOMP_MAP_ALLOC: return "create";
      case GOMP_MAP_FORCE_TO:
      case GOMP_MAP_TO: return "copyin";
      case GOMP_MAP_FORCE_FROM:
      case GOMP_MAP_FROM: return "copyout";
      case GOMP_MAP_FORCE_TOFROM:
      case GOMP_MAP_TOFROM: return "copy";
      case GOMP_MAP_RELEASE: return "delete";
      case GOMP_MAP_FORCE_PRESENT: return "present";
      case GOMP_MAP_ATTACH: return "attach";
      case GOMP_MAP_FORCE_DETACH:
      case GOMP_MAP_DETACH: return "detach";
      case GOMP_MAP_DEVICE_RESIDENT: return "device_resident";
      case GOMP_MAP_LINK: return "link";
      case GOMP_MAP_FORCE_DEVICEPTR: return "deviceptr";
      default: break;
      }

  return omp_clause_code_name[OMP_CLAUSE_CODE (clause)];
}
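/* Illustrative sketch: an OpenACC 'copyin (x)' clause is represented
   internally as an OMP_CLAUSE_MAP whose map kind is GOMP_MAP_TO, so

     user_omp_clause_code_name (clause, true)   yields "copyin"
     user_omp_clause_code_name (clause, false)  yields "map"

   i.e. OpenACC diagnostics report the user-level spelling while OpenMP
   diagnostics keep the internal clause name.  */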
/* Return the tree node structure used by tree code CODE.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      switch (code)
	{
	case CONST_DECL: return TS_CONST_DECL;
	case DEBUG_EXPR_DECL: return TS_DECL_WRTL;
	case FIELD_DECL: return TS_FIELD_DECL;
	case FUNCTION_DECL: return TS_FUNCTION_DECL;
	case LABEL_DECL: return TS_LABEL_DECL;
	case PARM_DECL: return TS_PARM_DECL;
	case RESULT_DECL: return TS_RESULT_DECL;
	case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
	case TYPE_DECL: return TS_TYPE_DECL;
	case VAR_DECL: return TS_VAR_DECL;
	default: return TS_DECL_NON_COMMON;
	}

    case tcc_type: return TS_TYPE_NON_COMMON;

    case tcc_vl_exp: return TS_EXP;

    default:  /* tcc_constant and tcc_exceptional */
      break;
    }

  switch (code)
    {
      /* tcc_constant cases.  */
    case COMPLEX_CST: return TS_COMPLEX;
    case FIXED_CST: return TS_FIXED_CST;
    case INTEGER_CST: return TS_INT_CST;
    case POLY_INT_CST: return TS_POLY_INT_CST;
    case REAL_CST: return TS_REAL_CST;
    case STRING_CST: return TS_STRING;
    case VECTOR_CST: return TS_VECTOR;
    case VOID_CST: return TS_TYPED;

      /* tcc_exceptional cases.  */
    case BLOCK: return TS_BLOCK;
    case CONSTRUCTOR: return TS_CONSTRUCTOR;
    case ERROR_MARK: return TS_COMMON;
    case IDENTIFIER_NODE: return TS_IDENTIFIER;
    case OMP_CLAUSE: return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
    case PLACEHOLDER_EXPR: return TS_COMMON;
    case SSA_NAME: return TS_SSA_NAME;
    case STATEMENT_LIST: return TS_STATEMENT_LIST;
    case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
    case TREE_BINFO: return TS_BINFO;
    case TREE_LIST: return TS_LIST;
    case TREE_VEC: return TS_VEC;

    default:
      gcc_unreachable ();
    }
}
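/* For example (illustrative), based on the cases above:

     tree_node_structure_for_code (VAR_DECL)   == TS_VAR_DECL
     tree_node_structure_for_code (STRING_CST) == TS_STRING

   initialize_tree_contains_struct below uses this result as the
   starting point when filling in tree_contains_struct.  */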
532 /* Initialize tree_contains_struct to describe the hierarchy of tree
536 initialize_tree_contains_struct (void)
540 for (i
= ERROR_MARK
; i
< LAST_AND_UNUSED_TREE_CODE
; i
++)
543 enum tree_node_structure_enum ts_code
;
545 code
= (enum tree_code
) i
;
546 ts_code
= tree_node_structure_for_code (code
);
548 /* Mark the TS structure itself. */
549 tree_contains_struct
[code
][ts_code
] = 1;
551 /* Mark all the structures that TS is derived from. */
556 case TS_OPTIMIZATION
:
557 case TS_TARGET_OPTION
:
563 case TS_POLY_INT_CST
:
572 case TS_STATEMENT_LIST
:
573 MARK_TS_TYPED (code
);
577 case TS_DECL_MINIMAL
:
583 MARK_TS_COMMON (code
);
586 case TS_TYPE_WITH_LANG_SPECIFIC
:
587 MARK_TS_TYPE_COMMON (code
);
590 case TS_TYPE_NON_COMMON
:
591 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code
);
595 MARK_TS_DECL_MINIMAL (code
);
600 MARK_TS_DECL_COMMON (code
);
603 case TS_DECL_NON_COMMON
:
604 MARK_TS_DECL_WITH_VIS (code
);
607 case TS_DECL_WITH_VIS
:
611 MARK_TS_DECL_WRTL (code
);
615 MARK_TS_DECL_COMMON (code
);
619 MARK_TS_DECL_WITH_VIS (code
);
623 case TS_FUNCTION_DECL
:
624 MARK_TS_DECL_NON_COMMON (code
);
627 case TS_TRANSLATION_UNIT_DECL
:
628 MARK_TS_DECL_COMMON (code
);
636 /* Basic consistency checks for attributes used in fold. */
637 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_NON_COMMON
]);
638 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_NON_COMMON
]);
639 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_COMMON
]);
640 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_COMMON
]);
641 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_COMMON
]);
642 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_COMMON
]);
643 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_COMMON
]);
644 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_COMMON
]);
645 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_COMMON
]);
646 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_COMMON
]);
647 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_COMMON
]);
648 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WRTL
]);
649 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_WRTL
]);
650 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_WRTL
]);
651 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WRTL
]);
652 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_WRTL
]);
653 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_MINIMAL
]);
654 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_MINIMAL
]);
655 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_MINIMAL
]);
656 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_MINIMAL
]);
657 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_MINIMAL
]);
658 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_MINIMAL
]);
659 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_MINIMAL
]);
660 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_MINIMAL
]);
661 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_MINIMAL
]);
662 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WITH_VIS
]);
663 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WITH_VIS
]);
664 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_WITH_VIS
]);
665 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_VAR_DECL
]);
666 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_FIELD_DECL
]);
667 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_PARM_DECL
]);
668 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_LABEL_DECL
]);
669 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_RESULT_DECL
]);
670 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_CONST_DECL
]);
671 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_TYPE_DECL
]);
672 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_FUNCTION_DECL
]);
673 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_MINIMAL
]);
674 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_COMMON
]);
675 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_MINIMAL
]);
676 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_COMMON
]);
685 /* Initialize the hash table of types. */
687 = hash_table
<type_cache_hasher
>::create_ggc (TYPE_HASH_INITIAL_SIZE
);
690 = hash_table
<tree_decl_map_cache_hasher
>::create_ggc (512);
693 = hash_table
<tree_decl_map_cache_hasher
>::create_ggc (512);
695 int_cst_hash_table
= hash_table
<int_cst_hasher
>::create_ggc (1024);
697 poly_int_cst_hash_table
= hash_table
<poly_int_cst_hasher
>::create_ggc (64);
699 int_cst_node
= make_int_cst (1, 1);
701 cl_option_hash_table
= hash_table
<cl_option_hasher
>::create_ggc (64);
703 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
704 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
706 /* Initialize the tree_contains_struct array. */
707 initialize_tree_contains_struct ();
708 lang_hooks
.init_ts ();
/* The name of the object as the assembler will see it (but before any
   translations made by ASM_OUTPUT_LABELREF).  Often this is the same
   as DECL_NAME.  It is an IDENTIFIER_NODE.  */

tree
decl_assembler_name (tree decl)
{
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    lang_hooks.set_decl_assembler_name (decl);
  return DECL_ASSEMBLER_NAME_RAW (decl);
}

/* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
   (either of which may be NULL).  Inform the FE, if this changes the
   name.  */

void
overwrite_decl_assembler_name (tree decl, tree name)
{
  if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
    lang_hooks.overwrite_decl_assembler_name (decl, name);
}
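/* Usage sketch (illustrative), for some FUNCTION_DECL FNDECL:

     tree asmname = decl_assembler_name (fndecl);

   The accessor lazily invokes lang_hooks.set_decl_assembler_name the
   first time it is called for a decl and returns the cached
   IDENTIFIER_NODE on later calls, so callers should prefer it over
   reading DECL_ASSEMBLER_NAME_RAW directly.  */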
734 /* Return true if DECL may need an assembler name to be set. */
737 need_assembler_name_p (tree decl
)
739 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
740 Rule merging. This makes type_odr_p to return true on those types during
741 LTO and by comparing the mangled name, we can say what types are intended
742 to be equivalent across compilation unit.
744 We do not store names of type_in_anonymous_namespace_p.
746 Record, union and enumeration type have linkage that allows use
747 to check type_in_anonymous_namespace_p. We do not mangle compound types
748 that always can be compared structurally.
750 Similarly for builtin types, we compare properties of their main variant.
751 A special case are integer types where mangling do make differences
752 between char/signed char/unsigned char etc. Storing name for these makes
753 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
754 See cp/mangle.cc:write_builtin_type for details. */
756 if (TREE_CODE (decl
) == TYPE_DECL
)
759 && decl
== TYPE_NAME (TREE_TYPE (decl
))
760 && TYPE_MAIN_VARIANT (TREE_TYPE (decl
)) == TREE_TYPE (decl
)
761 && !TYPE_ARTIFICIAL (TREE_TYPE (decl
))
762 && ((TREE_CODE (TREE_TYPE (decl
)) != RECORD_TYPE
763 && TREE_CODE (TREE_TYPE (decl
)) != UNION_TYPE
)
764 || TYPE_CXX_ODR_P (TREE_TYPE (decl
)))
765 && (type_with_linkage_p (TREE_TYPE (decl
))
766 || TREE_CODE (TREE_TYPE (decl
)) == INTEGER_TYPE
)
767 && !variably_modified_type_p (TREE_TYPE (decl
), NULL_TREE
))
768 return !DECL_ASSEMBLER_NAME_SET_P (decl
);
771 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
772 if (!VAR_OR_FUNCTION_DECL_P (decl
))
775 /* If DECL already has its assembler name set, it does not need a
777 if (!HAS_DECL_ASSEMBLER_NAME_P (decl
)
778 || DECL_ASSEMBLER_NAME_SET_P (decl
))
781 /* Abstract decls do not need an assembler name. */
782 if (DECL_ABSTRACT_P (decl
))
785 /* For VAR_DECLs, only static, public and external symbols need an
788 && !TREE_STATIC (decl
)
789 && !TREE_PUBLIC (decl
)
790 && !DECL_EXTERNAL (decl
))
793 if (TREE_CODE (decl
) == FUNCTION_DECL
)
795 /* Do not set assembler name on builtins. Allow RTL expansion to
796 decide whether to expand inline or via a regular call. */
797 if (fndecl_built_in_p (decl
)
798 && DECL_BUILT_IN_CLASS (decl
) != BUILT_IN_FRONTEND
)
801 /* Functions represented in the callgraph need an assembler name. */
802 if (cgraph_node::get (decl
) != NULL
)
805 /* Unused and not public functions don't need an assembler name. */
806 if (!TREE_USED (decl
) && !TREE_PUBLIC (decl
))
813 /* If T needs an assembler name, have one created for it. */
816 assign_assembler_name_if_needed (tree t
)
818 if (need_assembler_name_p (t
))
820 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
821 diagnostics that use input_location to show locus
822 information. The problem here is that, at this point,
823 input_location is generally anchored to the end of the file
824 (since the parser is long gone), so we don't have a good
825 position to pin it to.
827 To alleviate this problem, this uses the location of T's
828 declaration. Examples of this are
829 testsuite/g++.dg/template/cond2.C and
830 testsuite/g++.dg/template/pr35240.C. */
831 location_t saved_location
= input_location
;
832 input_location
= DECL_SOURCE_LOCATION (t
);
834 decl_assembler_name (t
);
836 input_location
= saved_location
;
840 /* When the target supports COMDAT groups, this indicates which group the
841 DECL is associated with. This can be either an IDENTIFIER_NODE or a
842 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
844 decl_comdat_group (const_tree node
)
846 struct symtab_node
*snode
= symtab_node::get (node
);
849 return snode
->get_comdat_group ();
852 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
854 decl_comdat_group_id (const_tree node
)
856 struct symtab_node
*snode
= symtab_node::get (node
);
859 return snode
->get_comdat_group_id ();
862 /* When the target supports named section, return its name as IDENTIFIER_NODE
863 or NULL if it is in no section. */
865 decl_section_name (const_tree node
)
867 struct symtab_node
*snode
= symtab_node::get (node
);
870 return snode
->get_section ();
873 /* Set section name of NODE to VALUE (that is expected to be
876 set_decl_section_name (tree node
, const char *value
)
878 struct symtab_node
*snode
;
882 snode
= symtab_node::get (node
);
886 else if (VAR_P (node
))
887 snode
= varpool_node::get_create (node
);
889 snode
= cgraph_node::get_create (node
);
890 snode
->set_section (value
);
893 /* Set section name of NODE to match the section name of OTHER.
895 set_decl_section_name (decl, other) is equivalent to
896 set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
899 set_decl_section_name (tree decl
, const_tree other
)
901 struct symtab_node
*other_node
= symtab_node::get (other
);
904 struct symtab_node
*decl_node
;
906 decl_node
= varpool_node::get_create (decl
);
908 decl_node
= cgraph_node::get_create (decl
);
909 decl_node
->set_section (*other_node
);
913 struct symtab_node
*decl_node
= symtab_node::get (decl
);
916 decl_node
->set_section (NULL
);
920 /* Return TLS model of a variable NODE. */
922 decl_tls_model (const_tree node
)
924 struct varpool_node
*snode
= varpool_node::get (node
);
926 return TLS_MODEL_NONE
;
927 return snode
->tls_model
;
930 /* Set TLS model of variable NODE to MODEL. */
932 set_decl_tls_model (tree node
, enum tls_model model
)
934 struct varpool_node
*vnode
;
936 if (model
== TLS_MODEL_NONE
)
938 vnode
= varpool_node::get (node
);
943 vnode
= varpool_node::get_create (node
);
944 vnode
->tls_model
= model
;
947 /* Compute the number of bytes occupied by a tree with code CODE.
948 This function cannot be used for nodes that have variable sizes,
949 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
951 tree_code_size (enum tree_code code
)
953 switch (TREE_CODE_CLASS (code
))
955 case tcc_declaration
: /* A decl node */
958 case FIELD_DECL
: return sizeof (tree_field_decl
);
959 case PARM_DECL
: return sizeof (tree_parm_decl
);
960 case VAR_DECL
: return sizeof (tree_var_decl
);
961 case LABEL_DECL
: return sizeof (tree_label_decl
);
962 case RESULT_DECL
: return sizeof (tree_result_decl
);
963 case CONST_DECL
: return sizeof (tree_const_decl
);
964 case TYPE_DECL
: return sizeof (tree_type_decl
);
965 case FUNCTION_DECL
: return sizeof (tree_function_decl
);
966 case DEBUG_EXPR_DECL
: return sizeof (tree_decl_with_rtl
);
967 case TRANSLATION_UNIT_DECL
: return sizeof (tree_translation_unit_decl
);
970 case NAMELIST_DECL
: return sizeof (tree_decl_non_common
);
972 gcc_checking_assert (code
>= NUM_TREE_CODES
);
973 return lang_hooks
.tree_size (code
);
976 case tcc_type
: /* a type node */
988 case FIXED_POINT_TYPE
:
994 case QUAL_UNION_TYPE
:
999 case LANG_TYPE
: return sizeof (tree_type_non_common
);
1001 gcc_checking_assert (code
>= NUM_TREE_CODES
);
1002 return lang_hooks
.tree_size (code
);
1005 case tcc_reference
: /* a reference */
1006 case tcc_expression
: /* an expression */
1007 case tcc_statement
: /* an expression with side effects */
1008 case tcc_comparison
: /* a comparison expression */
1009 case tcc_unary
: /* a unary arithmetic expression */
1010 case tcc_binary
: /* a binary arithmetic expression */
1011 return (sizeof (struct tree_exp
)
1012 + (TREE_CODE_LENGTH (code
) - 1) * sizeof (tree
));
1014 case tcc_constant
: /* a constant */
1017 case VOID_CST
: return sizeof (tree_typed
);
1018 case INTEGER_CST
: gcc_unreachable ();
1019 case POLY_INT_CST
: return sizeof (tree_poly_int_cst
);
1020 case REAL_CST
: return sizeof (tree_real_cst
);
1021 case FIXED_CST
: return sizeof (tree_fixed_cst
);
1022 case COMPLEX_CST
: return sizeof (tree_complex
);
1023 case VECTOR_CST
: gcc_unreachable ();
1024 case STRING_CST
: gcc_unreachable ();
1026 gcc_checking_assert (code
>= NUM_TREE_CODES
);
1027 return lang_hooks
.tree_size (code
);
1030 case tcc_exceptional
: /* something random, like an identifier. */
1033 case IDENTIFIER_NODE
: return lang_hooks
.identifier_size
;
1034 case TREE_LIST
: return sizeof (tree_list
);
1037 case PLACEHOLDER_EXPR
: return sizeof (tree_common
);
1039 case TREE_VEC
: gcc_unreachable ();
1040 case OMP_CLAUSE
: gcc_unreachable ();
1042 case SSA_NAME
: return sizeof (tree_ssa_name
);
1044 case STATEMENT_LIST
: return sizeof (tree_statement_list
);
1045 case BLOCK
: return sizeof (struct tree_block
);
1046 case CONSTRUCTOR
: return sizeof (tree_constructor
);
1047 case OPTIMIZATION_NODE
: return sizeof (tree_optimization_option
);
1048 case TARGET_OPTION_NODE
: return sizeof (tree_target_option
);
1051 gcc_checking_assert (code
>= NUM_TREE_CODES
);
1052 return lang_hooks
.tree_size (code
);
1060 /* Compute the number of bytes occupied by NODE. This routine only
1061 looks at TREE_CODE, except for those nodes that have variable sizes. */
1063 tree_size (const_tree node
)
1065 const enum tree_code code
= TREE_CODE (node
);
1069 return (sizeof (struct tree_int_cst
)
1070 + (TREE_INT_CST_EXT_NUNITS (node
) - 1) * sizeof (HOST_WIDE_INT
));
1073 return (offsetof (struct tree_binfo
, base_binfos
)
1075 ::embedded_size (BINFO_N_BASE_BINFOS (node
)));
1078 return (sizeof (struct tree_vec
)
1079 + (TREE_VEC_LENGTH (node
) - 1) * sizeof (tree
));
1082 return (sizeof (struct tree_vector
)
1083 + (vector_cst_encoded_nelts (node
) - 1) * sizeof (tree
));
1086 return TREE_STRING_LENGTH (node
) + offsetof (struct tree_string
, str
) + 1;
1089 return (sizeof (struct tree_omp_clause
)
1090 + (omp_clause_num_ops
[OMP_CLAUSE_CODE (node
)] - 1)
1094 if (TREE_CODE_CLASS (code
) == tcc_vl_exp
)
1095 return (sizeof (struct tree_exp
)
1096 + (VL_EXP_OPERAND_LENGTH (node
) - 1) * sizeof (tree
));
1098 return tree_code_size (code
);
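/* For instance (illustrative): a PLUS_EXPR always occupies
   sizeof (struct tree_exp) + (2 - 1) * sizeof (tree) bytes, while the
   size of an INTEGER_CST depends on TREE_INT_CST_EXT_NUNITS of the
   particular node, which is why the variable-sized codes are handled
   here instead of in tree_code_size.  */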
1102 /* Return tree node kind based on tree CODE. */
1104 static tree_node_kind
1105 get_stats_node_kind (enum tree_code code
)
1107 enum tree_code_class type
= TREE_CODE_CLASS (code
);
1111 case tcc_declaration
: /* A decl node */
1113 case tcc_type
: /* a type node */
1115 case tcc_statement
: /* an expression with side effects */
1117 case tcc_reference
: /* a reference */
1119 case tcc_expression
: /* an expression */
1120 case tcc_comparison
: /* a comparison expression */
1121 case tcc_unary
: /* a unary arithmetic expression */
1122 case tcc_binary
: /* a binary arithmetic expression */
1124 case tcc_constant
: /* a constant */
1126 case tcc_exceptional
: /* something random, like an identifier. */
1129 case IDENTIFIER_NODE
:
1136 return ssa_name_kind
;
1142 return omp_clause_kind
;
1154 /* Record interesting allocation statistics for a tree node with CODE
1158 record_node_allocation_statistics (enum tree_code code
, size_t length
)
1160 if (!GATHER_STATISTICS
)
1163 tree_node_kind kind
= get_stats_node_kind (code
);
1165 tree_code_counts
[(int) code
]++;
1166 tree_node_counts
[(int) kind
]++;
1167 tree_node_sizes
[(int) kind
] += length
;
1170 /* Allocate and return a new UID from the DECL_UID namespace. */
1173 allocate_decl_uid (void)
1175 return next_decl_uid
++;
1178 /* Return a newly allocated node of code CODE. For decl and type
1179 nodes, some other fields are initialized. The rest of the node is
1180 initialized to zero. This function cannot be used for TREE_VEC,
1181 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1184 Achoo! I got a code in the node. */
1187 make_node (enum tree_code code MEM_STAT_DECL
)
1190 enum tree_code_class type
= TREE_CODE_CLASS (code
);
1191 size_t length
= tree_code_size (code
);
1193 record_node_allocation_statistics (code
, length
);
1195 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1196 TREE_SET_CODE (t
, code
);
1201 if (code
!= DEBUG_BEGIN_STMT
)
1202 TREE_SIDE_EFFECTS (t
) = 1;
1205 case tcc_declaration
:
1206 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_COMMON
))
1208 if (code
== FUNCTION_DECL
)
1210 SET_DECL_ALIGN (t
, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY
));
1211 SET_DECL_MODE (t
, FUNCTION_MODE
);
1214 SET_DECL_ALIGN (t
, 1);
1216 DECL_SOURCE_LOCATION (t
) = input_location
;
1217 if (TREE_CODE (t
) == DEBUG_EXPR_DECL
)
1218 DECL_UID (t
) = --next_debug_decl_uid
;
1221 DECL_UID (t
) = allocate_decl_uid ();
1222 SET_DECL_PT_UID (t
, -1);
1224 if (TREE_CODE (t
) == LABEL_DECL
)
1225 LABEL_DECL_UID (t
) = -1;
1230 TYPE_UID (t
) = next_type_uid
++;
1231 SET_TYPE_ALIGN (t
, BITS_PER_UNIT
);
1232 TYPE_USER_ALIGN (t
) = 0;
1233 TYPE_MAIN_VARIANT (t
) = t
;
1234 TYPE_CANONICAL (t
) = t
;
1236 /* Default to no attributes for type, but let target change that. */
1237 TYPE_ATTRIBUTES (t
) = NULL_TREE
;
1238 targetm
.set_default_type_attributes (t
);
1240 /* We have not yet computed the alias set for this type. */
1241 TYPE_ALIAS_SET (t
) = -1;
1245 TREE_CONSTANT (t
) = 1;
1248 case tcc_expression
:
1254 case PREDECREMENT_EXPR
:
1255 case PREINCREMENT_EXPR
:
1256 case POSTDECREMENT_EXPR
:
1257 case POSTINCREMENT_EXPR
:
1258 /* All of these have side-effects, no matter what their
1260 TREE_SIDE_EFFECTS (t
) = 1;
1268 case tcc_exceptional
:
1271 case TARGET_OPTION_NODE
:
1272 TREE_TARGET_OPTION(t
)
1273 = ggc_cleared_alloc
<struct cl_target_option
> ();
1276 case OPTIMIZATION_NODE
:
1277 TREE_OPTIMIZATION (t
)
1278 = ggc_cleared_alloc
<struct cl_optimization
> ();
1287 /* Other classes need no special treatment. */
1294 /* Free tree node. */
1297 free_node (tree node
)
1299 enum tree_code code
= TREE_CODE (node
);
1300 if (GATHER_STATISTICS
)
1302 enum tree_node_kind kind
= get_stats_node_kind (code
);
1304 gcc_checking_assert (tree_code_counts
[(int) TREE_CODE (node
)] != 0);
1305 gcc_checking_assert (tree_node_counts
[(int) kind
] != 0);
1306 gcc_checking_assert (tree_node_sizes
[(int) kind
] >= tree_size (node
));
1308 tree_code_counts
[(int) TREE_CODE (node
)]--;
1309 tree_node_counts
[(int) kind
]--;
1310 tree_node_sizes
[(int) kind
] -= tree_size (node
);
1312 if (CODE_CONTAINS_STRUCT (code
, TS_CONSTRUCTOR
))
1313 vec_free (CONSTRUCTOR_ELTS (node
));
1314 else if (code
== BLOCK
)
1315 vec_free (BLOCK_NONLOCALIZED_VARS (node
));
1316 else if (code
== TREE_BINFO
)
1317 vec_free (BINFO_BASE_ACCESSES (node
));
1318 else if (code
== OPTIMIZATION_NODE
)
1319 cl_optimization_option_free (TREE_OPTIMIZATION (node
));
1320 else if (code
== TARGET_OPTION_NODE
)
1321 cl_target_option_free (TREE_TARGET_OPTION (node
));
1325 /* Return a new node with the same contents as NODE except that its
1326 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1329 copy_node (tree node MEM_STAT_DECL
)
1332 enum tree_code code
= TREE_CODE (node
);
1335 gcc_assert (code
!= STATEMENT_LIST
);
1337 length
= tree_size (node
);
1338 record_node_allocation_statistics (code
, length
);
1339 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
1340 memcpy (t
, node
, length
);
1342 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
1344 TREE_ASM_WRITTEN (t
) = 0;
1345 TREE_VISITED (t
) = 0;
1347 if (TREE_CODE_CLASS (code
) == tcc_declaration
)
1349 if (code
== DEBUG_EXPR_DECL
)
1350 DECL_UID (t
) = --next_debug_decl_uid
;
1353 DECL_UID (t
) = allocate_decl_uid ();
1354 if (DECL_PT_UID_SET_P (node
))
1355 SET_DECL_PT_UID (t
, DECL_PT_UID (node
));
1357 if ((TREE_CODE (node
) == PARM_DECL
|| VAR_P (node
))
1358 && DECL_HAS_VALUE_EXPR_P (node
))
1360 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (node
));
1361 DECL_HAS_VALUE_EXPR_P (t
) = 1;
1363 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1366 DECL_HAS_DEBUG_EXPR_P (t
) = 0;
1367 t
->decl_with_vis
.symtab_node
= NULL
;
1369 if (VAR_P (node
) && DECL_HAS_INIT_PRIORITY_P (node
))
1371 SET_DECL_INIT_PRIORITY (t
, DECL_INIT_PRIORITY (node
));
1372 DECL_HAS_INIT_PRIORITY_P (t
) = 1;
1374 if (TREE_CODE (node
) == FUNCTION_DECL
)
1376 DECL_STRUCT_FUNCTION (t
) = NULL
;
1377 t
->decl_with_vis
.symtab_node
= NULL
;
1380 else if (TREE_CODE_CLASS (code
) == tcc_type
)
1382 TYPE_UID (t
) = next_type_uid
++;
1383 /* The following is so that the debug code for
1384 the copy is different from the original type.
1385 The two statements usually duplicate each other
1386 (because they clear fields of the same union),
1387 but the optimizer should catch that. */
1388 TYPE_SYMTAB_ADDRESS (t
) = 0;
1389 TYPE_SYMTAB_DIE (t
) = 0;
1391 /* Do not copy the values cache. */
1392 if (TYPE_CACHED_VALUES_P (t
))
1394 TYPE_CACHED_VALUES_P (t
) = 0;
1395 TYPE_CACHED_VALUES (t
) = NULL_TREE
;
1398 else if (code
== TARGET_OPTION_NODE
)
1400 TREE_TARGET_OPTION (t
) = ggc_alloc
<struct cl_target_option
>();
1401 memcpy (TREE_TARGET_OPTION (t
), TREE_TARGET_OPTION (node
),
1402 sizeof (struct cl_target_option
));
1404 else if (code
== OPTIMIZATION_NODE
)
1406 TREE_OPTIMIZATION (t
) = ggc_alloc
<struct cl_optimization
>();
1407 memcpy (TREE_OPTIMIZATION (t
), TREE_OPTIMIZATION (node
),
1408 sizeof (struct cl_optimization
));
1414 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1415 For example, this can copy a list made of TREE_LIST nodes. */
1418 copy_list (tree list
)
1426 head
= prev
= copy_node (list
);
1427 next
= TREE_CHAIN (list
);
1430 TREE_CHAIN (prev
) = copy_node (next
);
1431 prev
= TREE_CHAIN (prev
);
1432 next
= TREE_CHAIN (next
);
/* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
   INTEGER_CST with value CST and type TYPE.  */

static unsigned int
get_int_cst_ext_nunits (tree type, const wide_int &cst)
{
  gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
  /* We need extra HWIs if CST is an unsigned integer with its
     upper bit set.  */
  if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
    return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
  return cst.get_len ();
}
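/* Worked example (illustrative): for a 64-bit unsigned type and
   CST == 0xffffffffffffffff the canonical wide_int form is a single
   all-ones (negative) HWI, so one extra zero element is required and
   the function returns 64 / HOST_BITS_PER_WIDE_INT + 1 == 2 on a host
   with a 64-bit HOST_WIDE_INT; for CST == 1 it just returns
   cst.get_len () == 1.  */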
1452 /* Return a new INTEGER_CST with value CST and type TYPE. */
1455 build_new_int_cst (tree type
, const wide_int
&cst
)
1457 unsigned int len
= cst
.get_len ();
1458 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1459 tree nt
= make_int_cst (len
, ext_len
);
1464 TREE_INT_CST_ELT (nt
, ext_len
)
1465 = zext_hwi (-1, cst
.get_precision () % HOST_BITS_PER_WIDE_INT
);
1466 for (unsigned int i
= len
; i
< ext_len
; ++i
)
1467 TREE_INT_CST_ELT (nt
, i
) = -1;
1469 else if (TYPE_UNSIGNED (type
)
1470 && cst
.get_precision () < len
* HOST_BITS_PER_WIDE_INT
)
1473 TREE_INT_CST_ELT (nt
, len
)
1474 = zext_hwi (cst
.elt (len
),
1475 cst
.get_precision () % HOST_BITS_PER_WIDE_INT
);
1478 for (unsigned int i
= 0; i
< len
; i
++)
1479 TREE_INT_CST_ELT (nt
, i
) = cst
.elt (i
);
1480 TREE_TYPE (nt
) = type
;
1484 /* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
1487 build_new_poly_int_cst (tree type
, tree (&coeffs
)[NUM_POLY_INT_COEFFS
]
1490 size_t length
= sizeof (struct tree_poly_int_cst
);
1491 record_node_allocation_statistics (POLY_INT_CST
, length
);
1493 tree t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1495 TREE_SET_CODE (t
, POLY_INT_CST
);
1496 TREE_CONSTANT (t
) = 1;
1497 TREE_TYPE (t
) = type
;
1498 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1499 POLY_INT_CST_COEFF (t
, i
) = coeffs
[i
];
/* Create a constant tree that contains CST sign-extended to TYPE.  */

tree
build_int_cst (tree type, poly_int64 cst)
{
  /* Support legacy code.  */
  if (!type)
    type = integer_type_node;

  return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
}

/* Create a constant tree that contains CST zero-extended to TYPE.  */

tree
build_int_cstu (tree type, poly_uint64 cst)
{
  return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
}

/* Create a constant tree that contains CST sign-extended to TYPE.  */

tree
build_int_cst_type (tree type, poly_int64 cst)
{
  return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
}

/* Construct a tree in type TYPE with the value given by CST.  Signedness
   of CST is assumed to be the same as the signedness of TYPE.  */

tree
double_int_to_tree (tree type, double_int cst)
{
  return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
}
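/* Usage sketch (illustrative):

     tree forty_two = build_int_cst (integer_type_node, 42);
     tree byte_ones = build_int_cst (unsigned_char_type_node, -1);

   The second call sign-extends -1 and then forces it to the 8-bit
   precision of the type, so the resulting INTEGER_CST has the value
   255; use build_int_cstu when the argument should be treated as an
   unsigned quantity from the start.  */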
1541 /* We force the wide_int CST to the range of the type TYPE by sign or
1542 zero extending it. OVERFLOWABLE indicates if we are interested in
1543 overflow of the value, when >0 we are only interested in signed
1544 overflow, for <0 we are interested in any overflow. OVERFLOWED
1545 indicates whether overflow has already occurred. CONST_OVERFLOWED
1546 indicates whether constant overflow has already occurred. We force
1547 T's value to be within range of T's type (by setting to 0 or 1 all
1548 the bits outside the type's range). We set TREE_OVERFLOWED if,
1549 OVERFLOWED is nonzero,
1550 or OVERFLOWABLE is >0 and signed overflow occurs
1551 or OVERFLOWABLE is <0 and any overflow occurs
1552 We return a new tree node for the extended wide_int. The node
1553 is shared if no overflow flags are set. */
1557 force_fit_type (tree type
, const poly_wide_int_ref
&cst
,
1558 int overflowable
, bool overflowed
)
1560 signop sign
= TYPE_SIGN (type
);
1562 /* If we need to set overflow flags, return a new unshared node. */
1563 if (overflowed
|| !wi::fits_to_tree_p (cst
, type
))
1567 || (overflowable
> 0 && sign
== SIGNED
))
1569 poly_wide_int tmp
= poly_wide_int::from (cst
, TYPE_PRECISION (type
),
1572 if (tmp
.is_constant ())
1573 t
= build_new_int_cst (type
, tmp
.coeffs
[0]);
1576 tree coeffs
[NUM_POLY_INT_COEFFS
];
1577 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1579 coeffs
[i
] = build_new_int_cst (type
, tmp
.coeffs
[i
]);
1580 TREE_OVERFLOW (coeffs
[i
]) = 1;
1582 t
= build_new_poly_int_cst (type
, coeffs
);
1584 TREE_OVERFLOW (t
) = 1;
1589 /* Else build a shared node. */
1590 return wide_int_to_tree (type
, cst
);
1593 /* These are the hash table functions for the hash table of INTEGER_CST
1594 nodes of a sizetype. */
1596 /* Return the hash code X, an INTEGER_CST. */
1599 int_cst_hasher::hash (tree x
)
1601 const_tree
const t
= x
;
1602 hashval_t code
= TYPE_UID (TREE_TYPE (t
));
1605 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
1606 code
= iterative_hash_host_wide_int (TREE_INT_CST_ELT(t
, i
), code
);
1611 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1612 is the same as that given by *Y, which is the same. */
1615 int_cst_hasher::equal (tree x
, tree y
)
1617 const_tree
const xt
= x
;
1618 const_tree
const yt
= y
;
1620 if (TREE_TYPE (xt
) != TREE_TYPE (yt
)
1621 || TREE_INT_CST_NUNITS (xt
) != TREE_INT_CST_NUNITS (yt
)
1622 || TREE_INT_CST_EXT_NUNITS (xt
) != TREE_INT_CST_EXT_NUNITS (yt
))
1625 for (int i
= 0; i
< TREE_INT_CST_NUNITS (xt
); i
++)
1626 if (TREE_INT_CST_ELT (xt
, i
) != TREE_INT_CST_ELT (yt
, i
))
1632 /* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
1633 SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
1634 number of slots that can be cached for the type. */
1637 cache_wide_int_in_type_cache (tree type
, const wide_int
&cst
,
1638 int slot
, int max_slots
)
1640 gcc_checking_assert (slot
>= 0);
1641 /* Initialize cache. */
1642 if (!TYPE_CACHED_VALUES_P (type
))
1644 TYPE_CACHED_VALUES_P (type
) = 1;
1645 TYPE_CACHED_VALUES (type
) = make_tree_vec (max_slots
);
1647 tree t
= TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), slot
);
1650 /* Create a new shared int. */
1651 t
= build_new_int_cst (type
, cst
);
1652 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), slot
) = t
;
1657 /* Create an INT_CST node of TYPE and value CST.
1658 The returned node is always shared. For small integers we use a
1659 per-type vector cache, for larger ones we use a single hash table.
1660 The value is extended from its precision according to the sign of
1661 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1662 the upper bits and ensures that hashing and value equality based
1663 upon the underlying HOST_WIDE_INTs works without masking. */
1666 wide_int_to_tree_1 (tree type
, const wide_int_ref
&pcst
)
1673 unsigned int prec
= TYPE_PRECISION (type
);
1674 signop sgn
= TYPE_SIGN (type
);
1676 /* Verify that everything is canonical. */
1677 int l
= pcst
.get_len ();
1680 if (pcst
.elt (l
- 1) == 0)
1681 gcc_checking_assert (pcst
.elt (l
- 2) < 0);
1682 if (pcst
.elt (l
- 1) == HOST_WIDE_INT_M1
)
1683 gcc_checking_assert (pcst
.elt (l
- 2) >= 0);
1686 wide_int cst
= wide_int::from (pcst
, prec
, sgn
);
1687 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1689 enum tree_code code
= TREE_CODE (type
);
1690 if (code
== POINTER_TYPE
|| code
== REFERENCE_TYPE
)
1692 /* Cache NULL pointer and zero bounds. */
1695 /* Cache upper bounds of pointers. */
1696 else if (cst
== wi::max_value (prec
, sgn
))
1698 /* Cache 1 which is used for a non-zero range. */
1704 t
= cache_wide_int_in_type_cache (type
, cst
, ix
, 3);
1705 /* Make sure no one is clobbering the shared constant. */
1706 gcc_checking_assert (TREE_TYPE (t
) == type
1707 && cst
== wi::to_wide (t
));
1713 /* We just need to store a single HOST_WIDE_INT. */
1715 if (TYPE_UNSIGNED (type
))
1716 hwi
= cst
.to_uhwi ();
1718 hwi
= cst
.to_shwi ();
1723 gcc_assert (hwi
== 0);
1727 case REFERENCE_TYPE
:
1728 /* Ignore pointers, as they were already handled above. */
1732 /* Cache false or true. */
1734 if (IN_RANGE (hwi
, 0, 1))
1741 if (TYPE_SIGN (type
) == UNSIGNED
)
1744 limit
= param_integer_share_limit
;
1745 if (IN_RANGE (hwi
, 0, param_integer_share_limit
- 1))
1750 /* Cache [-1, N). */
1751 limit
= param_integer_share_limit
+ 1;
1752 if (IN_RANGE (hwi
, -1, param_integer_share_limit
- 1))
1766 t
= cache_wide_int_in_type_cache (type
, cst
, ix
, limit
);
1767 /* Make sure no one is clobbering the shared constant. */
1768 gcc_checking_assert (TREE_TYPE (t
) == type
1769 && TREE_INT_CST_NUNITS (t
) == 1
1770 && TREE_INT_CST_EXT_NUNITS (t
) == 1
1771 && TREE_INT_CST_ELT (t
, 0) == hwi
);
1776 /* Use the cache of larger shared ints, using int_cst_node as
1779 TREE_INT_CST_ELT (int_cst_node
, 0) = hwi
;
1780 TREE_TYPE (int_cst_node
) = type
;
1782 tree
*slot
= int_cst_hash_table
->find_slot (int_cst_node
, INSERT
);
1786 /* Insert this one into the hash table. */
1789 /* Make a new node for next time round. */
1790 int_cst_node
= make_int_cst (1, 1);
1796 /* The value either hashes properly or we drop it on the floor
1797 for the gc to take care of. There will not be enough of them
1800 tree nt
= build_new_int_cst (type
, cst
);
1801 tree
*slot
= int_cst_hash_table
->find_slot (nt
, INSERT
);
1805 /* Insert this one into the hash table. */
1817 poly_int_cst_hasher::hash (tree t
)
1819 inchash::hash hstate
;
1821 hstate
.add_int (TYPE_UID (TREE_TYPE (t
)));
1822 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1823 hstate
.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t
, i
)));
1825 return hstate
.end ();
1829 poly_int_cst_hasher::equal (tree x
, const compare_type
&y
)
1831 if (TREE_TYPE (x
) != y
.first
)
1833 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1834 if (wi::to_wide (POLY_INT_CST_COEFF (x
, i
)) != y
.second
->coeffs
[i
])
1839 /* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
1840 The elements must also have type TYPE. */
1843 build_poly_int_cst (tree type
, const poly_wide_int_ref
&values
)
1845 unsigned int prec
= TYPE_PRECISION (type
);
1846 gcc_assert (prec
<= values
.coeffs
[0].get_precision ());
1847 poly_wide_int c
= poly_wide_int::from (values
, prec
, SIGNED
);
1850 h
.add_int (TYPE_UID (type
));
1851 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1852 h
.add_wide_int (c
.coeffs
[i
]);
1853 poly_int_cst_hasher::compare_type
comp (type
, &c
);
1854 tree
*slot
= poly_int_cst_hash_table
->find_slot_with_hash (comp
, h
.end (),
1856 if (*slot
== NULL_TREE
)
1858 tree coeffs
[NUM_POLY_INT_COEFFS
];
1859 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
1860 coeffs
[i
] = wide_int_to_tree_1 (type
, c
.coeffs
[i
]);
1861 *slot
= build_new_poly_int_cst (type
, coeffs
);
/* Create a constant tree with value VALUE in type TYPE.  */

tree
wide_int_to_tree (tree type, const poly_wide_int_ref &value)
{
  if (value.is_constant ())
    return wide_int_to_tree_1 (type, value.coeffs[0]);
  return build_poly_int_cst (type, value);
}
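/* Usage sketch (illustrative):

     wide_int w = wi::shwi (1, TYPE_PRECISION (sizetype));
     tree one = wide_int_to_tree (sizetype, w);

   Because the result is shared, a second call with an equal type and
   value returns the same INTEGER_CST node, either from the per-type
   cache of small values or from int_cst_hash_table.  */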
1876 /* Insert INTEGER_CST T into a cache of integer constants. And return
1877 the cached constant (which may or may not be T). If MIGHT_DUPLICATE
1878 is false, and T falls into the type's 'smaller values' range, there
1879 cannot be an existing entry. Otherwise, if MIGHT_DUPLICATE is true,
1880 or the value is large, should an existing entry exist, it is
1881 returned (rather than inserting T). */
1884 cache_integer_cst (tree t
, bool might_duplicate ATTRIBUTE_UNUSED
)
1886 tree type
= TREE_TYPE (t
);
1889 int prec
= TYPE_PRECISION (type
);
1891 gcc_assert (!TREE_OVERFLOW (t
));
1893 /* The caching indices here must match those in
1894 wide_int_to_type_1. */
1895 switch (TREE_CODE (type
))
1898 gcc_checking_assert (integer_zerop (t
));
1902 case REFERENCE_TYPE
:
1904 if (integer_zerop (t
))
1906 else if (integer_onep (t
))
1915 /* Cache false or true. */
1917 if (wi::ltu_p (wi::to_wide (t
), 2))
1918 ix
= TREE_INT_CST_ELT (t
, 0);
1924 if (TYPE_UNSIGNED (type
))
1927 limit
= param_integer_share_limit
;
1929 /* This is a little hokie, but if the prec is smaller than
1930 what is necessary to hold param_integer_share_limit, then the
1931 obvious test will not get the correct answer. */
1932 if (prec
< HOST_BITS_PER_WIDE_INT
)
1934 if (tree_to_uhwi (t
)
1935 < (unsigned HOST_WIDE_INT
) param_integer_share_limit
)
1936 ix
= tree_to_uhwi (t
);
1938 else if (wi::ltu_p (wi::to_wide (t
), param_integer_share_limit
))
1939 ix
= tree_to_uhwi (t
);
1944 limit
= param_integer_share_limit
+ 1;
1946 if (integer_minus_onep (t
))
1948 else if (!wi::neg_p (wi::to_wide (t
)))
1950 if (prec
< HOST_BITS_PER_WIDE_INT
)
1952 if (tree_to_shwi (t
) < param_integer_share_limit
)
1953 ix
= tree_to_shwi (t
) + 1;
1955 else if (wi::ltu_p (wi::to_wide (t
), param_integer_share_limit
))
1956 ix
= tree_to_shwi (t
) + 1;
1962 /* The slot used by TYPE_CACHED_VALUES is used for the enum
1972 /* Look for it in the type's vector of small shared ints. */
1973 if (!TYPE_CACHED_VALUES_P (type
))
1975 TYPE_CACHED_VALUES_P (type
) = 1;
1976 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1979 if (tree r
= TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
))
1981 gcc_checking_assert (might_duplicate
);
1985 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1989 /* Use the cache of larger shared ints. */
1990 tree
*slot
= int_cst_hash_table
->find_slot (t
, INSERT
);
1993 /* If there is already an entry for the number verify it's the
1995 gcc_checking_assert (wi::to_wide (tree (r
)) == wi::to_wide (t
));
1996 /* And return the cached value. */
2000 /* Otherwise insert this one into the hash table. */
2008 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
2009 and the rest are zeros. */
2012 build_low_bits_mask (tree type
, unsigned bits
)
2014 gcc_assert (bits
<= TYPE_PRECISION (type
));
2016 return wide_int_to_tree (type
, wi::mask (bits
, false,
2017 TYPE_PRECISION (type
)));
2020 /* Checks that X is integer constant that can be expressed in (unsigned)
2021 HOST_WIDE_INT without loss of precision. */
2024 cst_and_fits_in_hwi (const_tree x
)
2026 return (TREE_CODE (x
) == INTEGER_CST
2027 && (tree_fits_shwi_p (x
) || tree_fits_uhwi_p (x
)));
2030 /* Build a newly constructed VECTOR_CST with the given values of
2031 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
2034 make_vector (unsigned log2_npatterns
,
2035 unsigned int nelts_per_pattern MEM_STAT_DECL
)
2037 gcc_assert (IN_RANGE (nelts_per_pattern
, 1, 3));
2039 unsigned npatterns
= 1 << log2_npatterns
;
2040 unsigned encoded_nelts
= npatterns
* nelts_per_pattern
;
2041 unsigned length
= (sizeof (struct tree_vector
)
2042 + (encoded_nelts
- 1) * sizeof (tree
));
2044 record_node_allocation_statistics (VECTOR_CST
, length
);
2046 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2048 TREE_SET_CODE (t
, VECTOR_CST
);
2049 TREE_CONSTANT (t
) = 1;
2050 VECTOR_CST_LOG2_NPATTERNS (t
) = log2_npatterns
;
2051 VECTOR_CST_NELTS_PER_PATTERN (t
) = nelts_per_pattern
;
2056 /* Return a new VECTOR_CST node whose type is TYPE and whose values
2057 are extracted from V, a vector of CONSTRUCTOR_ELT. */
2060 build_vector_from_ctor (tree type
, const vec
<constructor_elt
, va_gc
> *v
)
2062 if (vec_safe_length (v
) == 0)
2063 return build_zero_cst (type
);
2065 unsigned HOST_WIDE_INT idx
, nelts
;
2068 /* We can't construct a VECTOR_CST for a variable number of elements. */
2069 nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
2070 tree_vector_builder
vec (type
, nelts
, 1);
2071 FOR_EACH_CONSTRUCTOR_VALUE (v
, idx
, value
)
2073 if (TREE_CODE (value
) == VECTOR_CST
)
2075 /* If NELTS is constant then this must be too. */
2076 unsigned int sub_nelts
= VECTOR_CST_NELTS (value
).to_constant ();
2077 for (unsigned i
= 0; i
< sub_nelts
; ++i
)
2078 vec
.quick_push (VECTOR_CST_ELT (value
, i
));
2081 vec
.quick_push (value
);
2083 while (vec
.length () < nelts
)
2084 vec
.quick_push (build_zero_cst (TREE_TYPE (type
)));
2086 return vec
.build ();
2089 /* Build a vector of type VECTYPE where all the elements are SCs. */
2091 build_vector_from_val (tree vectype
, tree sc
)
2093 unsigned HOST_WIDE_INT i
, nunits
;
2095 if (sc
== error_mark_node
)
2098 /* Verify that the vector type is suitable for SC. Note that there
2099 is some inconsistency in the type-system with respect to restrict
2100 qualifications of pointers. Vector types always have a main-variant
2101 element type and the qualification is applied to the vector-type.
2102 So TREE_TYPE (vector-type) does not return a properly qualified
2103 vector element-type. */
2104 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc
)),
2105 TREE_TYPE (vectype
)));
2107 if (CONSTANT_CLASS_P (sc
))
2109 tree_vector_builder
v (vectype
, 1, 1);
2113 else if (!TYPE_VECTOR_SUBPARTS (vectype
).is_constant (&nunits
))
2114 return fold_build1 (VEC_DUPLICATE_EXPR
, vectype
, sc
);
2117 vec
<constructor_elt
, va_gc
> *v
;
2118 vec_alloc (v
, nunits
);
2119 for (i
= 0; i
< nunits
; ++i
)
2120 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, sc
);
2121 return build_constructor (vectype
, v
);
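/* Usage sketch (illustrative), assuming a 4 x int vector type
   V4SI_TYPE built elsewhere:

     tree zeros = build_vector_from_val (v4si_type, integer_zero_node);

   With a constant SC this yields a VECTOR_CST using a single-pattern
   encoding; a non-constant SC gives a CONSTRUCTOR, or a
   VEC_DUPLICATE_EXPR when the number of elements is not constant.  */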
2125 /* If TYPE is not a vector type, just return SC, otherwise return
2126 build_vector_from_val (TYPE, SC). */
2129 build_uniform_cst (tree type
, tree sc
)
2131 if (!VECTOR_TYPE_P (type
))
2134 return build_vector_from_val (type
, sc
);
/* Build a vector series of type TYPE in which element I has the value
   BASE + I * STEP.  The result is a constant if BASE and STEP are constant
   and a VEC_SERIES_EXPR otherwise.  */

tree
build_vec_series (tree type, tree base, tree step)
{
  if (integer_zerop (step))
    return build_vector_from_val (type, base);
  if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
    {
      tree_vector_builder builder (type, 1, 3);
      tree elt1 = wide_int_to_tree (TREE_TYPE (base),
				    wi::to_wide (base) + wi::to_wide (step));
      tree elt2 = wide_int_to_tree (TREE_TYPE (base),
				    wi::to_wide (elt1) + wi::to_wide (step));
      builder.quick_push (base);
      builder.quick_push (elt1);
      builder.quick_push (elt2);
      return builder.build ();
    }
  return build2 (VEC_SERIES_EXPR, type, base, step);
}
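/* For example (illustrative): with BASE == 1 and STEP == 2 in an
   integer vector type, the three encoded elements are { 1, 3, 5 } and
   the VECTOR_CST decodes to { 1, 3, 5, 7, ... } for however many
   elements the vector type has; non-constant arguments produce a
   VEC_SERIES_EXPR instead.  */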
2161 /* Return a vector with the same number of units and number of bits
2162 as VEC_TYPE, but in which the elements are a linear series of unsigned
2163 integers { BASE, BASE + STEP, BASE + STEP * 2, ... }. */
2166 build_index_vector (tree vec_type
, poly_uint64 base
, poly_uint64 step
)
2168 tree index_vec_type
= vec_type
;
2169 tree index_elt_type
= TREE_TYPE (vec_type
);
2170 poly_uint64 nunits
= TYPE_VECTOR_SUBPARTS (vec_type
);
2171 if (!INTEGRAL_TYPE_P (index_elt_type
) || !TYPE_UNSIGNED (index_elt_type
))
2173 index_elt_type
= build_nonstandard_integer_type
2174 (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type
)), true);
2175 index_vec_type
= build_vector_type (index_elt_type
, nunits
);
2178 tree_vector_builder
v (index_vec_type
, 1, 3);
2179 for (unsigned int i
= 0; i
< 3; ++i
)
2180 v
.quick_push (build_int_cstu (index_elt_type
, base
+ i
* step
));
2184 /* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
2185 elements are A and the rest are B. */
2188 build_vector_a_then_b (tree vec_type
, unsigned int num_a
, tree a
, tree b
)
2190 gcc_assert (known_le (num_a
, TYPE_VECTOR_SUBPARTS (vec_type
)));
2191 unsigned int count
= constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type
));
2192 /* Optimize the constant case. */
2193 if ((count
& 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type
).is_constant ())
2195 tree_vector_builder
builder (vec_type
, count
, 2);
2196 for (unsigned int i
= 0; i
< count
* 2; ++i
)
2197 builder
.quick_push (i
< num_a
? a
: b
);
2198 return builder
.build ();
2201 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
2202 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
2205 recompute_constructor_flags (tree c
)
2209 bool constant_p
= true;
2210 bool side_effects_p
= false;
2211 vec
<constructor_elt
, va_gc
> *vals
= CONSTRUCTOR_ELTS (c
);
2213 FOR_EACH_CONSTRUCTOR_VALUE (vals
, i
, val
)
2215 /* Mostly ctors will have elts that don't have side-effects, so
2216 the usual case is to scan all the elements. Hence a single
2217 loop for both const and side effects, rather than one loop
2218 each (with early outs). */
2219 if (!TREE_CONSTANT (val
))
2221 if (TREE_SIDE_EFFECTS (val
))
2222 side_effects_p
= true;
2225 TREE_SIDE_EFFECTS (c
) = side_effects_p
;
2226 TREE_CONSTANT (c
) = constant_p
;
2229 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
2233 verify_constructor_flags (tree c
)
2237 bool constant_p
= TREE_CONSTANT (c
);
2238 bool side_effects_p
= TREE_SIDE_EFFECTS (c
);
2239 vec
<constructor_elt
, va_gc
> *vals
= CONSTRUCTOR_ELTS (c
);
2241 FOR_EACH_CONSTRUCTOR_VALUE (vals
, i
, val
)
2243 if (constant_p
&& !TREE_CONSTANT (val
))
2244 internal_error ("non-constant element in constant CONSTRUCTOR");
2245 if (!side_effects_p
&& TREE_SIDE_EFFECTS (val
))
2246 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
2250 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
2251 are in the vec pointed to by VALS. */
2253 build_constructor (tree type
, vec
<constructor_elt
, va_gc
> *vals MEM_STAT_DECL
)
2255 tree c
= make_node (CONSTRUCTOR PASS_MEM_STAT
);
2257 TREE_TYPE (c
) = type
;
2258 CONSTRUCTOR_ELTS (c
) = vals
;
2260 recompute_constructor_flags (c
);
2265 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
2268 build_constructor_single (tree type
, tree index
, tree value
)
2270 vec
<constructor_elt
, va_gc
> *v
;
2271 constructor_elt elt
= {index
, value
};
2274 v
->quick_push (elt
);
2276 return build_constructor (type
, v
);
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in a list pointed to by VALS.  */

tree
build_constructor_from_list (tree type, tree vals)
{
  tree t;
  vec<constructor_elt, va_gc> *v = NULL;

  if (vals)
    {
      vec_alloc (v, list_length (vals));
      for (t = vals; t; t = TREE_CHAIN (t))
	CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
    }

  return build_constructor (type, v);
}
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in a vector pointed to by VALS.  Note that the TREE_PURPOSE
   fields in the constructor remain null.  */

tree
build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
{
  vec<constructor_elt, va_gc> *v = NULL;

  for (tree t : vals)
    CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);

  return build_constructor (type, v);
}
/* Return a new CONSTRUCTOR node whose type is TYPE.  NELTS is the number
   of elements, provided as index/value pairs.  */

tree
build_constructor_va (tree type, int nelts, ...)
{
  vec<constructor_elt, va_gc> *v = NULL;
  va_list p;

  va_start (p, nelts);
  vec_alloc (v, nelts);
  while (nelts--)
    {
      tree index = va_arg (p, tree);
      tree value = va_arg (p, tree);
      CONSTRUCTOR_APPEND_ELT (v, index, value);
    }
  va_end (p);
  return build_constructor (type, v);
}
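/* Illustrative sketch, not part of GCC itself: the same two-element
   CONSTRUCTOR as above, built with the varargs helper

     tree ctor = build_constructor_va (array_type, 2,
				       size_int (0), x,
				       size_int (1), y);

   with ARRAY_TYPE, X and Y again hypothetical caller-supplied trees.  */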
/* Return a node of type TYPE for which TREE_CLOBBER_P is true.  */

tree
build_clobber (tree type, enum clobber_kind kind)
{
  tree clobber = build_constructor (type, NULL);
  TREE_THIS_VOLATILE (clobber) = true;
  CLOBBER_KIND (clobber) = kind;
  return clobber;
}
/* Return a new FIXED_CST node whose type is TYPE and value is F.  */

tree
build_fixed (tree type, FIXED_VALUE_TYPE f)
{
  tree v;
  FIXED_VALUE_TYPE *fp;

  v = make_node (FIXED_CST);
  fp = ggc_alloc<fixed_value> ();
  memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_FIXED_CST_PTR (v) = fp;
  return v;
}
/* Return a new REAL_CST node whose type is TYPE and value is D.  */

tree
build_real (tree type, REAL_VALUE_TYPE d)
{
  tree v;
  int overflow = 0;

  /* dconst{0,1,2,m1,half} are used in various places in
     the middle-end and optimizers, allow them here
     even for decimal floating point types as an exception
     by converting them to decimal.  */
  if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type))
      && (d.cl == rvc_normal || d.cl == rvc_zero)
      && !d.decimal)
    {
      if (memcmp (&d, &dconst1, sizeof (d)) == 0)
	decimal_real_from_string (&d, "1");
      else if (memcmp (&d, &dconst2, sizeof (d)) == 0)
	decimal_real_from_string (&d, "2");
      else if (memcmp (&d, &dconstm1, sizeof (d)) == 0)
	decimal_real_from_string (&d, "-1");
      else if (memcmp (&d, &dconsthalf, sizeof (d)) == 0)
	decimal_real_from_string (&d, "0.5");
      else if (memcmp (&d, &dconst0, sizeof (d)) == 0)
	{
	  /* Make sure to give zero the minimum quantum exponent for
	     the type (which corresponds to all bits zero).  */
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  char buf[16];
	  sprintf (buf, "0e%d", fmt->emin - fmt->p);
	  decimal_real_from_string (&d, buf);
	}
      else
	gcc_unreachable ();
    }

  /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
     Consider doing it via real_convert now.  */

  v = make_node (REAL_CST);
  TREE_TYPE (v) = type;
  memcpy (TREE_REAL_CST_PTR (v), &d, sizeof (REAL_VALUE_TYPE));
  TREE_OVERFLOW (v) = overflow;
  return v;
}
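/* Illustrative sketch, not part of GCC itself: the canned REAL_VALUE_TYPE
   constants declared in real.h can be turned into REAL_CSTs directly, e.g.

     tree half = build_real (double_type_node, dconsthalf);
     tree zero = build_real (float_type_node, dconst0);

   and, thanks to the conversion above, the common dconst* values also work
   for decimal floating point types.  */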
/* Like build_real, but first truncate D to the type.  */

tree
build_real_truncate (tree type, REAL_VALUE_TYPE d)
{
  return build_real (type, real_value_truncate (TYPE_MODE (type), d));
}
/* Return a new REAL_CST node whose type is TYPE
   and whose value is the integer value of the INTEGER_CST node I.  */

REAL_VALUE_TYPE
real_value_from_int_cst (const_tree type, const_tree i)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
		     TYPE_SIGN (TREE_TYPE (i)));
  return d;
}
/* Given a tree representing an integer constant I, return a tree
   representing the same value as a floating-point constant of type TYPE.  */

tree
build_real_from_int_cst (tree type, const_tree i)
{
  tree v;
  int overflow = TREE_OVERFLOW (i);

  v = build_real (type, real_value_from_int_cst (type, i));

  TREE_OVERFLOW (v) |= overflow;
  return v;
}
/* Return a new REAL_CST node whose type is TYPE
   and whose value is the integer value I which has sign SGN.  */

tree
build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  real_from_integer (&d, TYPE_MODE (type), i, sgn);
  return build_real (type, d);
}
/* Return a newly constructed STRING_CST node whose value is the LEN
   characters at STR when STR is nonnull, or all zeros otherwise.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (unsigned len, const char *str /*= NULL */)
{
  /* Do not waste bytes provided by padding of struct tree_string.  */
  unsigned size = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, size);

  tree s = (tree) ggc_internal_alloc (size);

  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  if (str)
    memcpy (s->string.str, str, len);
  else
    memset (s->string.str, 0, len);
  s->string.str[len] = '\0';

  return s;
}
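/* Illustrative sketch, not part of GCC itself: for a C-style literal the
   length passed to build_string includes the trailing NUL, and the caller
   is responsible for the type, e.g.

     tree s = build_string (4, "abc");
     TREE_TYPE (s) = build_array_type (char_type_node,
				       build_index_type (size_int (3)));

   (see also build_string_literal, which wraps this kind of boilerplate).  */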
/* Return a newly constructed COMPLEX_CST node whose value is
   specified by the real and imaginary parts REAL and IMAG.
   Both REAL and IMAG should be constant nodes.  TYPE, if specified,
   will be the type of the COMPLEX_CST; otherwise a new type will be made.  */

tree
build_complex (tree type, tree real, tree imag)
{
  gcc_assert (CONSTANT_CLASS_P (real));
  gcc_assert (CONSTANT_CLASS_P (imag));

  tree t = make_node (COMPLEX_CST);

  TREE_REALPART (t) = real;
  TREE_IMAGPART (t) = imag;
  TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
  TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
  return t;
}
/* Build a complex (inf +- 0i), such as for the result of cproj.
   TYPE is the complex tree type of the result.  If NEG is true, the
   imaginary zero is negative.  */

tree
build_complex_inf (tree type, bool neg)
{
  REAL_VALUE_TYPE rzero = dconst0;

  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), dconstinf),
			build_real (TREE_TYPE (type), rzero));
}
/* Return the constant 1 in type TYPE.  If TYPE has several elements, each
   element is set to 1.  In particular, this is 1 + i for complex types.  */

tree
build_each_one_cst (tree type)
{
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree scalar = build_one_cst (TREE_TYPE (type));
      return build_complex (type, scalar, scalar);
    }
  else
    return build_one_cst (type);
}
/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE: case BITINT_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type, FCONST1 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	tree scalar = build_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      return build_complex (type,
			    build_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
/* Return an integer of type TYPE containing all 1's in as much precision as
   it contains, or a complex or vector whose subparts are such integers.  */

tree
build_all_ones_cst (tree type)
{
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree scalar = build_all_ones_cst (TREE_TYPE (type));
      return build_complex (type, scalar, scalar);
    }
  else
    return build_minus_one_cst (type);
}
/* Return a constant of arithmetic type TYPE which is the
   opposite of the multiplicative identity of the set TYPE.  */

tree
build_minus_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE: case BITINT_TYPE:
      return build_int_cst (type, -1);

    case REAL_TYPE:
      return build_real (type, dconstm1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type,
			  fixed_from_double_int (double_int_minus_one,
						 SCALAR_TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	tree scalar = build_minus_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      return build_complex (type,
			    build_minus_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
/* Build 0 constant of type TYPE.  This is used by constructor folding
   and thus the constant should be represented in memory by
   zero(es).  */

tree
build_zero_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE: case NULLPTR_TYPE: case BITINT_TYPE:
      return build_int_cst (type, 0);

    case REAL_TYPE:
      return build_real (type, dconst0);

    case FIXED_POINT_TYPE:
      return build_fixed (type, FCONST0 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	tree scalar = build_zero_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      {
	tree zero = build_zero_cst (TREE_TYPE (type));

	return build_complex (type, zero, zero);
      }

    default:
      if (!AGGREGATE_TYPE_P (type))
	return fold_convert (type, integer_zero_node);
      return build_constructor (type, NULL);
    }
}
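/* Illustrative sketch, not part of GCC itself: the builders above compose
   across scalar, vector and complex types, e.g.

     tree zero  = build_zero_cst (integer_type_node);
     tree mone  = build_minus_one_cst (integer_type_node);
     tree vones = build_one_cst (vec_type);

   yield 0, -1 and a vector of 1s respectively, where VEC_TYPE is a
   hypothetical vector type supplied by the caller.  */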
2676 /* Build a constant of integer type TYPE, made of VALUE's bits replicated
2677 every WIDTH bits to fit TYPE's precision. */
2680 build_replicated_int_cst (tree type
, unsigned int width
, HOST_WIDE_INT value
)
2682 int n
= ((TYPE_PRECISION (type
) + HOST_BITS_PER_WIDE_INT
- 1)
2683 / HOST_BITS_PER_WIDE_INT
);
2684 unsigned HOST_WIDE_INT low
, mask
;
2685 HOST_WIDE_INT a
[WIDE_INT_MAX_INL_ELTS
];
2688 gcc_assert (n
&& n
<= WIDE_INT_MAX_INL_ELTS
);
2690 if (width
== HOST_BITS_PER_WIDE_INT
)
2694 mask
= ((HOST_WIDE_INT
)1 << width
) - 1;
2695 low
= (unsigned HOST_WIDE_INT
) ~0 / mask
* (value
& mask
);
2698 for (i
= 0; i
< n
; i
++)
2701 gcc_assert (TYPE_PRECISION (type
) <= MAX_BITSIZE_MODE_ANY_INT
);
2702 return wide_int_to_tree (type
, wide_int::from_array (a
, n
,
2703 TYPE_PRECISION (type
)));
2706 /* If floating-point type TYPE has an IEEE-style sign bit, return an
2707 unsigned constant in which only the sign bit is set. Return null
2711 sign_mask_for (tree type
)
2713 /* Avoid having to choose between a real-only sign and a pair of signs.
2714 This could be relaxed if the choice becomes obvious later. */
2715 if (TREE_CODE (type
) == COMPLEX_TYPE
)
2718 auto eltmode
= as_a
<scalar_float_mode
> (element_mode (type
));
2719 auto bits
= REAL_MODE_FORMAT (eltmode
)->ieee_bits
;
2720 if (!bits
|| !pow2p_hwi (bits
))
2723 tree inttype
= unsigned_type_for (type
);
2727 auto mask
= wi::set_bit_in_zero (bits
- 1, bits
);
2728 if (VECTOR_TYPE_P (inttype
))
2730 tree elt
= wide_int_to_tree (TREE_TYPE (inttype
), mask
);
2731 return build_vector_from_val (inttype
, elt
);
2733 return wide_int_to_tree (inttype
, mask
);
2736 /* Build a BINFO with LEN language slots. */
2739 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL
)
2742 size_t length
= (offsetof (struct tree_binfo
, base_binfos
)
2743 + vec
<tree
, va_gc
>::embedded_size (base_binfos
));
2745 record_node_allocation_statistics (TREE_BINFO
, length
);
2747 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
2749 memset (t
, 0, offsetof (struct tree_binfo
, base_binfos
));
2751 TREE_SET_CODE (t
, TREE_BINFO
);
2753 BINFO_BASE_BINFOS (t
)->embedded_init (base_binfos
);
2758 /* Create a CASE_LABEL_EXPR tree node and return it. */
2761 build_case_label (tree low_value
, tree high_value
, tree label_decl
)
2763 tree t
= make_node (CASE_LABEL_EXPR
);
2765 TREE_TYPE (t
) = void_type_node
;
2766 SET_EXPR_LOCATION (t
, DECL_SOURCE_LOCATION (label_decl
));
2768 CASE_LOW (t
) = low_value
;
2769 CASE_HIGH (t
) = high_value
;
2770 CASE_LABEL (t
) = label_decl
;
2771 CASE_CHAIN (t
) = NULL_TREE
;
2776 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2777 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2778 The latter determines the length of the HOST_WIDE_INT vector. */
2781 make_int_cst (int len
, int ext_len MEM_STAT_DECL
)
2784 int length
= ((ext_len
- 1) * sizeof (HOST_WIDE_INT
)
2785 + sizeof (struct tree_int_cst
));
2788 record_node_allocation_statistics (INTEGER_CST
, length
);
2790 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2792 TREE_SET_CODE (t
, INTEGER_CST
);
2793 TREE_INT_CST_NUNITS (t
) = len
;
2794 TREE_INT_CST_EXT_NUNITS (t
) = ext_len
;
2795 TREE_CONSTANT (t
) = 1;
2800 /* Build a newly constructed TREE_VEC node of length LEN. */
2803 make_tree_vec (int len MEM_STAT_DECL
)
2806 size_t length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2808 record_node_allocation_statistics (TREE_VEC
, length
);
2810 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2812 TREE_SET_CODE (t
, TREE_VEC
);
2813 TREE_VEC_LENGTH (t
) = len
;
2818 /* Grow a TREE_VEC node to new length LEN. */
2821 grow_tree_vec (tree v
, int len MEM_STAT_DECL
)
2823 gcc_assert (TREE_CODE (v
) == TREE_VEC
);
2825 int oldlen
= TREE_VEC_LENGTH (v
);
2826 gcc_assert (len
> oldlen
);
2828 size_t oldlength
= (oldlen
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2829 size_t length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2831 record_node_allocation_statistics (TREE_VEC
, length
- oldlength
);
2833 v
= (tree
) ggc_realloc (v
, length PASS_MEM_STAT
);
2835 TREE_VEC_LENGTH (v
) = len
;
/* Return true if EXPR is the constant zero, whether it is integral, float or
   fixed, and scalar, complex or vector.  */

bool
zerop (const_tree expr)
{
  return (integer_zerop (expr)
	  || real_zerop (expr)
	  || fixed_zerop (expr));
}
/* Return true if EXPR is the integer constant zero or a complex constant
   of zero, or a location wrapper for such a constant.  */

bool
integer_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return wi::to_wide (expr) == 0;
    case COMPLEX_CST:
      return (integer_zerop (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
/* Return true if EXPR is the integer constant one or the corresponding
   complex constant, or a location wrapper for such a constant.  */

bool
integer_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return wi::eq_p (wi::to_widest (expr), 1);
    case COMPLEX_CST:
      return (integer_onep (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
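/* Illustrative sketch, not part of GCC itself: these predicates are what
   folders typically use to recognize identities, e.g. a simplification of
   x * 1 might be guarded as

     if (integer_onep (op1))
       return op0;

   where OP0 and OP1 are hypothetical operands; the location-wrapper
   stripping above means wrapped literals are recognized as well.  */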
2899 /* Return true if EXPR is the integer constant one. For complex and vector,
2900 return true if every piece is the integer constant one.
2901 Also return true for location wrappers for such a constant. */
2904 integer_each_onep (const_tree expr
)
2906 STRIP_ANY_LOCATION_WRAPPER (expr
);
2908 if (TREE_CODE (expr
) == COMPLEX_CST
)
2909 return (integer_onep (TREE_REALPART (expr
))
2910 && integer_onep (TREE_IMAGPART (expr
)));
2912 return integer_onep (expr
);
2915 /* Return true if EXPR is an integer containing all 1's in as much precision
2916 as it contains, or a complex or vector whose subparts are such integers,
2917 or a location wrapper for such a constant. */
2920 integer_all_onesp (const_tree expr
)
2922 STRIP_ANY_LOCATION_WRAPPER (expr
);
2924 if (TREE_CODE (expr
) == COMPLEX_CST
2925 && integer_all_onesp (TREE_REALPART (expr
))
2926 && integer_all_onesp (TREE_IMAGPART (expr
)))
2929 else if (TREE_CODE (expr
) == VECTOR_CST
)
2930 return (VECTOR_CST_NPATTERNS (expr
) == 1
2931 && VECTOR_CST_DUPLICATE_P (expr
)
2932 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2934 else if (TREE_CODE (expr
) != INTEGER_CST
)
2937 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr
)), UNSIGNED
)
2938 == wi::to_wide (expr
));
2941 /* Return true if EXPR is the integer constant minus one, or a location
2942 wrapper for such a constant. */
2945 integer_minus_onep (const_tree expr
)
2947 STRIP_ANY_LOCATION_WRAPPER (expr
);
2949 if (TREE_CODE (expr
) == COMPLEX_CST
)
2950 return (integer_all_onesp (TREE_REALPART (expr
))
2951 && integer_zerop (TREE_IMAGPART (expr
)));
2953 return integer_all_onesp (expr
);
2956 /* Return true if EXPR is an integer constant that is a power of 2 (i.e., has
2957 only one bit on), or a location wrapper for such a constant. */
2960 integer_pow2p (const_tree expr
)
2962 STRIP_ANY_LOCATION_WRAPPER (expr
);
2964 if (TREE_CODE (expr
) == COMPLEX_CST
2965 && integer_pow2p (TREE_REALPART (expr
))
2966 && integer_zerop (TREE_IMAGPART (expr
)))
2969 if (TREE_CODE (expr
) != INTEGER_CST
)
2972 return wi::popcount (wi::to_wide (expr
)) == 1;
2975 /* Return true if EXPR is an integer constant other than zero or a
2976 complex constant other than zero, or a location wrapper for such a
2980 integer_nonzerop (const_tree expr
)
2982 STRIP_ANY_LOCATION_WRAPPER (expr
);
2984 return ((TREE_CODE (expr
) == INTEGER_CST
2985 && wi::to_wide (expr
) != 0)
2986 || (TREE_CODE (expr
) == COMPLEX_CST
2987 && (integer_nonzerop (TREE_REALPART (expr
))
2988 || integer_nonzerop (TREE_IMAGPART (expr
)))));
2991 /* Return true if EXPR is the integer constant one. For vector,
2992 return true if every piece is the integer constant minus one
2993 (representing the value TRUE).
2994 Also return true for location wrappers for such a constant. */
2997 integer_truep (const_tree expr
)
2999 STRIP_ANY_LOCATION_WRAPPER (expr
);
3001 if (TREE_CODE (expr
) == VECTOR_CST
)
3002 return integer_all_onesp (expr
);
3003 return integer_onep (expr
);
3006 /* Return true if EXPR is the fixed-point constant zero, or a location wrapper
3007 for such a constant. */
3010 fixed_zerop (const_tree expr
)
3012 STRIP_ANY_LOCATION_WRAPPER (expr
);
3014 return (TREE_CODE (expr
) == FIXED_CST
3015 && TREE_FIXED_CST (expr
).data
.is_zero ());
3018 /* Return the power of two represented by a tree node known to be a
3022 tree_log2 (const_tree expr
)
3024 if (TREE_CODE (expr
) == COMPLEX_CST
)
3025 return tree_log2 (TREE_REALPART (expr
));
3027 return wi::exact_log2 (wi::to_wide (expr
));
3030 /* Similar, but return the largest integer Y such that 2 ** Y is less
3031 than or equal to EXPR. */
3034 tree_floor_log2 (const_tree expr
)
3036 if (TREE_CODE (expr
) == COMPLEX_CST
)
3037 return tree_log2 (TREE_REALPART (expr
));
3039 return wi::floor_log2 (wi::to_wide (expr
));
3042 /* Return number of known trailing zero bits in EXPR, or, if the value of
3043 EXPR is known to be zero, the precision of it's type. */
3046 tree_ctz (const_tree expr
)
3048 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
3049 && !POINTER_TYPE_P (TREE_TYPE (expr
)))
3052 unsigned int ret1
, ret2
, prec
= TYPE_PRECISION (TREE_TYPE (expr
));
3053 switch (TREE_CODE (expr
))
3056 ret1
= wi::ctz (wi::to_wide (expr
));
3057 return MIN (ret1
, prec
);
3059 ret1
= wi::ctz (get_nonzero_bits (expr
));
3060 return MIN (ret1
, prec
);
3067 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3070 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3071 return MIN (ret1
, ret2
);
3072 case POINTER_PLUS_EXPR
:
3073 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3074 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3075 /* Second operand is sizetype, which could be in theory
3076 wider than pointer's precision. Make sure we never
3077 return more than prec. */
3078 ret2
= MIN (ret2
, prec
);
3079 return MIN (ret1
, ret2
);
3081 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3082 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3083 return MAX (ret1
, ret2
);
3085 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3086 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3087 return MIN (ret1
+ ret2
, prec
);
3089 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3090 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
3091 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
3093 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
3094 return MIN (ret1
+ ret2
, prec
);
3098 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
3099 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
3101 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3102 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
3107 case TRUNC_DIV_EXPR
:
3109 case FLOOR_DIV_EXPR
:
3110 case ROUND_DIV_EXPR
:
3111 case EXACT_DIV_EXPR
:
3112 if (TREE_CODE (TREE_OPERAND (expr
, 1)) == INTEGER_CST
3113 && tree_int_cst_sgn (TREE_OPERAND (expr
, 1)) == 1)
3115 int l
= tree_log2 (TREE_OPERAND (expr
, 1));
3118 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3126 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3127 if (ret1
&& ret1
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr
, 0))))
3129 return MIN (ret1
, prec
);
3131 return tree_ctz (TREE_OPERAND (expr
, 0));
3133 ret1
= tree_ctz (TREE_OPERAND (expr
, 1));
3136 ret2
= tree_ctz (TREE_OPERAND (expr
, 2));
3137 return MIN (ret1
, ret2
);
3139 return tree_ctz (TREE_OPERAND (expr
, 1));
3141 ret1
= get_pointer_alignment (CONST_CAST_TREE (expr
));
3142 if (ret1
> BITS_PER_UNIT
)
3144 ret1
= ctz_hwi (ret1
/ BITS_PER_UNIT
);
3145 return MIN (ret1
, prec
);
3153 /* Return true if EXPR is the real constant zero. Trailing zeroes matter for
3154 decimal float constants, so don't return true for them.
3155 Also return true for location wrappers around such a constant. */
3158 real_zerop (const_tree expr
)
3160 STRIP_ANY_LOCATION_WRAPPER (expr
);
3162 switch (TREE_CODE (expr
))
3165 return real_equal (&TREE_REAL_CST (expr
), &dconst0
)
3166 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3168 return real_zerop (TREE_REALPART (expr
))
3169 && real_zerop (TREE_IMAGPART (expr
));
3172 /* Don't simply check for a duplicate because the predicate
3173 accepts both +0.0 and -0.0. */
3174 unsigned count
= vector_cst_encoded_nelts (expr
);
3175 for (unsigned int i
= 0; i
< count
; ++i
)
3176 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr
, i
)))
3185 /* Return true if EXPR is the real constant one in real or complex form.
3186 Trailing zeroes matter for decimal float constants, so don't return
3188 Also return true for location wrappers around such a constant. */
3191 real_onep (const_tree expr
)
3193 STRIP_ANY_LOCATION_WRAPPER (expr
);
3195 switch (TREE_CODE (expr
))
3198 return real_equal (&TREE_REAL_CST (expr
), &dconst1
)
3199 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3201 return real_onep (TREE_REALPART (expr
))
3202 && real_zerop (TREE_IMAGPART (expr
));
3204 return (VECTOR_CST_NPATTERNS (expr
) == 1
3205 && VECTOR_CST_DUPLICATE_P (expr
)
3206 && real_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
3212 /* Return true if EXPR is the real constant minus one. Trailing zeroes
3213 matter for decimal float constants, so don't return true for them.
3214 Also return true for location wrappers around such a constant. */
3217 real_minus_onep (const_tree expr
)
3219 STRIP_ANY_LOCATION_WRAPPER (expr
);
3221 switch (TREE_CODE (expr
))
3224 return real_equal (&TREE_REAL_CST (expr
), &dconstm1
)
3225 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3227 return real_minus_onep (TREE_REALPART (expr
))
3228 && real_zerop (TREE_IMAGPART (expr
));
3230 return (VECTOR_CST_NPATTERNS (expr
) == 1
3231 && VECTOR_CST_DUPLICATE_P (expr
)
3232 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
3238 /* Return true if T could be a floating point zero. */
3241 real_maybe_zerop (const_tree expr
)
3243 switch (TREE_CODE (expr
))
3246 /* Can't use real_zerop here, as it always returns false for decimal
3247 floats. And can't use TREE_REAL_CST (expr).cl == rvc_zero
3248 either, as decimal zeros are rvc_normal. */
3249 return real_equal (&TREE_REAL_CST (expr
), &dconst0
);
3251 return (real_maybe_zerop (TREE_REALPART (expr
))
3252 || real_maybe_zerop (TREE_IMAGPART (expr
)));
3255 unsigned count
= vector_cst_encoded_nelts (expr
);
3256 for (unsigned int i
= 0; i
< count
; ++i
)
3257 if (real_maybe_zerop (VECTOR_CST_ENCODED_ELT (expr
, i
)))
3262 /* Perhaps for SSA_NAMEs we could query frange. */
3267 /* True if EXP is a constant or a cast of a constant. */
3270 really_constant_p (const_tree exp
)
3272 /* This is not quite the same as STRIP_NOPS. It does more. */
3273 while (CONVERT_EXPR_P (exp
)
3274 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
3275 exp
= TREE_OPERAND (exp
, 0);
3276 return TREE_CONSTANT (exp
);
3279 /* Return true if T holds a polynomial pointer difference, storing it in
3280 *VALUE if so. A true return means that T's precision is no greater
3281 than 64 bits, which is the largest address space we support, so *VALUE
3282 never loses precision. However, the signedness of the result does
3283 not necessarily match the signedness of T: sometimes an unsigned type
3284 like sizetype is used to encode a value that is actually negative. */
3287 ptrdiff_tree_p (const_tree t
, poly_int64
*value
)
3291 if (TREE_CODE (t
) == INTEGER_CST
)
3293 if (!cst_and_fits_in_hwi (t
))
3295 *value
= int_cst_value (t
);
3298 if (POLY_INT_CST_P (t
))
3300 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3301 if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t
, i
)))
3303 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
3304 value
->coeffs
[i
] = int_cst_value (POLY_INT_CST_COEFF (t
, i
));
3311 tree_to_poly_int64 (const_tree t
)
3313 gcc_assert (tree_fits_poly_int64_p (t
));
3314 if (POLY_INT_CST_P (t
))
3315 return poly_int_cst_value (t
).force_shwi ();
3316 return TREE_INT_CST_LOW (t
);
3320 tree_to_poly_uint64 (const_tree t
)
3322 gcc_assert (tree_fits_poly_uint64_p (t
));
3323 if (POLY_INT_CST_P (t
))
3324 return poly_int_cst_value (t
).force_uhwi ();
3325 return TREE_INT_CST_LOW (t
);
3328 /* Return first list element whose TREE_VALUE is ELEM.
3329 Return 0 if ELEM is not in LIST. */
3332 value_member (tree elem
, tree list
)
3336 if (elem
== TREE_VALUE (list
))
3338 list
= TREE_CHAIN (list
);
3343 /* Return first list element whose TREE_PURPOSE is ELEM.
3344 Return 0 if ELEM is not in LIST. */
3347 purpose_member (const_tree elem
, tree list
)
3351 if (elem
== TREE_PURPOSE (list
))
3353 list
= TREE_CHAIN (list
);
3358 /* Return true if ELEM is in V. */
3361 vec_member (const_tree elem
, vec
<tree
, va_gc
> *v
)
3365 FOR_EACH_VEC_SAFE_ELT (v
, ix
, t
)
3371 /* Returns element number IDX (zero-origin) of chain CHAIN, or
3375 chain_index (int idx
, tree chain
)
3377 for (; chain
&& idx
> 0; --idx
)
3378 chain
= TREE_CHAIN (chain
);
3382 /* Return true if ELEM is part of the chain CHAIN. */
3385 chain_member (const_tree elem
, const_tree chain
)
3391 chain
= DECL_CHAIN (chain
);
3397 /* Return the length of a chain of nodes chained through TREE_CHAIN.
3398 We expect a null pointer to mark the end of the chain.
3399 This is the Lisp primitive `length'. */
3402 list_length (const_tree t
)
3405 #ifdef ENABLE_TREE_CHECKING
3413 #ifdef ENABLE_TREE_CHECKING
3416 gcc_assert (p
!= q
);
3424 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3425 UNION_TYPE TYPE, or NULL_TREE if none. */
3428 first_field (const_tree type
)
3430 tree t
= TYPE_FIELDS (type
);
3431 while (t
&& TREE_CODE (t
) != FIELD_DECL
)
3436 /* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
3437 UNION_TYPE TYPE, or NULL_TREE if none. */
3440 last_field (const_tree type
)
3442 tree last
= NULL_TREE
;
3444 for (tree fld
= TYPE_FIELDS (type
); fld
; fld
= TREE_CHAIN (fld
))
3446 if (TREE_CODE (fld
) != FIELD_DECL
)
3455 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
3456 by modifying the last node in chain 1 to point to chain 2.
3457 This is the Lisp primitive `nconc'. */
3460 chainon (tree op1
, tree op2
)
3469 for (t1
= op1
; TREE_CHAIN (t1
); t1
= TREE_CHAIN (t1
))
3471 TREE_CHAIN (t1
) = op2
;
3473 #ifdef ENABLE_TREE_CHECKING
3476 for (t2
= op2
; t2
; t2
= TREE_CHAIN (t2
))
3477 gcc_assert (t2
!= t1
);
3484 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
3487 tree_last (tree chain
)
3491 while ((next
= TREE_CHAIN (chain
)))
3496 /* Reverse the order of elements in the chain T,
3497 and return the new head of the chain (old last element). */
3502 tree prev
= 0, decl
, next
;
3503 for (decl
= t
; decl
; decl
= next
)
3505 /* We shouldn't be using this function to reverse BLOCK chains; we
3506 have blocks_nreverse for that. */
3507 gcc_checking_assert (TREE_CODE (decl
) != BLOCK
);
3508 next
= TREE_CHAIN (decl
);
3509 TREE_CHAIN (decl
) = prev
;
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PARM and VALUE.  */

tree
build_tree_list (tree parm, tree value MEM_STAT_DECL)
{
  tree t = make_node (TREE_LIST PASS_MEM_STAT);
  TREE_PURPOSE (t) = parm;
  TREE_VALUE (t) = value;
  return t;
}

/* Build a chain of TREE_LIST nodes from a vector.  */

tree
build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
{
  tree ret = NULL_TREE;
  tree *pp = &ret;
  unsigned int i;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (vec, i, t)
    {
      *pp = build_tree_list (NULL, t PASS_MEM_STAT);
      pp = &TREE_CHAIN (*pp);
    }
  return ret;
}

/* Return a newly created TREE_LIST node whose
   purpose and value fields are PURPOSE and VALUE
   and whose TREE_CHAIN is CHAIN.  */

tree
tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
  tree node;

  node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
  memset (node, 0, sizeof (struct tree_common));

  record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));

  TREE_SET_CODE (node, TREE_LIST);
  TREE_CHAIN (node) = chain;
  TREE_PURPOSE (node) = purpose;
  TREE_VALUE (node) = value;
  return node;
}
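/* Illustrative sketch, not part of GCC itself: TREE_LIST chains are built
   front-to-back by consing, e.g. a two-element attribute-style list

     tree list = tree_cons (get_identifier ("used"), NULL_TREE, NULL_TREE);
     list = tree_cons (get_identifier ("noinline"), NULL_TREE, list);

   leaves "noinline" first and "used" second; what goes in the purpose and
   value slots is whatever the consumer of the list expects.  */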
3565 /* Return the values of the elements of a CONSTRUCTOR as a vector of
3569 ctor_to_vec (tree ctor
)
3571 vec
<tree
, va_gc
> *vec
;
3572 vec_alloc (vec
, CONSTRUCTOR_NELTS (ctor
));
3576 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor
), ix
, val
)
3577 vec
->quick_push (val
);
3582 /* Return the size nominally occupied by an object of type TYPE
3583 when it resides in memory. The value is measured in units of bytes,
3584 and its data type is that normally used for type sizes
3585 (which is the first type created by make_signed_type or
3586 make_unsigned_type). */
3589 size_in_bytes_loc (location_t loc
, const_tree type
)
3593 if (type
== error_mark_node
)
3594 return integer_zero_node
;
3596 type
= TYPE_MAIN_VARIANT (type
);
3597 t
= TYPE_SIZE_UNIT (type
);
3601 lang_hooks
.types
.incomplete_type_error (loc
, NULL_TREE
, type
);
3602 return size_zero_node
;
3608 /* Return the size of TYPE (in bytes) as a wide integer
3609 or return -1 if the size can vary or is larger than an integer. */
3612 int_size_in_bytes (const_tree type
)
3616 if (type
== error_mark_node
)
3619 type
= TYPE_MAIN_VARIANT (type
);
3620 t
= TYPE_SIZE_UNIT (type
);
3622 if (t
&& tree_fits_uhwi_p (t
))
3623 return TREE_INT_CST_LOW (t
);
3628 /* Return the maximum size of TYPE (in bytes) as a wide integer
3629 or return -1 if the size can vary or is larger than an integer. */
3632 max_int_size_in_bytes (const_tree type
)
3634 HOST_WIDE_INT size
= -1;
3637 /* If this is an array type, check for a possible MAX_SIZE attached. */
3639 if (TREE_CODE (type
) == ARRAY_TYPE
)
3641 size_tree
= TYPE_ARRAY_MAX_SIZE (type
);
3643 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3644 size
= tree_to_uhwi (size_tree
);
3647 /* If we still haven't been able to get a size, see if the language
3648 can compute a maximum size. */
3652 size_tree
= lang_hooks
.types
.max_size (type
);
3654 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3655 size
= tree_to_uhwi (size_tree
);
3661 /* Return the bit position of FIELD, in bits from the start of the record.
3662 This is a tree of type bitsizetype. */
3665 bit_position (const_tree field
)
3667 return bit_from_pos (DECL_FIELD_OFFSET (field
),
3668 DECL_FIELD_BIT_OFFSET (field
));
3671 /* Return the byte position of FIELD, in bytes from the start of the record.
3672 This is a tree of type sizetype. */
3675 byte_position (const_tree field
)
3677 return byte_from_pos (DECL_FIELD_OFFSET (field
),
3678 DECL_FIELD_BIT_OFFSET (field
));
3681 /* Likewise, but return as an integer. It must be representable in
3682 that way (since it could be a signed value, we don't have the
3683 option of returning -1 like int_size_in_byte can. */
3686 int_byte_position (const_tree field
)
3688 return tree_to_shwi (byte_position (field
));
3691 /* Return, as a tree node, the number of elements for TYPE (which is an
3692 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3695 array_type_nelts (const_tree type
)
3697 tree index_type
, min
, max
;
3699 /* If they did it with unspecified bounds, then we should have already
3700 given an error about it before we got here. */
3701 if (! TYPE_DOMAIN (type
))
3702 return error_mark_node
;
3704 index_type
= TYPE_DOMAIN (type
);
3705 min
= TYPE_MIN_VALUE (index_type
);
3706 max
= TYPE_MAX_VALUE (index_type
);
3708 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3711 /* zero sized arrays are represented from C FE as complete types with
3712 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while C++ FE represents
3713 them as min 0, max -1. */
3714 if (COMPLETE_TYPE_P (type
)
3715 && integer_zerop (TYPE_SIZE (type
))
3716 && integer_zerop (min
))
3717 return build_int_cst (TREE_TYPE (min
), -1);
3719 return error_mark_node
;
3722 return (integer_zerop (min
)
3724 : fold_build2 (MINUS_EXPR
, TREE_TYPE (max
), max
, min
));
3727 /* If arg is static -- a reference to an object in static storage -- then
3728 return the object. This is not the same as the C meaning of `static'.
3729 If arg isn't static, return NULL. */
3734 switch (TREE_CODE (arg
))
3737 /* Nested functions are static, even though taking their address will
3738 involve a trampoline as we unnest the nested function and create
3739 the trampoline on the tree level. */
3743 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3744 && ! DECL_THREAD_LOCAL_P (arg
)
3745 && ! DECL_DLLIMPORT_P (arg
)
3749 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3753 return TREE_STATIC (arg
) ? arg
: NULL
;
3760 /* If the thing being referenced is not a field, then it is
3761 something language specific. */
3762 gcc_assert (TREE_CODE (TREE_OPERAND (arg
, 1)) == FIELD_DECL
);
3764 /* If we are referencing a bitfield, we can't evaluate an
3765 ADDR_EXPR at compile time and so it isn't a constant. */
3766 if (DECL_BIT_FIELD (TREE_OPERAND (arg
, 1)))
3769 return staticp (TREE_OPERAND (arg
, 0));
3775 return TREE_CONSTANT (TREE_OPERAND (arg
, 0)) ? arg
: NULL
;
3778 case ARRAY_RANGE_REF
:
3779 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg
))) == INTEGER_CST
3780 && TREE_CODE (TREE_OPERAND (arg
, 1)) == INTEGER_CST
)
3781 return staticp (TREE_OPERAND (arg
, 0));
3785 case COMPOUND_LITERAL_EXPR
:
3786 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg
)) ? arg
: NULL
;
3796 /* Return whether OP is a DECL whose address is function-invariant. */
3799 decl_address_invariant_p (const_tree op
)
3801 /* The conditions below are slightly less strict than the one in
3804 switch (TREE_CODE (op
))
3813 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3814 || DECL_THREAD_LOCAL_P (op
)
3815 || DECL_CONTEXT (op
) == current_function_decl
3816 || decl_function_context (op
) == current_function_decl
)
3821 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3822 || decl_function_context (op
) == current_function_decl
)
3833 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3836 decl_address_ip_invariant_p (const_tree op
)
3838 /* The conditions below are slightly less strict than the one in
3841 switch (TREE_CODE (op
))
3849 if (((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3850 && !DECL_DLLIMPORT_P (op
))
3851 || DECL_THREAD_LOCAL_P (op
))
3856 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
)))
3868 /* Return true if T is function-invariant (internal function, does
3869 not handle arithmetic; that's handled in skip_simple_arithmetic and
3870 tree_invariant_p). */
3873 tree_invariant_p_1 (tree t
)
3877 if (TREE_CONSTANT (t
)
3878 || (TREE_READONLY (t
) && !TREE_SIDE_EFFECTS (t
)))
3881 switch (TREE_CODE (t
))
3887 op
= TREE_OPERAND (t
, 0);
3888 while (handled_component_p (op
))
3890 switch (TREE_CODE (op
))
3893 case ARRAY_RANGE_REF
:
3894 if (!tree_invariant_p (TREE_OPERAND (op
, 1))
3895 || TREE_OPERAND (op
, 2) != NULL_TREE
3896 || TREE_OPERAND (op
, 3) != NULL_TREE
)
3901 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
3907 op
= TREE_OPERAND (op
, 0);
3910 return CONSTANT_CLASS_P (op
) || decl_address_invariant_p (op
);
3919 /* Return true if T is function-invariant. */
3922 tree_invariant_p (tree t
)
3924 tree inner
= skip_simple_arithmetic (t
);
3925 return tree_invariant_p_1 (inner
);
3928 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3929 Do this to any expression which may be used in more than one place,
3930 but must be evaluated only once.
3932 Normally, expand_expr would reevaluate the expression each time.
3933 Calling save_expr produces something that is evaluated and recorded
3934 the first time expand_expr is called on it. Subsequent calls to
3935 expand_expr just reuse the recorded value.
3937 The call to expand_expr that generates code that actually computes
3938 the value is the first call *at compile time*. Subsequent calls
3939 *at compile time* generate code to use the saved value.
3940 This produces correct result provided that *at run time* control
3941 always flows through the insns made by the first expand_expr
3942 before reaching the other places where the save_expr was evaluated.
3943 You, the caller of save_expr, must make sure this is so.
3945 Constants, and certain read-only nodes, are returned with no
3946 SAVE_EXPR because that is safe. Expressions containing placeholders
3947 are not touched; see tree.def for an explanation of what these
3951 save_expr (tree expr
)
3955 /* If the tree evaluates to a constant, then we don't want to hide that
3956 fact (i.e. this allows further folding, and direct checks for constants).
3957 However, a read-only object that has side effects cannot be bypassed.
3958 Since it is no problem to reevaluate literals, we just return the
3960 inner
= skip_simple_arithmetic (expr
);
3961 if (TREE_CODE (inner
) == ERROR_MARK
)
3964 if (tree_invariant_p_1 (inner
))
3967 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3968 it means that the size or offset of some field of an object depends on
3969 the value within another field.
3971 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3972 and some variable since it would then need to be both evaluated once and
3973 evaluated more than once. Front-ends must assure this case cannot
3974 happen by surrounding any such subexpressions in their own SAVE_EXPR
3975 and forcing evaluation at the proper time. */
3976 if (contains_placeholder_p (inner
))
3979 expr
= build1_loc (EXPR_LOCATION (expr
), SAVE_EXPR
, TREE_TYPE (expr
), expr
);
3981 /* This expression might be placed ahead of a jump to ensure that the
3982 value was computed on both sides of the jump. So make sure it isn't
3983 eliminated as dead. */
3984 TREE_SIDE_EFFECTS (expr
) = 1;
3988 /* Look inside EXPR into any simple arithmetic operations. Return the
3989 outermost non-arithmetic or non-invariant node. */
3992 skip_simple_arithmetic (tree expr
)
3994 /* We don't care about whether this can be used as an lvalue in this
3996 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3997 expr
= TREE_OPERAND (expr
, 0);
3999 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
4000 a constant, it will be more efficient to not make another SAVE_EXPR since
4001 it will allow better simplification and GCSE will be able to merge the
4002 computations if they actually occur. */
4005 if (UNARY_CLASS_P (expr
))
4006 expr
= TREE_OPERAND (expr
, 0);
4007 else if (BINARY_CLASS_P (expr
))
4009 if (tree_invariant_p (TREE_OPERAND (expr
, 1)))
4010 expr
= TREE_OPERAND (expr
, 0);
4011 else if (tree_invariant_p (TREE_OPERAND (expr
, 0)))
4012 expr
= TREE_OPERAND (expr
, 1);
4023 /* Look inside EXPR into simple arithmetic operations involving constants.
4024 Return the outermost non-arithmetic or non-constant node. */
4027 skip_simple_constant_arithmetic (tree expr
)
4029 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
4030 expr
= TREE_OPERAND (expr
, 0);
4034 if (UNARY_CLASS_P (expr
))
4035 expr
= TREE_OPERAND (expr
, 0);
4036 else if (BINARY_CLASS_P (expr
))
4038 if (TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
4039 expr
= TREE_OPERAND (expr
, 0);
4040 else if (TREE_CONSTANT (TREE_OPERAND (expr
, 0)))
4041 expr
= TREE_OPERAND (expr
, 1);
4052 /* Return which tree structure is used by T. */
4054 enum tree_node_structure_enum
4055 tree_node_structure (const_tree t
)
4057 const enum tree_code code
= TREE_CODE (t
);
4058 return tree_node_structure_for_code (code
);
4061 /* Set various status flags when building a CALL_EXPR object T. */
4064 process_call_operands (tree t
)
4066 bool side_effects
= TREE_SIDE_EFFECTS (t
);
4067 bool read_only
= false;
4068 int i
= call_expr_flags (t
);
4070 /* Calls have side-effects, except those to const or pure functions. */
4071 if ((i
& ECF_LOOPING_CONST_OR_PURE
) || !(i
& (ECF_CONST
| ECF_PURE
)))
4072 side_effects
= true;
4073 /* Propagate TREE_READONLY of arguments for const functions. */
4077 if (!side_effects
|| read_only
)
4078 for (i
= 1; i
< TREE_OPERAND_LENGTH (t
); i
++)
4080 tree op
= TREE_OPERAND (t
, i
);
4081 if (op
&& TREE_SIDE_EFFECTS (op
))
4082 side_effects
= true;
4083 if (op
&& !TREE_READONLY (op
) && !CONSTANT_CLASS_P (op
))
4087 TREE_SIDE_EFFECTS (t
) = side_effects
;
4088 TREE_READONLY (t
) = read_only
;
4091 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
4092 size or offset that depends on a field within a record. */
4095 contains_placeholder_p (const_tree exp
)
4097 enum tree_code code
;
4102 code
= TREE_CODE (exp
);
4103 if (code
== PLACEHOLDER_EXPR
)
4106 switch (TREE_CODE_CLASS (code
))
4109 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
4110 position computations since they will be converted into a
4111 WITH_RECORD_EXPR involving the reference, which will assume
4112 here will be valid. */
4113 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
4115 case tcc_exceptional
:
4116 if (code
== TREE_LIST
)
4117 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp
))
4118 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp
)));
4123 case tcc_comparison
:
4124 case tcc_expression
:
4128 /* Ignoring the first operand isn't quite right, but works best. */
4129 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1));
4132 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
4133 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1))
4134 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 2)));
4137 /* The save_expr function never wraps anything containing
4138 a PLACEHOLDER_EXPR. */
4145 switch (TREE_CODE_LENGTH (code
))
4148 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
4150 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
4151 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1)));
4162 const_call_expr_arg_iterator iter
;
4163 FOR_EACH_CONST_CALL_EXPR_ARG (arg
, iter
, exp
)
4164 if (CONTAINS_PLACEHOLDER_P (arg
))
4178 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4179 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4183 type_contains_placeholder_1 (const_tree type
)
4185 /* If the size contains a placeholder or the parent type (component type in
4186 the case of arrays) type involves a placeholder, this type does. */
4187 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type
))
4188 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type
))
4189 || (!POINTER_TYPE_P (type
)
4191 && type_contains_placeholder_p (TREE_TYPE (type
))))
4194 /* Now do type-specific checks. Note that the last part of the check above
4195 greatly limits what we have to do below. */
4196 switch (TREE_CODE (type
))
4205 case REFERENCE_TYPE
:
4215 case FIXED_POINT_TYPE
:
4216 /* Here we just check the bounds. */
4217 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type
))
4218 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type
)));
4221 /* We have already checked the component type above, so just check
4222 the domain type. Flexible array members have a null domain. */
4223 return TYPE_DOMAIN (type
) ?
4224 type_contains_placeholder_p (TYPE_DOMAIN (type
)) : false;
4228 case QUAL_UNION_TYPE
:
4232 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4233 if (TREE_CODE (field
) == FIELD_DECL
4234 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field
))
4235 || (TREE_CODE (type
) == QUAL_UNION_TYPE
4236 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field
)))
4237 || type_contains_placeholder_p (TREE_TYPE (field
))))
4248 /* Wrapper around above function used to cache its result. */
4251 type_contains_placeholder_p (tree type
)
4255 /* If the contains_placeholder_bits field has been initialized,
4256 then we know the answer. */
4257 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) > 0)
4258 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) - 1;
4260 /* Indicate that we've seen this type node, and the answer is false.
4261 This is what we want to return if we run into recursion via fields. */
4262 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = 1;
4264 /* Compute the real value. */
4265 result
= type_contains_placeholder_1 (type
);
4267 /* Store the real value. */
4268 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = result
+ 1;
4273 /* Push tree EXP onto vector QUEUE if it is not already present. */
4276 push_without_duplicates (tree exp
, vec
<tree
> *queue
)
4281 FOR_EACH_VEC_ELT (*queue
, i
, iter
)
4282 if (simple_cst_equal (iter
, exp
) == 1)
4286 queue
->safe_push (exp
);
4289 /* Given a tree EXP, find all occurrences of references to fields
4290 in a PLACEHOLDER_EXPR and place them in vector REFS without
4291 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4292 we assume here that EXP contains only arithmetic expressions
4293 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4297 find_placeholder_in_expr (tree exp
, vec
<tree
> *refs
)
4299 enum tree_code code
= TREE_CODE (exp
);
4303 /* We handle TREE_LIST and COMPONENT_REF separately. */
4304 if (code
== TREE_LIST
)
4306 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), refs
);
4307 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), refs
);
4309 else if (code
== COMPONENT_REF
)
4311 for (inner
= TREE_OPERAND (exp
, 0);
4312 REFERENCE_CLASS_P (inner
);
4313 inner
= TREE_OPERAND (inner
, 0))
4316 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
4317 push_without_duplicates (exp
, refs
);
4319 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), refs
);
4322 switch (TREE_CODE_CLASS (code
))
4327 case tcc_declaration
:
4328 /* Variables allocated to static storage can stay. */
4329 if (!TREE_STATIC (exp
))
4330 push_without_duplicates (exp
, refs
);
4333 case tcc_expression
:
4334 /* This is the pattern built in ada/make_aligning_type. */
4335 if (code
== ADDR_EXPR
4336 && TREE_CODE (TREE_OPERAND (exp
, 0)) == PLACEHOLDER_EXPR
)
4338 push_without_duplicates (exp
, refs
);
4344 case tcc_exceptional
:
4347 case tcc_comparison
:
4349 for (i
= 0; i
< TREE_CODE_LENGTH (code
); i
++)
4350 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
4354 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4355 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
4363 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4364 return a tree with all occurrences of references to F in a
4365 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4366 CONST_DECLs. Note that we assume here that EXP contains only
4367 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4368 occurring only in their argument list. */
4371 substitute_in_expr (tree exp
, tree f
, tree r
)
4373 enum tree_code code
= TREE_CODE (exp
);
4374 tree op0
, op1
, op2
, op3
;
4377 /* We handle TREE_LIST and COMPONENT_REF separately. */
4378 if (code
== TREE_LIST
)
4380 op0
= SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp
), f
, r
);
4381 op1
= SUBSTITUTE_IN_EXPR (TREE_VALUE (exp
), f
, r
);
4382 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
4385 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4387 else if (code
== COMPONENT_REF
)
4391 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4392 and it is the right field, replace it with R. */
4393 for (inner
= TREE_OPERAND (exp
, 0);
4394 REFERENCE_CLASS_P (inner
);
4395 inner
= TREE_OPERAND (inner
, 0))
4399 op1
= TREE_OPERAND (exp
, 1);
4401 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& op1
== f
)
4404 /* If this expression hasn't been completed let, leave it alone. */
4405 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& !TREE_TYPE (inner
))
4408 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4409 if (op0
== TREE_OPERAND (exp
, 0))
4413 = fold_build3 (COMPONENT_REF
, TREE_TYPE (exp
), op0
, op1
, NULL_TREE
);
4416 switch (TREE_CODE_CLASS (code
))
4421 case tcc_declaration
:
4427 case tcc_expression
:
4433 case tcc_exceptional
:
4436 case tcc_comparison
:
4438 switch (TREE_CODE_LENGTH (code
))
4444 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4445 if (op0
== TREE_OPERAND (exp
, 0))
4448 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4452 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4453 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4455 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4458 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4462 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4463 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4464 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
4466 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4467 && op2
== TREE_OPERAND (exp
, 2))
4470 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4474 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4475 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4476 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
4477 op3
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 3), f
, r
);
4479 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4480 && op2
== TREE_OPERAND (exp
, 2)
4481 && op3
== TREE_OPERAND (exp
, 3))
4485 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4497 new_tree
= NULL_TREE
;
4499 /* If we are trying to replace F with a constant or with another
4500 instance of one of the arguments of the call, inline back
4501 functions which do nothing else than computing a value from
4502 the arguments they are passed. This makes it possible to
4503 fold partially or entirely the replacement expression. */
4504 if (code
== CALL_EXPR
)
4506 bool maybe_inline
= false;
4507 if (CONSTANT_CLASS_P (r
))
4508 maybe_inline
= true;
4510 for (i
= 3; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4511 if (operand_equal_p (TREE_OPERAND (exp
, i
), r
, 0))
4513 maybe_inline
= true;
4518 tree t
= maybe_inline_call_in_expr (exp
);
4520 return SUBSTITUTE_IN_EXPR (t
, f
, r
);
4524 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4526 tree op
= TREE_OPERAND (exp
, i
);
4527 tree new_op
= SUBSTITUTE_IN_EXPR (op
, f
, r
);
4531 new_tree
= copy_node (exp
);
4532 TREE_OPERAND (new_tree
, i
) = new_op
;
4538 new_tree
= fold (new_tree
);
4539 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4540 process_call_operands (new_tree
);
4551 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4553 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4554 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
4559 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4560 for it within OBJ, a tree that is an object or a chain of references. */
4563 substitute_placeholder_in_expr (tree exp
, tree obj
)
4565 enum tree_code code
= TREE_CODE (exp
);
4566 tree op0
, op1
, op2
, op3
;
4569 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4570 in the chain of OBJ. */
4571 if (code
== PLACEHOLDER_EXPR
)
4573 tree need_type
= TYPE_MAIN_VARIANT (TREE_TYPE (exp
));
4576 for (elt
= obj
; elt
!= 0;
4577 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
4578 || TREE_CODE (elt
) == COND_EXPR
)
4579 ? TREE_OPERAND (elt
, 1)
4580 : (REFERENCE_CLASS_P (elt
)
4581 || UNARY_CLASS_P (elt
)
4582 || BINARY_CLASS_P (elt
)
4583 || VL_EXP_CLASS_P (elt
)
4584 || EXPRESSION_CLASS_P (elt
))
4585 ? TREE_OPERAND (elt
, 0) : 0))
4586 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
4589 for (elt
= obj
; elt
!= 0;
4590 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
4591 || TREE_CODE (elt
) == COND_EXPR
)
4592 ? TREE_OPERAND (elt
, 1)
4593 : (REFERENCE_CLASS_P (elt
)
4594 || UNARY_CLASS_P (elt
)
4595 || BINARY_CLASS_P (elt
)
4596 || VL_EXP_CLASS_P (elt
)
4597 || EXPRESSION_CLASS_P (elt
))
4598 ? TREE_OPERAND (elt
, 0) : 0))
4599 if (POINTER_TYPE_P (TREE_TYPE (elt
))
4600 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
4602 return fold_build1 (INDIRECT_REF
, need_type
, elt
);
4604 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4605 survives until RTL generation, there will be an error. */
4609 /* TREE_LIST is special because we need to look at TREE_VALUE
4610 and TREE_CHAIN, not TREE_OPERANDS. */
4611 else if (code
== TREE_LIST
)
4613 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), obj
);
4614 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), obj
);
4615 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
4618 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4621 switch (TREE_CODE_CLASS (code
))
4624 case tcc_declaration
:
4627 case tcc_exceptional
:
4630 case tcc_comparison
:
4631 case tcc_expression
:
4634 switch (TREE_CODE_LENGTH (code
))
4640 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4641 if (op0
== TREE_OPERAND (exp
, 0))
4644 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4648 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4649 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4651 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4654 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4658 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4659 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4660 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4662 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4663 && op2
== TREE_OPERAND (exp
, 2))
4666 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4670 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4671 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4672 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4673 op3
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 3), obj
);
4675 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4676 && op2
== TREE_OPERAND (exp
, 2)
4677 && op3
== TREE_OPERAND (exp
, 3))
4681 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4693 new_tree
= NULL_TREE
;
4695 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4697 tree op
= TREE_OPERAND (exp
, i
);
4698 tree new_op
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (op
, obj
);
4702 new_tree
= copy_node (exp
);
4703 TREE_OPERAND (new_tree
, i
) = new_op
;
4709 new_tree
= fold (new_tree
);
4710 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4711 process_call_operands (new_tree
);
4722 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4724 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4725 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
/* Subroutine of stabilize_reference; this is called for subtrees of
   references.  Any expression with side-effects must be put in a SAVE_EXPR
   to ensure that it is only evaluated once.

   We don't put SAVE_EXPR nodes around everything, because assigning very
   simple expressions to temporaries causes us to miss good opportunities
   for optimizations.  Among other things, the opportunity to fold in the
   addition of a constant into an addressing mode often gets lost, e.g.
   "y[i+1] += x;".  In general, we take the approach that we should not make
   an assignment unless we are forced into it - i.e., that any non-side effect
   operator should be allowed, and that cse should take care of coalescing
   multiple utterances of the same expression should that prove fruitful.  */

stabilize_reference_1 (tree e)
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */
  if (tree_invariant_p (e))

  switch (TREE_CODE_CLASS (code))
    case tcc_exceptional:
      /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
	 have side-effects.  */
      if (code == STATEMENT_LIST)
	return save_expr (e);
    case tcc_declaration:
    case tcc_comparison:
    case tcc_expression:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
	return save_expr (e);

      /* Constants need no processing.  In fact, we should never reach
	 this point.  */

      /* Division is slow and tends to be compiled with jumps,
	 especially the division by powers of 2 that is often
	 found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
	return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
			 stabilize_reference_1 (TREE_OPERAND (e, 1)));

      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));

  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
/* Stabilize a reference so that we can use it any number of times
   without causing its operands to be evaluated more than once.
   Returns the stabilized reference.  This works by means of save_expr,
   so see the caveats in the comments about save_expr.

   Also allows conversion expressions whose operands are references.
   Any other kind of expression is returned unchanged.  */

stabilize_reference (tree ref)
  enum tree_code code = TREE_CODE (ref);

      /* No action is needed in this case.  */

    case FIX_TRUNC_EXPR:
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));

      result = build_nt (INDIRECT_REF,
			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));

      result = build_nt (COMPONENT_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), NULL_TREE);

      result = build_nt (BIT_FIELD_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);

      result = build_nt (ARRAY_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));

      /* We cannot wrap the first expression in a SAVE_EXPR, as then
	 it wouldn't be ignored.  This matters when dealing with
	 volatiles.  */
      return stabilize_reference_1 (ref);

      /* If arg isn't a kind of lvalue we recognize, make no change.
	 Caller should recognize the error for an invalid lvalue.  */

      return error_mark_node;

  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
  protected_set_expr_location (result, EXPR_LOCATION (ref));
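
/* Usage sketch (hypothetical caller, not taken from this file): when a front
   end expands a compound assignment such as "a[i++] += 1", it can stabilize
   the lvalue once and reuse it, so the side effects in the index are
   evaluated only a single time.  Here TYPE and ONE stand for the element
   type and a constant 1:

     tree lhs = stabilize_reference (ref);        // a[i++] -> a[SAVE_EXPR <i++>]
     tree sum = build2 (PLUS_EXPR, type, lhs, one);
     tree set = build2 (MODIFY_EXPR, type, lhs, sum);
*/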
/* Low-level constructors for expressions.  */

/* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR.  */

recompute_tree_invariant_for_addr_expr (tree t)
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));

  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (INDIRECT_REF_P (node)
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
    se |= TREE_SIDE_EFFECTS (node);

  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
/* Build an expression of code CODE, data type TYPE, and operands as
   specified.  Expressions and reference nodes can be created this way.
   Constants, decls, types and misc nodes cannot be.

   We define 5 non-variadic functions, from 0 to 4 arguments.  This is
   enough for all extant tree codes.  */

build0 (enum tree_code code, tree tt MEM_STAT_DECL)
  gcc_assert (TREE_CODE_LENGTH (code) == 0);
  t = make_node (code PASS_MEM_STAT);

build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
  int length = sizeof (struct tree_exp);

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  if (node && !TYPE_P (node))
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);

  if (TREE_CODE_CLASS (code) == tcc_statement)
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;

      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;

      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;

      recompute_tree_invariant_for_addr_expr (t);

  if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
      && node && !TYPE_P (node)
      && TREE_CONSTANT (node))
    TREE_CONSTANT (t) = 1;
  if (TREE_CODE_CLASS (code) == tcc_reference
      && node && TREE_THIS_VOLATILE (node))
    TREE_THIS_VOLATILE (t) = 1;
#define PROCESS_ARG(N)				\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\

build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
  bool constant, read_only, side_effects, div_by_zero;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
	 we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node (code PASS_MEM_STAT);

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);

  side_effects = TREE_SIDE_EFFECTS (t);

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      div_by_zero = integer_zerop (arg1);
      div_by_zero = false;

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
      TREE_READONLY (t) = read_only;
      /* Don't mark X / 0 as constant.  */
      TREE_CONSTANT (t) = constant && !div_by_zero;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
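
/* Illustrative sketch (hypothetical values, not part of the contract beyond
   what the code above sets): for a binary arithmetic node built from two
   constants, build2 marks the result TREE_CONSTANT, except when the divisor
   is a literal zero:

     tree a = build_int_cst (integer_type_node, 6);
     tree b = build_int_cst (integer_type_node, 0);
     tree d = build2 (TRUNC_DIV_EXPR, integer_type_node, a, b);
     // TREE_CONSTANT (d) is 0 because div_by_zero was detected above.
*/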
build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2 MEM_STAT_DECL)
  bool constant, read_only, side_effects;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code PASS_MEM_STAT);

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
    side_effects = TREE_SIDE_EFFECTS (t);

  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3 MEM_STAT_DECL)
  bool constant, read_only, side_effects;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node (code PASS_MEM_STAT);

  side_effects = TREE_SIDE_EFFECTS (t);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
	tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
  bool constant, read_only, side_effects;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node (code PASS_MEM_STAT);

  side_effects = TREE_SIDE_EFFECTS (t);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR.  */

build_simple_mem_ref_loc (location_t loc, tree ptr)
  poly_int64 offset = 0;
  tree ptype = TREE_TYPE (ptr);

  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      if (TREE_CODE (ptr) == MEM_REF)
	  offset += mem_ref_offset (ptr).force_shwi ();
	  ptr = TREE_OPERAND (ptr, 0);
	ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));

  tem = build2 (MEM_REF, TREE_TYPE (ptype),
		ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);

/* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T.  */

mem_ref_offset (const_tree t)
  return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),

/* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
   offsetted by OFFSET units.  */

build_invariant_address (tree type, tree base, poly_int64 offset)
  tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
			  build_fold_addr_expr (base),
			  build_int_cst (ptr_type_node, offset));
  tree addr = build1 (ADDR_EXPR, type, ref);
  recompute_tree_invariant_for_addr_expr (addr);
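
/* Usage sketch (hypothetical caller): building a dereference of a pointer at
   a given location collapses &base-plus-offset forms into a single MEM_REF:

     tree deref = build_simple_mem_ref_loc (loc, ptr);
     // behaves like *ptr, with any constant offset folded into operand 1

   mem_ref_offset (deref) would then return that folded byte offset.  */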
/* Similar except don't specify the TREE_TYPE
   and leave the TREE_SIDE_EFFECTS as 0.
   It is permissible for arguments to be null,
   or even garbage if their values do not matter.  */

build_nt (enum tree_code code, ...)
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code);
  length = TREE_CODE_LENGTH (code);

  for (i = 0; i < length; i++)
    TREE_OPERAND (t, i) = va_arg (p, tree);

/* Similar to build_nt, but for creating a CALL_EXPR object with a
   tree VEC.  */

build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
  ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
  CALL_EXPR_FN (ret) = fn;
  CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
  FOR_EACH_VEC_SAFE_ELT (args, ix, t)
    CALL_EXPR_ARG (ret, ix) = t;
/* Create a DECL_... node of code CODE, name NAME (if non-null)
   and data type TYPE.
   We do NOT enter this node in any sort of symbol table.

   LOC is the location of the decl.

   layout_decl is used to set up the decl's storage layout.
   Other slots are initialized to 0 or null pointers.  */

build_decl (location_t loc, enum tree_code code, tree name,
	    tree type MEM_STAT_DECL)
  t = make_node (code PASS_MEM_STAT);
  DECL_SOURCE_LOCATION (t) = loc;

/*  if (type == error_mark_node)
    type = integer_type_node; */
/* That is not done, deliberately, so that having error_mark_node
   as the type can suppress useless errors in the use of this variable.  */

  DECL_NAME (t) = name;
  TREE_TYPE (t) = type;

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)

/* Create and return a DEBUG_EXPR_DECL node of the given TYPE.  */

build_debug_expr_decl (tree type)
  tree vexpr = make_node (DEBUG_EXPR_DECL);
  DECL_ARTIFICIAL (vexpr) = 1;
  TREE_TYPE (vexpr) = type;
  SET_DECL_MODE (vexpr, TYPE_MODE (type));

/* Builds and returns function declaration with NAME and TYPE.  */

build_fn_decl (const char *name, tree type)
  tree id = get_identifier (name);
  tree decl = build_decl (input_location, FUNCTION_DECL, id, type);

  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
vec<tree, va_gc> *all_translation_units;

/* Builds a new translation-unit decl with name NAME, queues it in the
   global list of translation-unit decls and returns it.  */

build_translation_unit_decl (tree name)
  tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
			name, NULL_TREE);
  TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
  vec_safe_push (all_translation_units, tu);

/* BLOCK nodes are used to represent the structure of binding contours
   and declarations, once those contours have been exited and their contents
   compiled.  This information is used for outputting debugging info.  */

build_block (tree vars, tree subblocks, tree supercontext, tree chain)
  tree block = make_node (BLOCK);

  BLOCK_VARS (block) = vars;
  BLOCK_SUBBLOCKS (block) = subblocks;
  BLOCK_SUPERCONTEXT (block) = supercontext;
  BLOCK_CHAIN (block) = chain;

/* Like SET_EXPR_LOCATION, but make sure the tree can have a location.

   LOC is the location to use in tree T.  */

protected_set_expr_location (tree t, location_t loc)
  if (CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, loc);
  else if (t && TREE_CODE (t) == STATEMENT_LIST)
      t = expr_single (t);
      if (t && CAN_HAVE_LOCATION_P (t))
	SET_EXPR_LOCATION (t, loc);

/* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
   UNKNOWN_LOCATION.  */

protected_set_expr_location_if_unset (tree t, location_t loc)
  t = expr_single (t);
  if (t && !EXPR_HAS_LOCATION (t))
    protected_set_expr_location (t, loc);
/* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
   of the various TYPE_QUAL values.  */

set_type_quals (tree type, int type_quals)
  TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
  TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
  TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
  TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
  TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);

/* Returns true iff CAND and BASE have equivalent language-specific
   qualifiers.  */

check_lang_type (const_tree cand, const_tree base)
  if (lang_hooks.types.type_hash_eq == NULL)
  /* type_hash_eq currently only applies to these types.  */
  if (TREE_CODE (cand) != FUNCTION_TYPE
      && TREE_CODE (cand) != METHOD_TYPE)
  return lang_hooks.types.type_hash_eq (cand, base);

/* This function checks to see if TYPE matches the size one of the built-in
   atomic types, and returns that core atomic type.  */

find_atomic_core_type (const_tree type)
  tree base_atomic_type;

  /* Only handle complete types.  */
  if (!tree_fits_uhwi_p (TYPE_SIZE (type)))

  switch (tree_to_uhwi (TYPE_SIZE (type)))
      base_atomic_type = atomicQI_type_node;
      base_atomic_type = atomicHI_type_node;
      base_atomic_type = atomicSI_type_node;
      base_atomic_type = atomicDI_type_node;
      base_atomic_type = atomicTI_type_node;
      base_atomic_type = NULL_TREE;

  return base_atomic_type;

/* Returns true iff unqualified CAND and BASE are equivalent.  */

check_base_type (const_tree cand, const_tree base)
  if (TYPE_NAME (cand) != TYPE_NAME (base)
      /* Apparently this is needed for Objective-C.  */
      || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
      || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
				TYPE_ATTRIBUTES (base)))
  /* Check alignment.  */
  if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
      && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
  /* Atomic types increase minimal alignment.  We must do so as well
     or we get duplicated canonical types.  See PR88686.  */
  if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
      /* See if this object can map to a basic atomic type.  */
      tree atomic_type = find_atomic_core_type (cand);
      if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
/* Returns true iff CAND is equivalent to BASE with TYPE_QUALS.  */

check_qualified_type (const_tree cand, const_tree base, int type_quals)
  return (TYPE_QUALS (cand) == type_quals
	  && check_base_type (cand, base)
	  && check_lang_type (cand, base));

/* Returns true iff CAND is equivalent to BASE with ALIGN.  */

check_aligned_type (const_tree cand, const_tree base, unsigned int align)
  return (TYPE_QUALS (cand) == TYPE_QUALS (base)
	  && TYPE_NAME (cand) == TYPE_NAME (base)
	  /* Apparently this is needed for Objective-C.  */
	  && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
	  /* Check alignment.  */
	  && TYPE_ALIGN (cand) == align
	  /* Check this is a user-aligned type as build_aligned_type
	     will create.  */
	  && TYPE_USER_ALIGN (cand)
	  && attribute_list_equal (TYPE_ATTRIBUTES (cand),
				   TYPE_ATTRIBUTES (base))
	  && check_lang_type (cand, base));

/* Return a version of the TYPE, qualified as indicated by the
   TYPE_QUALS, if one exists.  If no qualified version exists yet,
   return NULL_TREE.  */

get_qualified_type (tree type, int type_quals)
  if (TYPE_QUALS (type) == type_quals)

  tree mv = TYPE_MAIN_VARIANT (type);
  if (check_qualified_type (mv, type, type_quals))

  /* Search the chain of variants to see if there is already one there just
     like the one we need to have.  If so, use that existing one.  We must
     preserve the TYPE_NAME, since there is code that depends on this.  */
  for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
    if (check_qualified_type (*tp, type, type_quals))
	/* Put the found variant at the head of the variant list so
	   frequently searched variants get found faster.  The C++ FE
	   benefits greatly from this.  */
	*tp = TYPE_NEXT_VARIANT (t);
	TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
	TYPE_NEXT_VARIANT (mv) = t;
/* Like get_qualified_type, but creates the type if it does not
   exist.  This function never returns NULL_TREE.  */

build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
      t = build_variant_type_copy (type PASS_MEM_STAT);
      set_type_quals (t, type_quals);

      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
	  /* See if this object can map to a basic atomic type.  */
	  tree atomic_type = find_atomic_core_type (type);
	      /* Ensure the alignment of this type is compatible with
		 the required alignment of the atomic type.  */
	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
		SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
	/* Propagate structural equality. */
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
	/* Build the underlying canonical type, since it is different
	   from TYPE. */
	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
	/* T is its own canonical type. */
	TYPE_CANONICAL (t) = t;

/* Create a variant of type T with alignment ALIGN.  */

build_aligned_type (tree type, unsigned int align)
  if (TYPE_PACKED (type)
      || TYPE_ALIGN (type) == align)

  for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    if (check_aligned_type (t, type, align))

  t = build_variant_type_copy (type);
  SET_TYPE_ALIGN (t, align);
  TYPE_USER_ALIGN (t) = 1;
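
/* Example (sketch): the qualified-variant machinery above means repeated
   requests return the same node rather than allocating a new type:

     tree c1 = build_qualified_type (char_type_node, TYPE_QUAL_CONST);
     tree c2 = build_qualified_type (char_type_node, TYPE_QUAL_CONST);
     // c1 == c2: the second call finds the variant via get_qualified_type.
*/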
/* Create a new distinct copy of TYPE.  The new type is made its own
   MAIN_VARIANT. If TYPE requires structural equality checks, the
   resulting type requires structural equality checks; otherwise, its
   TYPE_CANONICAL points to itself. */

build_distinct_type_copy (tree type MEM_STAT_DECL)
  tree t = copy_node (type PASS_MEM_STAT);

  TYPE_POINTER_TO (t) = 0;
  TYPE_REFERENCE_TO (t) = 0;

  /* Set the canonical type either to a new equivalence class, or
     propagate the need for structural equality checks. */
  if (TYPE_STRUCTURAL_EQUALITY_P (type))
    SET_TYPE_STRUCTURAL_EQUALITY (t);
    TYPE_CANONICAL (t) = t;

  /* Make it its own variant.  */
  TYPE_MAIN_VARIANT (t) = t;
  TYPE_NEXT_VARIANT (t) = 0;

  /* Note that it is now possible for TYPE_MIN_VALUE to be a value
     whose TREE_TYPE is not t.  This can also happen in the Ada
     frontend when using subtypes.  */

/* Create a new variant of TYPE, equivalent but distinct.  This is so
   the caller can modify it.  TYPE_CANONICAL for the return type will
   be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
   are considered equal by the language itself (or that both types
   require structural equality checks).  */

build_variant_type_copy (tree type MEM_STAT_DECL)
  tree t, m = TYPE_MAIN_VARIANT (type);

  t = build_distinct_type_copy (type PASS_MEM_STAT);

  /* Since we're building a variant, assume that it is a non-semantic
     variant.  This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
  TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
  /* Type variants have no alias set defined.  */
  TYPE_ALIAS_SET (t) = -1;

  /* Add the new type to the chain of variants of TYPE.  */
  TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
  TYPE_NEXT_VARIANT (m) = t;
  TYPE_MAIN_VARIANT (t) = m;
/* Return true if the from tree in both tree maps are equal.  */

tree_map_base_eq (const void *va, const void *vb)
  const struct tree_map_base *const a = (const struct tree_map_base *) va,
    *const b = (const struct tree_map_base *) vb;
  return (a->from == b->from);

/* Hash a from tree in a tree_base_map.  */

tree_map_base_hash (const void *item)
  return htab_hash_pointer (((const struct tree_map_base *)item)->from);

/* Return true if this tree map structure is marked for garbage collection
   purposes.  We simply return true if the from tree is marked, so that this
   structure goes away when the from tree goes away.  */

tree_map_base_marked_p (const void *p)
  return ggc_marked_p (((const struct tree_map_base *) p)->from);

/* Hash a from tree in a tree_map.  */

tree_map_hash (const void *item)
  return (((const struct tree_map *) item)->hash);

/* Hash a from tree in a tree_decl_map.  */

tree_decl_map_hash (const void *item)
  return DECL_UID (((const struct tree_decl_map *) item)->base.from);

/* Return the initialization priority for DECL.  */

decl_init_priority_lookup (tree decl)
  symtab_node *snode = symtab_node::get (decl);

    return DEFAULT_INIT_PRIORITY;
  return
    snode->get_init_priority ();

/* Return the finalization priority for DECL.  */

decl_fini_priority_lookup (tree decl)
  cgraph_node *node = cgraph_node::get (decl);

    return DEFAULT_INIT_PRIORITY;
  return
    node->get_fini_priority ();

/* Set the initialization priority for DECL to PRIORITY.  */

decl_init_priority_insert (tree decl, priority_type priority)
  struct symtab_node *snode;

  if (priority == DEFAULT_INIT_PRIORITY)
      snode = symtab_node::get (decl);
  else if (VAR_P (decl))
    snode = varpool_node::get_create (decl);
    snode = cgraph_node::get_create (decl);
  snode->set_init_priority (priority);

/* Set the finalization priority for DECL to PRIORITY.  */

decl_fini_priority_insert (tree decl, priority_type priority)
  struct cgraph_node *node;

  if (priority == DEFAULT_INIT_PRIORITY)
      node = cgraph_node::get (decl);
    node = cgraph_node::get_create (decl);
  node->set_fini_priority (priority);
/* Print out the statistics for the DECL_DEBUG_EXPR hash table.  */

print_debug_expr_statistics (void)
  fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
	   (long) debug_expr_for_decl->size (),
	   (long) debug_expr_for_decl->elements (),
	   debug_expr_for_decl->collisions ());

/* Print out the statistics for the DECL_VALUE_EXPR hash table.  */

print_value_expr_statistics (void)
  fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
	   (long) value_expr_for_decl->size (),
	   (long) value_expr_for_decl->elements (),
	   value_expr_for_decl->collisions ());

/* Lookup a debug expression for FROM, and return it if we find one.  */

decl_debug_expr_lookup (tree from)
  struct tree_decl_map *h, in;
  in.base.from = from;

  h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));

/* Insert a mapping FROM->TO in the debug expression hashtable.  */

decl_debug_expr_insert (tree from, tree to)
  struct tree_decl_map *h;

  h = ggc_alloc<tree_decl_map> ();
  h->base.from = from;
  *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;

/* Lookup a value expression for FROM, and return it if we find one.  */

decl_value_expr_lookup (tree from)
  struct tree_decl_map *h, in;
  in.base.from = from;

  h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));

/* Insert a mapping FROM->TO in the value expression hashtable.  */

decl_value_expr_insert (tree from, tree to)
  struct tree_decl_map *h;

  /* Uses of FROM shouldn't look like they happen at the location of TO.  */
  to = protected_set_expr_location_unshare (to, UNKNOWN_LOCATION);

  h = ggc_alloc<tree_decl_map> ();
  h->base.from = from;
  *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;

/* Lookup a vector of debug arguments for FROM, and return it if we
   find one.  */

decl_debug_args_lookup (tree from)
  struct tree_vec_map *h, in;

  if (!DECL_HAS_DEBUG_ARGS_P (from))
  gcc_checking_assert (debug_args_for_decl != NULL);
  in.base.from = from;
  h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));

/* Insert a mapping FROM->empty vector of debug arguments in the value
   expression hashtable.  */

decl_debug_args_insert (tree from)
  struct tree_vec_map *h;

  if (DECL_HAS_DEBUG_ARGS_P (from))
    return decl_debug_args_lookup (from);
  if (debug_args_for_decl == NULL)
    debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
  h = ggc_alloc<tree_vec_map> ();
  h->base.from = from;
  loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
  DECL_HAS_DEBUG_ARGS_P (from) = 1;
/* Hashing of types so that we don't make duplicates.
   The entry point is `type_hash_canon'.  */

/* Generate the default hash code for TYPE.  This is designed for
   speed, rather than maximum entropy.  */

type_hash_canon_hash (tree type)
  inchash::hash hstate;

  hstate.add_int (TREE_CODE (type));

  if (TREE_TYPE (type))
    hstate.add_object (TYPE_HASH (TREE_TYPE (type)));

  for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
    /* Just the identifier is adequate to distinguish.  */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));

  switch (TREE_CODE (type))
      hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
      for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) != error_mark_node)
	  hstate.add_object (TYPE_HASH (TREE_VALUE (t)));

      hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));

      if (TYPE_DOMAIN (type))
	hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
      if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
	  unsigned typeless = TYPE_TYPELESS_STORAGE (type);
	  hstate.add_object (typeless);

	tree t = TYPE_MAX_VALUE (type);
	  t = TYPE_MIN_VALUE (type);
	for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	  hstate.add_object (TREE_INT_CST_ELT (t, i));

	unsigned prec = TYPE_PRECISION (type);
	unsigned uns = TYPE_UNSIGNED (type);
	hstate.add_object (prec);
	hstate.add_int (uns);

    case FIXED_POINT_TYPE:
	unsigned prec = TYPE_PRECISION (type);
	hstate.add_object (prec);

      hstate.add_poly_int (TYPE_VECTOR_SUBPARTS (type));

  return hstate.end ();
/* These are the Hashtable callback functions.  */

/* Returns true iff the types are equivalent.  */

type_cache_hasher::equal (type_hash *a, type_hash *b)
  /* First test the things that are the same for all types.  */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
	  && TYPE_NAME (a->type) != TYPE_NAME (b->type)))

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))

  switch (TREE_CODE (a->type))
    case REFERENCE_TYPE:
      return known_eq (TYPE_VECTOR_SUBPARTS (a->type),
		       TYPE_VECTOR_SUBPARTS (b->type));

      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))

      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
      return TYPE_UNSIGNED (a->type) == TYPE_UNSIGNED (b->type);

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))

      /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
	 where the flag should be inherited from the element type
	 and can change after ARRAY_TYPEs are created; on non-aggregates
	 compare it and hash it, scalars will never have that flag set
	 and we need to differentiate between arrays created by different
	 front-ends or middle-end created arrays.  */
      return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
	      && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
		  || (TYPE_TYPELESS_STORAGE (a->type)
		      == TYPE_TYPELESS_STORAGE (b->type))));

    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

      if ((TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	   && (TYPE_NO_NAMED_ARGS_STDARG_P (a->type)
	       == TYPE_NO_NAMED_ARGS_STDARG_P (b->type)))
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))

  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.  */

type_hash_canon (unsigned int hashcode, tree type)
  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1
		  && t1 != type);
      if (TYPE_UID (type) + 1 == next_type_uid)
      /* Free also min/max values and the cache for integer
	 types.  This can't be done in free_node, as LTO frees
	 those on its own.  */
      if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == BITINT_TYPE)
	  if (TYPE_MIN_VALUE (type)
	      && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
	      /* Zero is always in TYPE_CACHED_VALUES.  */
	      if (! TYPE_UNSIGNED (type))
		int_cst_hash_table->remove_elt (TYPE_MIN_VALUE (type));
	      ggc_free (TYPE_MIN_VALUE (type));
	  if (TYPE_MAX_VALUE (type)
	      && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
	      int_cst_hash_table->remove_elt (TYPE_MAX_VALUE (type));
	      ggc_free (TYPE_MAX_VALUE (type));
	  if (TYPE_CACHED_VALUES_P (type))
	    ggc_free (TYPE_CACHED_VALUES (type));

      struct type_hash *h;

      h = ggc_alloc<type_hash> ();

print_type_hash_statistics (void)
  fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
	   (long) type_hash_table->size (),
	   (long) type_hash_table->elements (),
	   type_hash_table->collisions ());
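
/* Interning sketch, mirroring the comment before type_hash_canon (the names
   CAND, H and CANON are local to this example):

     tree cand = make_node (INTEGER_TYPE);
     TYPE_PRECISION (cand) = 24;
     fixup_unsigned_type (cand);
     hashval_t h = type_hash_canon_hash (cand);
     tree canon = type_hash_canon (h, cand);
     // canon is either CAND (now recorded) or a previously interned,
     // structurally identical type; in the latter case CAND can be freed.
*/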
/* Given two lists of types
   (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
   return 1 if the lists contain the same types in the same order.
   Also, the TREE_PURPOSEs must match.  */

type_list_equal (const_tree l1, const_tree l2)
  for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    if (TREE_VALUE (t1) != TREE_VALUE (t2)
	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
		  && (TREE_TYPE (TREE_PURPOSE (t1))
		      == TREE_TYPE (TREE_PURPOSE (t2))))))

/* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
   given by TYPE.  If the argument list accepts variable arguments,
   then this function counts only the ordinary arguments.  */

type_num_arguments (const_tree fntype)
  for (tree t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    /* If the function does not take a variable number of arguments,
       the last element in the list will have type `void'.  */
    if (VOID_TYPE_P (TREE_VALUE (t)))

/* Return the type of the function TYPE's argument ARGNO if known.
   For vararg functions where ARGNO refers to one of the variadic
   arguments return null.  Otherwise, return a void_type_node for
   out-of-bounds ARGNO.  */

type_argument_type (const_tree fntype, unsigned argno)
  /* Treat zero the same as an out-of-bounds argument number.  */
    return void_type_node;

  function_args_iterator iter;

  FOREACH_FUNCTION_ARGS (fntype, argtype, iter)
      /* A vararg function's argument list ends in a null.  Otherwise,
	 an ordinary function's argument list ends with void.  Return
	 null if ARGNO refers to a vararg argument, void_type_node if
	 it's out of bounds, and the formal argument type otherwise.  */
      if (i == argno || VOID_TYPE_P (argtype))

/* True if integer constants T1 and T2
   represent the same constant value.  */

tree_int_cst_equal (const_tree t1, const_tree t2)
  if (t1 == 0 || t2 == 0)

  STRIP_ANY_LOCATION_WRAPPER (t1);
  STRIP_ANY_LOCATION_WRAPPER (t2);

  if (TREE_CODE (t1) == INTEGER_CST
      && TREE_CODE (t2) == INTEGER_CST
      && wi::to_widest (t1) == wi::to_widest (t2))
/* Return true if T is an INTEGER_CST whose numerical value (extended
   according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  */

tree_fits_shwi_p (const_tree t)
  return (t != NULL_TREE
	  && TREE_CODE (t) == INTEGER_CST
	  && wi::fits_shwi_p (wi::to_widest (t)));

/* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
   value (extended according to TYPE_UNSIGNED) fits in a poly_int64.  */

tree_fits_poly_int64_p (const_tree t)
  if (POLY_INT_CST_P (t))
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
	if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
  return (TREE_CODE (t) == INTEGER_CST
	  && wi::fits_shwi_p (wi::to_widest (t)));

/* Return true if T is an INTEGER_CST whose numerical value (extended
   according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  */

tree_fits_uhwi_p (const_tree t)
  return (t != NULL_TREE
	  && TREE_CODE (t) == INTEGER_CST
	  && wi::fits_uhwi_p (wi::to_widest (t)));

/* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
   value (extended according to TYPE_UNSIGNED) fits in a poly_uint64.  */

tree_fits_poly_uint64_p (const_tree t)
  if (POLY_INT_CST_P (t))
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
	if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
  return (TREE_CODE (t) == INTEGER_CST
	  && wi::fits_uhwi_p (wi::to_widest (t)));

/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  */

tree_to_shwi (const_tree t)
  gcc_assert (tree_fits_shwi_p (t));
  return TREE_INT_CST_LOW (t);

/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  */

unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
  gcc_assert (tree_fits_uhwi_p (t));
  return TREE_INT_CST_LOW (t);

/* Return the most significant (sign) bit of T.  */

tree_int_cst_sign_bit (const_tree t)
  unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;

  return wi::extract_uhwi (wi::to_wide (t), bitno, 1);

/* Return an indication of the sign of the integer constant T.
   The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
   Note that -1 will never be returned if T's type is unsigned.  */

tree_int_cst_sgn (const_tree t)
  if (wi::to_wide (t) == 0)
  else if (TYPE_UNSIGNED (TREE_TYPE (t)))
  else if (wi::neg_p (wi::to_wide (t)))

/* Return the minimum number of bits needed to represent VALUE in a
   signed or unsigned type, UNSIGNEDP says which.  */

tree_int_cst_min_precision (tree value, signop sgn)
  /* If the value is negative, compute its negative minus 1.  The latter
     adjustment is because the absolute value of the largest negative value
     is one larger than the largest positive value.  This is equivalent to
     a bit-wise negation, so use that operation instead.  */
  if (tree_int_cst_sgn (value) < 0)
    value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);

  /* Return the number of bits needed, taking into account the fact
     that we need one more bit for a signed than unsigned type.
     If value is 0 or -1, the minimum precision is 1 no matter
     whether unsignedp is true or false.  */
  if (integer_zerop (value))
  return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
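
/* Worked example: for VALUE == 5 (binary 101), tree_floor_log2 returns 2, so
   the result is 3 bits for UNSIGNED and 4 bits for SIGNED (one extra bit for
   the sign).  For VALUE == -3 the bit-wise negation above turns it into 2
   first, giving 1 + 1 + 1 = 3 bits, enough for a signed -4..3 range.  */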
/* Return truthvalue of whether T1 is the same tree structure as T2.
   Return 1 if they are the same.
   Return 0 if they are understandably different.
   Return -1 if either contains tree structure not understood by
   this function.  */

simple_cst_equal (const_tree t1, const_tree t2)
  enum tree_code code1, code2;

  if (t1 == 0 || t2 == 0)

  /* For location wrappers to be the same, they must be at the same
     source location (and wrap the same thing).  */
  if (location_wrapper_p (t1) && location_wrapper_p (t2))
      if (EXPR_LOCATION (t1) != EXPR_LOCATION (t2))
      return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

  code1 = TREE_CODE (t1);
  code2 = TREE_CODE (t2);

  if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
      if (CONVERT_EXPR_CODE_P (code2)
	  || code2 == NON_LVALUE_EXPR)
	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
	return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
  else if (CONVERT_EXPR_CODE_P (code2)
	   || code2 == NON_LVALUE_EXPR)
    return simple_cst_equal (t1, TREE_OPERAND (t2, 0));

      return wi::to_widest (t1) == wi::to_widest (t2);

      return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));

      return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));

      return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
	      && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
			   TREE_STRING_LENGTH (t1)));

	unsigned HOST_WIDE_INT idx;
	vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
	vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);

	if (vec_safe_length (v1) != vec_safe_length (v2))

	for (idx = 0; idx < vec_safe_length (v1); ++idx)
	  /* ??? Should we handle also fields here? */
	  if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))

      return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

      cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
      if (call_expr_nargs (t1) != call_expr_nargs (t2))

	const_tree arg1, arg2;
	const_call_expr_arg_iterator iter1, iter2;
	for (arg1 = first_const_call_expr_arg (t1, &iter1),
	       arg2 = first_const_call_expr_arg (t2, &iter2);
	     arg1 = next_const_call_expr_arg (&iter1),
	       arg2 = next_const_call_expr_arg (&iter2))
	    cmp = simple_cst_equal (arg1, arg2);
	return arg1 == arg2;

      /* Special case: if either target is an unallocated VAR_DECL,
	 it means that it's going to be unified with whatever the
	 TARGET_EXPR is really supposed to initialize, so treat it
	 as being equivalent to anything.  */
      if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
	   && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
	   && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
	  || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
	      && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
	      && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
	cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

      return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));

    case WITH_CLEANUP_EXPR:
      cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

      return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));

      if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
	return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));

      if (POLY_INT_CST_P (t1))
	/* A false return means maybe_ne rather than known_ne.  */
	return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
						TYPE_SIGN (TREE_TYPE (t1))),
			 poly_widest_int::from (poly_int_cst_value (t2),
						TYPE_SIGN (TREE_TYPE (t2))));

  /* This general rule works for most tree codes.  All exceptions should be
     handled above.  If this is a language-specific tree code, we can't
     trust what might be in the operand, so say we don't know
     the situation.  */
  if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)

  switch (TREE_CODE_CLASS (code1))
    case tcc_comparison:
    case tcc_expression:
      for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
	  cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
/* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
   Return -1, 0, or 1 if the value of T is less than, equal to, or greater
   than U, respectively.  */

compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
  if (tree_int_cst_sgn (t) < 0)
  else if (!tree_fits_uhwi_p (t))
  else if (TREE_INT_CST_LOW (t) == u)
  else if (TREE_INT_CST_LOW (t) < u)

/* Return true if SIZE represents a constant size that is in bounds of
   what the middle-end and the backend accepts (covering not more than
   half of the address-space).
   When PERR is non-null, set *PERR on failure to the description of
   why SIZE is not valid.  */

valid_constant_size_p (const_tree size, cst_size_error *perr /* = NULL */)
  if (POLY_INT_CST_P (size))
      if (TREE_OVERFLOW (size))
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))

  cst_size_error error;

  if (TREE_CODE (size) != INTEGER_CST)
      *perr = cst_size_not_constant;

  if (TREE_OVERFLOW_P (size))
      *perr = cst_size_overflow;

  if (tree_int_cst_sgn (size) < 0)
      *perr = cst_size_negative;

  if (!tree_fits_uhwi_p (size)
      || (wi::to_widest (TYPE_MAX_VALUE (sizetype))
	  < wi::to_widest (size) * 2))
      *perr = cst_size_too_big;

/* Return the precision of the type, or for a complex or vector type the
   precision of the type of its elements.  */

element_precision (const_tree type)
    type = TREE_TYPE (type);
  enum tree_code code = TREE_CODE (type);
  if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
    type = TREE_TYPE (type);

  return TYPE_PRECISION (type);
/* Return true if CODE represents an associative tree code.  Otherwise
   return false.  */

associative_tree_code (enum tree_code code)

/* Return true if CODE represents a commutative tree code.  Otherwise
   return false.  */

commutative_tree_code (enum tree_code code)
    case MULT_HIGHPART_EXPR:
    case UNORDERED_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_XOR_EXPR:
    case WIDEN_MULT_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:

/* Return true if CODE represents a ternary tree code for which the
   first two operands are commutative.  Otherwise return false. */

commutative_ternary_tree_code (enum tree_code code)
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:

/* Returns true if CODE can overflow.  */

operation_can_overflow (enum tree_code code)
      /* Can overflow in various ways.  */
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* For INT_MIN / -1.  */
      /* These operators cannot overflow.  */

/* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
   ftrapv doesn't generate trapping insns for CODE.  */

operation_no_trapping_overflow (tree type, enum tree_code code)
  gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));

  /* We don't generate instructions that trap on overflow for complex or vector
     types.  */
  if (!INTEGRAL_TYPE_P (type))

  if (!TYPE_OVERFLOW_TRAPS (type))

      /* These operators can overflow, and -ftrapv generates trapping code for
	 these.  */
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* These operators can overflow, but -ftrapv does not generate trapping
	 code for these.  */
      /* These operators cannot overflow.  */
6941 /* Constructors for pointer, array and function types.
6942 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6943 constructed by language-dependent code, not here.) */
6945 /* Construct, lay out and return the type of pointers to TO_TYPE with
6946 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6947 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6948 indicate this type can reference all of memory. If such a type has
6949 already been constructed, reuse it. */
6952 build_pointer_type_for_mode (tree to_type
, machine_mode mode
,
6956 bool could_alias
= can_alias_all
;
6958 if (to_type
== error_mark_node
)
6959 return error_mark_node
;
6961 if (mode
== VOIDmode
)
6963 addr_space_t as
= TYPE_ADDR_SPACE (to_type
);
6964 mode
= targetm
.addr_space
.pointer_mode (as
);
6967 /* If the pointed-to type has the may_alias attribute set, force
6968 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6969 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
6970 can_alias_all
= true;
6972 /* In some cases, languages will have things that aren't a POINTER_TYPE
6973 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6974 In that case, return that type without regard to the rest of our
6977 ??? This is a kludge, but consistent with the way this function has
6978 always operated and there doesn't seem to be a good way to avoid this
6980 if (TYPE_POINTER_TO (to_type
) != 0
6981 && TREE_CODE (TYPE_POINTER_TO (to_type
)) != POINTER_TYPE
)
6982 return TYPE_POINTER_TO (to_type
);
6984 /* First, if we already have a type for pointers to TO_TYPE and it's
6985 the proper mode, use it. */
6986 for (t
= TYPE_POINTER_TO (to_type
); t
; t
= TYPE_NEXT_PTR_TO (t
))
6987 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
6990 t
= make_node (POINTER_TYPE
);
6992 TREE_TYPE (t
) = to_type
;
6993 SET_TYPE_MODE (t
, mode
);
6994 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
6995 TYPE_NEXT_PTR_TO (t
) = TYPE_POINTER_TO (to_type
);
6996 TYPE_POINTER_TO (to_type
) = t
;
6998 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6999 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
7000 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7001 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
7003 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type
),
7006 /* Lay out the type. This function has many callers that are concerned
7007 with expression-construction, and this simplifies them all. */
7013 /* By default build pointers in ptr_mode. */
7016 build_pointer_type (tree to_type
)
7018 return build_pointer_type_for_mode (to_type
, VOIDmode
, false);
/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */

tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  if (mode == VOIDmode)
    {
      addr_space_t as = TYPE_ADDR_SPACE (to_type);
      mode = targetm.addr_space.pointer_mode (as);
    }

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type), mode, false);

  layout_type (t);

  return t;
}
/* Build the node for the type of references-to-TO_TYPE by default
   in ptr_mode.  */

tree
build_reference_type (tree to_type)
{
  return build_reference_type_for_mode (to_type, VOIDmode, false);
}
#define MAX_INT_CACHED_PREC \
  (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];

static void
clear_nonstandard_integer_type_cache (void)
{
  for (size_t i = 0; i < 2 * MAX_INT_CACHED_PREC + 2; i++)
    nonstandard_integer_type_cache[i] = NULL;
}

/* Builds a signed or unsigned integer type of precision PRECISION.
   Used for C bitfields whose precision does not match that of
   built-in target types.  */

tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
{
  tree itype, ret;

  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  ret = type_hash_canon (hstate.end (), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}
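
/* For example, a C bit-field declared as "unsigned int f : 24;" needs a
   24-bit unsigned integer type that no target provides directly.  An
   illustrative sketch of how a front end obtains it:

     tree t24 = build_nonstandard_integer_type (24, 1);

   The second argument requests an unsigned type; repeated calls with the
   same precision and signedness return the cached node for precisions up
   to MAX_INT_CACHED_PREC.  */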
#define MAX_BOOL_CACHED_PREC \
  (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];

/* Builds a boolean type of precision PRECISION.
   Used for boolean vectors to choose proper vector element size.  */

tree
build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
{
  tree type;

  if (precision <= MAX_BOOL_CACHED_PREC)
    {
      type = nonstandard_boolean_type_cache[precision];
      if (type)
	return type;
    }

  type = make_node (BOOLEAN_TYPE);
  TYPE_PRECISION (type) = precision;
  fixup_signed_type (type);

  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_boolean_type_cache[precision] = type;

  return type;
}
static GTY(()) vec<tree, va_gc> *bitint_type_cache;

/* Builds a signed or unsigned _BitInt(PRECISION) type.  */

tree
build_bitint_type (unsigned HOST_WIDE_INT precision, int unsignedp)
{
  tree itype, ret;

  gcc_checking_assert (precision >= 1 + !unsignedp);

  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (bitint_type_cache == NULL)
    vec_safe_grow_cleared (bitint_type_cache, 2 * MAX_INT_CACHED_PREC + 2);

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = (*bitint_type_cache)[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (BITINT_TYPE);
  TYPE_PRECISION (itype) = precision;

  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  ret = type_hash_canon (hstate.end (), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    (*bitint_type_cache)[precision + unsignedp] = ret;

  return ret;
}
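
/* Illustrative sketch: the C23 declaration "unsigned _BitInt(37) x;" needs

     tree t = build_bitint_type (37, 1);

   where the second argument requests an unsigned type.  The result is a
   BITINT_TYPE of precision 37; like the integer-type cache above, small
   precisions are memoized in bitint_type_cache.  */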
7210 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7211 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7212 is true, reuse such a type that has already been constructed. */
7215 build_range_type_1 (tree type
, tree lowval
, tree highval
, bool shared
)
7217 tree itype
= make_node (INTEGER_TYPE
);
7219 TREE_TYPE (itype
) = type
;
7221 TYPE_MIN_VALUE (itype
) = fold_convert (type
, lowval
);
7222 TYPE_MAX_VALUE (itype
) = highval
? fold_convert (type
, highval
) : NULL
;
7224 TYPE_PRECISION (itype
) = TYPE_PRECISION (type
);
7225 SET_TYPE_MODE (itype
, TYPE_MODE (type
));
7226 TYPE_SIZE (itype
) = TYPE_SIZE (type
);
7227 TYPE_SIZE_UNIT (itype
) = TYPE_SIZE_UNIT (type
);
7228 SET_TYPE_ALIGN (itype
, TYPE_ALIGN (type
));
7229 TYPE_USER_ALIGN (itype
) = TYPE_USER_ALIGN (type
);
7230 SET_TYPE_WARN_IF_NOT_ALIGN (itype
, TYPE_WARN_IF_NOT_ALIGN (type
));
7235 if ((TYPE_MIN_VALUE (itype
)
7236 && TREE_CODE (TYPE_MIN_VALUE (itype
)) != INTEGER_CST
)
7237 || (TYPE_MAX_VALUE (itype
)
7238 && TREE_CODE (TYPE_MAX_VALUE (itype
)) != INTEGER_CST
))
7240 /* Since we cannot reliably merge this type, we need to compare it using
7241 structural equality checks. */
7242 SET_TYPE_STRUCTURAL_EQUALITY (itype
);
7246 hashval_t hash
= type_hash_canon_hash (itype
);
7247 itype
= type_hash_canon (hash
, itype
);
7252 /* Wrapper around build_range_type_1 with SHARED set to true. */
7255 build_range_type (tree type
, tree lowval
, tree highval
)
7257 return build_range_type_1 (type
, lowval
, highval
, true);
7260 /* Wrapper around build_range_type_1 with SHARED set to false. */
7263 build_nonshared_range_type (tree type
, tree lowval
, tree highval
)
7265 return build_range_type_1 (type
, lowval
, highval
, false);
/* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
   MAXVAL should be the maximum value in the domain
   (one less than the length of the array).

   The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
   We don't enforce this limit, that is up to caller (e.g. language front
   end).  The limit exists because the result is a signed type and we don't
   handle sizes that use more than one HOST_WIDE_INT.  */

tree
build_index_type (tree maxval)
{
  return build_range_type (sizetype, size_zero_node, maxval);
}
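
/* Illustrative sketch: the TYPE_DOMAIN for a ten-element array is the
   range [0, 9] in sizetype:

     tree domain = build_index_type (size_int (9));

   which is simply build_range_type (sizetype, size_zero_node,
   size_int (9)).  */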
7283 /* Return true if the debug information for TYPE, a subtype, should be emitted
7284 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7285 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7286 debug info and doesn't reflect the source code. */
7289 subrange_type_for_debug_p (const_tree type
, tree
*lowval
, tree
*highval
)
7291 tree base_type
= TREE_TYPE (type
), low
, high
;
7293 /* Subrange types have a base type which is an integral type. */
7294 if (!INTEGRAL_TYPE_P (base_type
))
7297 /* Get the real bounds of the subtype. */
7298 if (lang_hooks
.types
.get_subrange_bounds
)
7299 lang_hooks
.types
.get_subrange_bounds (type
, &low
, &high
);
7302 low
= TYPE_MIN_VALUE (type
);
7303 high
= TYPE_MAX_VALUE (type
);
7306 /* If the type and its base type have the same representation and the same
7307 name, then the type is not a subrange but a copy of the base type. */
7308 if ((TREE_CODE (base_type
) == INTEGER_TYPE
7309 || TREE_CODE (base_type
) == BOOLEAN_TYPE
)
7310 && int_size_in_bytes (type
) == int_size_in_bytes (base_type
)
7311 && tree_int_cst_equal (low
, TYPE_MIN_VALUE (base_type
))
7312 && tree_int_cst_equal (high
, TYPE_MAX_VALUE (base_type
))
7313 && TYPE_IDENTIFIER (type
) == TYPE_IDENTIFIER (base_type
))
7323 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7324 and number of elements specified by the range of values of INDEX_TYPE.
7325 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7326 If SHARED is true, reuse such a type that has already been constructed.
7327 If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type. */
7330 build_array_type_1 (tree elt_type
, tree index_type
, bool typeless_storage
,
7331 bool shared
, bool set_canonical
)
7335 if (TREE_CODE (elt_type
) == FUNCTION_TYPE
)
7337 error ("arrays of functions are not meaningful");
7338 elt_type
= integer_type_node
;
7341 t
= make_node (ARRAY_TYPE
);
7342 TREE_TYPE (t
) = elt_type
;
7343 TYPE_DOMAIN (t
) = index_type
;
7344 TYPE_ADDR_SPACE (t
) = TYPE_ADDR_SPACE (elt_type
);
7345 TYPE_TYPELESS_STORAGE (t
) = typeless_storage
;
7350 hashval_t hash
= type_hash_canon_hash (t
);
7351 t
= type_hash_canon (hash
, t
);
7354 if (TYPE_CANONICAL (t
) == t
&& set_canonical
)
7356 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type
)
7357 || (index_type
&& TYPE_STRUCTURAL_EQUALITY_P (index_type
))
7359 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7360 else if (TYPE_CANONICAL (elt_type
) != elt_type
7361 || (index_type
&& TYPE_CANONICAL (index_type
) != index_type
))
7363 = build_array_type_1 (TYPE_CANONICAL (elt_type
),
7365 ? TYPE_CANONICAL (index_type
) : NULL_TREE
,
7366 typeless_storage
, shared
, set_canonical
);
/* Wrapper around build_array_type_1 with SHARED set to true.  */

tree
build_array_type (tree elt_type, tree index_type, bool typeless_storage)
{
  return
    build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
}

/* Wrapper around build_array_type_1 with SHARED set to false.  */

tree
build_nonshared_array_type (tree elt_type, tree index_type)
{
  return build_array_type_1 (elt_type, index_type, false, false, true);
}
/* Return a representation of ELT_TYPE[NELTS], using indices of type
   sizetype.  */

tree
build_array_type_nelts (tree elt_type, poly_uint64 nelts)
{
  return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
}
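
/* Illustrative sketch: the two requests for "int[10]" below normally yield
   the same shared ARRAY_TYPE node, since build_array_type_nelts is just
   the composition of build_index_type and build_array_type:

     tree a1 = build_array_type_nelts (integer_type_node, 10);
     tree a2 = build_array_type (integer_type_node,
				 build_index_type (size_int (9)));  */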
7398 /* Computes the canonical argument types from the argument type list
7401 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7402 on entry to this function, or if any of the ARGTYPES are
7405 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7406 true on entry to this function, or if any of the ARGTYPES are
7409 Returns a canonical argument list, which may be ARGTYPES when the
7410 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7411 true) or would not differ from ARGTYPES. */
7414 maybe_canonicalize_argtypes (tree argtypes
,
7415 bool *any_structural_p
,
7416 bool *any_noncanonical_p
)
7419 bool any_noncanonical_argtypes_p
= false;
7421 for (arg
= argtypes
; arg
&& !(*any_structural_p
); arg
= TREE_CHAIN (arg
))
7423 if (!TREE_VALUE (arg
) || TREE_VALUE (arg
) == error_mark_node
)
7424 /* Fail gracefully by stating that the type is structural. */
7425 *any_structural_p
= true;
7426 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg
)))
7427 *any_structural_p
= true;
7428 else if (TYPE_CANONICAL (TREE_VALUE (arg
)) != TREE_VALUE (arg
)
7429 || TREE_PURPOSE (arg
))
7430 /* If the argument has a default argument, we consider it
7431 non-canonical even though the type itself is canonical.
7432 That way, different variants of function and method types
7433 with default arguments will all point to the variant with
7434 no defaults as their canonical type. */
7435 any_noncanonical_argtypes_p
= true;
7438 if (*any_structural_p
)
7441 if (any_noncanonical_argtypes_p
)
7443 /* Build the canonical list of argument types. */
7444 tree canon_argtypes
= NULL_TREE
;
7445 bool is_void
= false;
7447 for (arg
= argtypes
; arg
; arg
= TREE_CHAIN (arg
))
7449 if (arg
== void_list_node
)
7452 canon_argtypes
= tree_cons (NULL_TREE
,
7453 TYPE_CANONICAL (TREE_VALUE (arg
)),
7457 canon_argtypes
= nreverse (canon_argtypes
);
7459 canon_argtypes
= chainon (canon_argtypes
, void_list_node
);
7461 /* There is a non-canonical type. */
7462 *any_noncanonical_p
= true;
7463 return canon_argtypes
;
7466 /* The canonical argument types are the same as ARGTYPES. */
7470 /* Construct, lay out and return
7471 the type of functions returning type VALUE_TYPE
7472 given arguments of types ARG_TYPES.
7473 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7474 are data type nodes for the arguments of the function.
7475 NO_NAMED_ARGS_STDARG_P is true if this is a prototyped
7476 variable-arguments function with (...) prototype (no named arguments).
7477 If such a type has already been constructed, reuse it. */
7480 build_function_type (tree value_type
, tree arg_types
,
7481 bool no_named_args_stdarg_p
)
7484 inchash::hash hstate
;
7485 bool any_structural_p
, any_noncanonical_p
;
7486 tree canon_argtypes
;
7488 gcc_assert (arg_types
!= error_mark_node
);
7490 if (TREE_CODE (value_type
) == FUNCTION_TYPE
)
7492 error ("function return type cannot be function");
7493 value_type
= integer_type_node
;
7496 /* Make a node of the sort we want. */
7497 t
= make_node (FUNCTION_TYPE
);
7498 TREE_TYPE (t
) = value_type
;
7499 TYPE_ARG_TYPES (t
) = arg_types
;
7500 if (no_named_args_stdarg_p
)
7502 gcc_assert (arg_types
== NULL_TREE
);
7503 TYPE_NO_NAMED_ARGS_STDARG_P (t
) = 1;
7506 /* If we already have such a type, use the old one. */
7507 hashval_t hash
= type_hash_canon_hash (t
);
7508 t
= type_hash_canon (hash
, t
);
7510 /* Set up the canonical type. */
7511 any_structural_p
= TYPE_STRUCTURAL_EQUALITY_P (value_type
);
7512 any_noncanonical_p
= TYPE_CANONICAL (value_type
) != value_type
;
7513 canon_argtypes
= maybe_canonicalize_argtypes (arg_types
,
7515 &any_noncanonical_p
);
7516 if (any_structural_p
)
7517 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7518 else if (any_noncanonical_p
)
7519 TYPE_CANONICAL (t
) = build_function_type (TYPE_CANONICAL (value_type
),
7522 if (!COMPLETE_TYPE_P (t
))
7527 /* Build a function type. The RETURN_TYPE is the type returned by the
7528 function. If VAARGS is set, no void_type_node is appended to the
7529 list. ARGP must be always be terminated be a NULL_TREE. */
7532 build_function_type_list_1 (bool vaargs
, tree return_type
, va_list argp
)
7536 t
= va_arg (argp
, tree
);
7537 for (args
= NULL_TREE
; t
!= NULL_TREE
; t
= va_arg (argp
, tree
))
7538 args
= tree_cons (NULL_TREE
, t
, args
);
7543 if (args
!= NULL_TREE
)
7544 args
= nreverse (args
);
7545 gcc_assert (last
!= void_list_node
);
7547 else if (args
== NULL_TREE
)
7548 args
= void_list_node
;
7552 args
= nreverse (args
);
7553 TREE_CHAIN (last
) = void_list_node
;
7555 args
= build_function_type (return_type
, args
, vaargs
&& args
== NULL_TREE
);
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If additional arguments are provided, they are
   additional argument types.  The list of argument types must always
   be terminated by NULL_TREE.  */

tree
build_function_type_list (tree return_type, ...)
{
  tree args;
  va_list p;

  va_start (p, return_type);
  args = build_function_type_list_1 (false, return_type, p);
  va_end (p);
  return args;
}
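
/* Illustrative sketch: a FUNCTION_TYPE for "double (double, double)", as is
   typical when declaring built-ins, can be obtained with

     tree fntype = build_function_type_list (double_type_node,
					     double_type_node,
					     double_type_node,
					     NULL_TREE);

   The trailing NULL_TREE terminates the argument list; the resulting
   TYPE_ARG_TYPES chain ends in void_list_node, marking a prototyped,
   non-variadic function.  */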
7577 /* Build a variable argument function type. The RETURN_TYPE is the
7578 type returned by the function. If additional arguments are provided,
7579 they are additional argument types. The list of argument types must
7580 always be terminated by NULL_TREE. */
7583 build_varargs_function_type_list (tree return_type
, ...)
7588 va_start (p
, return_type
);
7589 args
= build_function_type_list_1 (true, return_type
, p
);
7595 /* Build a function type. RETURN_TYPE is the type returned by the
7596 function; VAARGS indicates whether the function takes varargs. The
7597 function takes N named arguments, the types of which are provided in
7601 build_function_type_array_1 (bool vaargs
, tree return_type
, int n
,
7605 tree t
= vaargs
? NULL_TREE
: void_list_node
;
7607 for (i
= n
- 1; i
>= 0; i
--)
7608 t
= tree_cons (NULL_TREE
, arg_types
[i
], t
);
7610 return build_function_type (return_type
, t
, vaargs
&& n
== 0);
7613 /* Build a function type. RETURN_TYPE is the type returned by the
7614 function. The function takes N named arguments, the types of which
7615 are provided in ARG_TYPES. */
7618 build_function_type_array (tree return_type
, int n
, tree
*arg_types
)
7620 return build_function_type_array_1 (false, return_type
, n
, arg_types
);
7623 /* Build a variable argument function type. RETURN_TYPE is the type
7624 returned by the function. The function takes N named arguments, the
7625 types of which are provided in ARG_TYPES. */
7628 build_varargs_function_type_array (tree return_type
, int n
, tree
*arg_types
)
7630 return build_function_type_array_1 (true, return_type
, n
, arg_types
);
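
/* Illustrative sketch: the array-based builders cover the same ground as
   the va_list-based ones when the argument types are already in a buffer.
   For a printf-like prototype "int (char *, ...)":

     tree cptr = build_pointer_type (char_type_node);
     tree argt[1] = { cptr };
     tree fntype = build_varargs_function_type_array (integer_type_node,
						      1, argt);  */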
7633 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7634 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7635 for the method. An implicit additional parameter (of type
7636 pointer-to-BASETYPE) is added to the ARGTYPES. */
7639 build_method_type_directly (tree basetype
,
7645 bool any_structural_p
, any_noncanonical_p
;
7646 tree canon_argtypes
;
7648 /* Make a node of the sort we want. */
7649 t
= make_node (METHOD_TYPE
);
7651 TYPE_METHOD_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
7652 TREE_TYPE (t
) = rettype
;
7653 ptype
= build_pointer_type (basetype
);
7655 /* The actual arglist for this function includes a "hidden" argument
7656 which is "this". Put it into the list of argument types. */
7657 argtypes
= tree_cons (NULL_TREE
, ptype
, argtypes
);
7658 TYPE_ARG_TYPES (t
) = argtypes
;
7660 /* If we already have such a type, use the old one. */
7661 hashval_t hash
= type_hash_canon_hash (t
);
7662 t
= type_hash_canon (hash
, t
);
7664 /* Set up the canonical type. */
7666 = (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
7667 || TYPE_STRUCTURAL_EQUALITY_P (rettype
));
7669 = (TYPE_CANONICAL (basetype
) != basetype
7670 || TYPE_CANONICAL (rettype
) != rettype
);
7671 canon_argtypes
= maybe_canonicalize_argtypes (TREE_CHAIN (argtypes
),
7673 &any_noncanonical_p
);
7674 if (any_structural_p
)
7675 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7676 else if (any_noncanonical_p
)
7678 = build_method_type_directly (TYPE_CANONICAL (basetype
),
7679 TYPE_CANONICAL (rettype
),
7681 if (!COMPLETE_TYPE_P (t
))
7687 /* Construct, lay out and return the type of methods belonging to class
7688 BASETYPE and whose arguments and values are described by TYPE.
7689 If that type exists already, reuse it.
7690 TYPE must be a FUNCTION_TYPE node. */
7693 build_method_type (tree basetype
, tree type
)
7695 gcc_assert (TREE_CODE (type
) == FUNCTION_TYPE
);
7697 return build_method_type_directly (basetype
,
7699 TYPE_ARG_TYPES (type
));
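
/* Illustrative sketch: for a member function "int S::f (int)" of some class
   whose RECORD_TYPE node is s_type (a placeholder name here), a METHOD_TYPE
   can be built directly; the implicit "this" parameter is added by the
   constructor itself:

     tree args = tree_cons (NULL_TREE, integer_type_node, void_list_node);
     tree mtype = build_method_type_directly (s_type, integer_type_node,
					      args);  */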
7702 /* Construct, lay out and return the type of offsets to a value
7703 of type TYPE, within an object of type BASETYPE.
7704 If a suitable offset type exists already, reuse it. */
7707 build_offset_type (tree basetype
, tree type
)
7711 /* Make a node of the sort we want. */
7712 t
= make_node (OFFSET_TYPE
);
7714 TYPE_OFFSET_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
7715 TREE_TYPE (t
) = type
;
7717 /* If we already have such a type, use the old one. */
7718 hashval_t hash
= type_hash_canon_hash (t
);
7719 t
= type_hash_canon (hash
, t
);
7721 if (!COMPLETE_TYPE_P (t
))
7724 if (TYPE_CANONICAL (t
) == t
)
7726 if (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
7727 || TYPE_STRUCTURAL_EQUALITY_P (type
))
7728 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7729 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)) != basetype
7730 || TYPE_CANONICAL (type
) != type
)
7732 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)),
7733 TYPE_CANONICAL (type
));
7739 /* Create a complex type whose components are COMPONENT_TYPE.
7741 If NAMED is true, the type is given a TYPE_NAME. We do not always
7742 do so because this creates a DECL node and thus make the DECL_UIDs
7743 dependent on the type canonicalization hashtable, which is GC-ed,
7744 so the DECL_UIDs would not be stable wrt garbage collection. */
7747 build_complex_type (tree component_type
, bool named
)
7749 gcc_assert (INTEGRAL_TYPE_P (component_type
)
7750 || SCALAR_FLOAT_TYPE_P (component_type
)
7751 || FIXED_POINT_TYPE_P (component_type
));
7753 /* Make a node of the sort we want. */
7754 tree probe
= make_node (COMPLEX_TYPE
);
7756 TREE_TYPE (probe
) = TYPE_MAIN_VARIANT (component_type
);
7758 /* If we already have such a type, use the old one. */
7759 hashval_t hash
= type_hash_canon_hash (probe
);
7760 tree t
= type_hash_canon (hash
, probe
);
7764 /* We created a new type. The hash insertion will have laid
7765 out the type. We need to check the canonicalization and
7766 maybe set the name. */
7767 gcc_checking_assert (COMPLETE_TYPE_P (t
)
7769 && TYPE_CANONICAL (t
) == t
);
7771 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t
)))
7772 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7773 else if (TYPE_CANONICAL (TREE_TYPE (t
)) != TREE_TYPE (t
))
7775 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t
)), named
);
7777 /* We need to create a name, since complex is a fundamental type. */
7780 const char *name
= NULL
;
7782 if (TREE_TYPE (t
) == char_type_node
)
7783 name
= "complex char";
7784 else if (TREE_TYPE (t
) == signed_char_type_node
)
7785 name
= "complex signed char";
7786 else if (TREE_TYPE (t
) == unsigned_char_type_node
)
7787 name
= "complex unsigned char";
7788 else if (TREE_TYPE (t
) == short_integer_type_node
)
7789 name
= "complex short int";
7790 else if (TREE_TYPE (t
) == short_unsigned_type_node
)
7791 name
= "complex short unsigned int";
7792 else if (TREE_TYPE (t
) == integer_type_node
)
7793 name
= "complex int";
7794 else if (TREE_TYPE (t
) == unsigned_type_node
)
7795 name
= "complex unsigned int";
7796 else if (TREE_TYPE (t
) == long_integer_type_node
)
7797 name
= "complex long int";
7798 else if (TREE_TYPE (t
) == long_unsigned_type_node
)
7799 name
= "complex long unsigned int";
7800 else if (TREE_TYPE (t
) == long_long_integer_type_node
)
7801 name
= "complex long long int";
7802 else if (TREE_TYPE (t
) == long_long_unsigned_type_node
)
7803 name
= "complex long long unsigned int";
7806 TYPE_NAME (t
) = build_decl (UNKNOWN_LOCATION
, TYPE_DECL
,
7807 get_identifier (name
), t
);
7811 return build_qualified_type (t
, TYPE_QUALS (component_type
));
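
/* Illustrative sketch: the standard "_Complex double" node is essentially

     tree cdbl = build_complex_type (double_type_node, true);

   which is how complex_double_type_node is set up during tree-node
   initialization; passing NAMED as false avoids creating a TYPE_DECL for
   ad-hoc component types.  */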
7814 /* If TYPE is a real or complex floating-point type and the target
7815 does not directly support arithmetic on TYPE then return the wider
7816 type to be used for arithmetic on TYPE. Otherwise, return
7820 excess_precision_type (tree type
)
7822 /* The target can give two different responses to the question of
7823 which excess precision mode it would like depending on whether we
7824 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7826 enum excess_precision_type requested_type
7827 = (flag_excess_precision
== EXCESS_PRECISION_FAST
7828 ? EXCESS_PRECISION_TYPE_FAST
7829 : (flag_excess_precision
== EXCESS_PRECISION_FLOAT16
7830 ? EXCESS_PRECISION_TYPE_FLOAT16
: EXCESS_PRECISION_TYPE_STANDARD
));
7832 enum flt_eval_method target_flt_eval_method
7833 = targetm
.c
.excess_precision (requested_type
);
7835 /* The target should not ask for unpredictable float evaluation (though
7836 it might advertise that implicitly the evaluation is unpredictable,
7837 but we don't care about that here, it will have been reported
7838 elsewhere). If it does ask for unpredictable evaluation, we have
7839 nothing to do here. */
7840 gcc_assert (target_flt_eval_method
!= FLT_EVAL_METHOD_UNPREDICTABLE
);
7842 /* Nothing to do. The target has asked for all types we know about
7843 to be computed with their native precision and range. */
7844 if (target_flt_eval_method
== FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16
)
7847 /* The target will promote this type in a target-dependent way, so excess
7848 precision ought to leave it alone. */
7849 if (targetm
.promoted_type (type
) != NULL_TREE
)
7852 machine_mode float16_type_mode
= (float16_type_node
7853 ? TYPE_MODE (float16_type_node
)
7855 machine_mode bfloat16_type_mode
= (bfloat16_type_node
7856 ? TYPE_MODE (bfloat16_type_node
)
7858 machine_mode float_type_mode
= TYPE_MODE (float_type_node
);
7859 machine_mode double_type_mode
= TYPE_MODE (double_type_node
);
7861 switch (TREE_CODE (type
))
7865 machine_mode type_mode
= TYPE_MODE (type
);
7866 switch (target_flt_eval_method
)
7868 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
:
7869 if (type_mode
== float16_type_mode
7870 || type_mode
== bfloat16_type_mode
)
7871 return float_type_node
;
7873 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
:
7874 if (type_mode
== float16_type_mode
7875 || type_mode
== bfloat16_type_mode
7876 || type_mode
== float_type_mode
)
7877 return double_type_node
;
7879 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE
:
7880 if (type_mode
== float16_type_mode
7881 || type_mode
== bfloat16_type_mode
7882 || type_mode
== float_type_mode
7883 || type_mode
== double_type_mode
)
7884 return long_double_type_node
;
7893 if (TREE_CODE (TREE_TYPE (type
)) != REAL_TYPE
)
7895 machine_mode type_mode
= TYPE_MODE (TREE_TYPE (type
));
7896 switch (target_flt_eval_method
)
7898 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
:
7899 if (type_mode
== float16_type_mode
7900 || type_mode
== bfloat16_type_mode
)
7901 return complex_float_type_node
;
7903 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
:
7904 if (type_mode
== float16_type_mode
7905 || type_mode
== bfloat16_type_mode
7906 || type_mode
== float_type_mode
)
7907 return complex_double_type_node
;
7909 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE
:
7910 if (type_mode
== float16_type_mode
7911 || type_mode
== bfloat16_type_mode
7912 || type_mode
== float_type_mode
7913 || type_mode
== double_type_mode
)
7914 return complex_long_double_type_node
;
7928 /* Return OP, stripped of any conversions to wider types as much as is safe.
7929 Converting the value back to OP's type makes a value equivalent to OP.
7931 If FOR_TYPE is nonzero, we return a value which, if converted to
7932 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7934 OP must have integer, real or enumeral type. Pointers are not allowed!
7936 There are some cases where the obvious value we could return
7937 would regenerate to OP if converted to OP's type,
7938 but would not extend like OP to wider types.
7939 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7940 For example, if OP is (unsigned short)(signed char)-1,
7941 we avoid returning (signed char)-1 if FOR_TYPE is int,
7942 even though extending that to an unsigned short would regenerate OP,
7943 since the result of extending (signed char)-1 to (int)
7944 is different from (int) OP. */
7947 get_unwidened (tree op
, tree for_type
)
7949 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7950 tree type
= TREE_TYPE (op
);
7952 = TYPE_PRECISION (for_type
!= 0 ? for_type
: type
);
7954 = (for_type
!= 0 && for_type
!= type
7955 && final_prec
> TYPE_PRECISION (type
)
7956 && TYPE_UNSIGNED (type
));
7959 while (CONVERT_EXPR_P (op
))
7963 /* TYPE_PRECISION on vector types has different meaning
7964 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7965 so avoid them here. */
7966 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op
, 0))) == VECTOR_TYPE
)
7969 bitschange
= TYPE_PRECISION (TREE_TYPE (op
))
7970 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0)));
7972 /* Truncations are many-one so cannot be removed.
7973 Unless we are later going to truncate down even farther. */
7975 && final_prec
> TYPE_PRECISION (TREE_TYPE (op
)))
7978 /* See what's inside this conversion. If we decide to strip it,
7980 op
= TREE_OPERAND (op
, 0);
7982 /* If we have not stripped any zero-extensions (uns is 0),
7983 we can strip any kind of extension.
7984 If we have previously stripped a zero-extension,
7985 only zero-extensions can safely be stripped.
7986 Any extension can be stripped if the bits it would produce
7987 are all going to be discarded later by truncating to FOR_TYPE. */
7991 if (! uns
|| final_prec
<= TYPE_PRECISION (TREE_TYPE (op
)))
7993 /* TYPE_UNSIGNED says whether this is a zero-extension.
7994 Let's avoid computing it if it does not affect WIN
7995 and if UNS will not be needed again. */
7997 || CONVERT_EXPR_P (op
))
7998 && TYPE_UNSIGNED (TREE_TYPE (op
)))
8006 /* If we finally reach a constant see if it fits in sth smaller and
8007 in that case convert it. */
8008 if (TREE_CODE (win
) == INTEGER_CST
)
8010 tree wtype
= TREE_TYPE (win
);
8011 unsigned prec
= wi::min_precision (wi::to_wide (win
), TYPE_SIGN (wtype
));
8013 prec
= MAX (prec
, final_prec
);
8014 if (prec
< TYPE_PRECISION (wtype
))
8016 tree t
= lang_hooks
.types
.type_for_size (prec
, TYPE_UNSIGNED (wtype
));
8017 if (t
&& TYPE_PRECISION (t
) < TYPE_PRECISION (wtype
))
8018 win
= fold_convert (t
, win
);
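
/* Illustrative sketch: if OP is the expression (int)(short) X, where X is
   an int variable, then

     tree narrowed = get_unwidened (op, NULL_TREE);

   strips the widening back to the (short) X conversion, since converting
   that result to int regenerates OP; with FOR_TYPE == integer_type_node
   the same answer is produced and the caller redoes the extension.  */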
8025 /* Return OP or a simpler expression for a narrower value
8026 which can be sign-extended or zero-extended to give back OP.
8027 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8028 or 0 if the value should be sign-extended. */
8031 get_narrower (tree op
, int *unsignedp_ptr
)
8036 bool integral_p
= INTEGRAL_TYPE_P (TREE_TYPE (op
));
8038 if (TREE_CODE (op
) == COMPOUND_EXPR
)
8041 op
= TREE_OPERAND (op
, 1);
8042 while (TREE_CODE (op
) == COMPOUND_EXPR
);
8043 tree ret
= get_narrower (op
, unsignedp_ptr
);
8046 auto_vec
<tree
, 16> v
;
8048 for (op
= win
; TREE_CODE (op
) == COMPOUND_EXPR
;
8049 op
= TREE_OPERAND (op
, 1))
8051 FOR_EACH_VEC_ELT_REVERSE (v
, i
, op
)
8052 ret
= build2_loc (EXPR_LOCATION (op
), COMPOUND_EXPR
,
8053 TREE_TYPE (ret
), TREE_OPERAND (op
, 0),
8057 while (TREE_CODE (op
) == NOP_EXPR
)
8060 = (TYPE_PRECISION (TREE_TYPE (op
))
8061 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0))));
8063 /* Truncations are many-one so cannot be removed. */
8067 /* See what's inside this conversion. If we decide to strip it,
8072 op
= TREE_OPERAND (op
, 0);
8073 /* An extension: the outermost one can be stripped,
8074 but remember whether it is zero or sign extension. */
8076 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8077 /* Otherwise, if a sign extension has been stripped,
8078 only sign extensions can now be stripped;
8079 if a zero extension has been stripped, only zero-extensions. */
8080 else if (uns
!= TYPE_UNSIGNED (TREE_TYPE (op
)))
8084 else /* bitschange == 0 */
8086 /* A change in nominal type can always be stripped, but we must
8087 preserve the unsignedness. */
8089 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8091 op
= TREE_OPERAND (op
, 0);
8092 /* Keep trying to narrow, but don't assign op to win if it
8093 would turn an integral type into something else. */
8094 if (INTEGRAL_TYPE_P (TREE_TYPE (op
)) != integral_p
)
8101 if (TREE_CODE (op
) == COMPONENT_REF
8102 /* Since type_for_size always gives an integer type. */
8103 && TREE_CODE (TREE_TYPE (op
)) != REAL_TYPE
8104 && TREE_CODE (TREE_TYPE (op
)) != FIXED_POINT_TYPE
8105 /* Ensure field is laid out already. */
8106 && DECL_SIZE (TREE_OPERAND (op
, 1)) != 0
8107 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op
, 1))))
8109 unsigned HOST_WIDE_INT innerprec
8110 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op
, 1)));
8111 int unsignedp
= (DECL_UNSIGNED (TREE_OPERAND (op
, 1))
8112 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op
, 1))));
8113 tree type
= lang_hooks
.types
.type_for_size (innerprec
, unsignedp
);
8115 /* We can get this structure field in a narrower type that fits it,
8116 but the resulting extension to its nominal type (a fullword type)
8117 must satisfy the same conditions as for other extensions.
8119 Do this only for fields that are aligned (not bit-fields),
8120 because when bit-field insns will be used there is no
8121 advantage in doing this. */
8123 if (innerprec
< TYPE_PRECISION (TREE_TYPE (op
))
8124 && ! DECL_BIT_FIELD (TREE_OPERAND (op
, 1))
8125 && (first
|| uns
== DECL_UNSIGNED (TREE_OPERAND (op
, 1)))
8129 uns
= DECL_UNSIGNED (TREE_OPERAND (op
, 1));
8130 win
= fold_convert (type
, op
);
8134 *unsignedp_ptr
= uns
;
8138 /* Return true if integer constant C has a value that is permissible
8139 for TYPE, an integral type. */
8142 int_fits_type_p (const_tree c
, const_tree type
)
8144 tree type_low_bound
, type_high_bound
;
8145 bool ok_for_low_bound
, ok_for_high_bound
;
8146 signop sgn_c
= TYPE_SIGN (TREE_TYPE (c
));
8148 /* Non-standard boolean types can have arbitrary precision but various
8149 transformations assume that they can only take values 0 and +/-1. */
8150 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
8151 return wi::fits_to_boolean_p (wi::to_wide (c
), type
);
8154 type_low_bound
= TYPE_MIN_VALUE (type
);
8155 type_high_bound
= TYPE_MAX_VALUE (type
);
8157 /* If at least one bound of the type is a constant integer, we can check
8158 ourselves and maybe make a decision. If no such decision is possible, but
8159 this type is a subtype, try checking against that. Otherwise, use
8160 fits_to_tree_p, which checks against the precision.
8162 Compute the status for each possibly constant bound, and return if we see
8163 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8164 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8165 for "constant known to fit". */
8167 /* Check if c >= type_low_bound. */
8168 if (type_low_bound
&& TREE_CODE (type_low_bound
) == INTEGER_CST
)
8170 if (tree_int_cst_lt (c
, type_low_bound
))
8172 ok_for_low_bound
= true;
8175 ok_for_low_bound
= false;
8177 /* Check if c <= type_high_bound. */
8178 if (type_high_bound
&& TREE_CODE (type_high_bound
) == INTEGER_CST
)
8180 if (tree_int_cst_lt (type_high_bound
, c
))
8182 ok_for_high_bound
= true;
8185 ok_for_high_bound
= false;
8187 /* If the constant fits both bounds, the result is known. */
8188 if (ok_for_low_bound
&& ok_for_high_bound
)
8191 /* Perform some generic filtering which may allow making a decision
8192 even if the bounds are not constant. First, negative integers
8193 never fit in unsigned types, */
8194 if (TYPE_UNSIGNED (type
) && sgn_c
== SIGNED
&& wi::neg_p (wi::to_wide (c
)))
8197 /* Second, narrower types always fit in wider ones. */
8198 if (TYPE_PRECISION (type
) > TYPE_PRECISION (TREE_TYPE (c
)))
8201 /* Third, unsigned integers with top bit set never fit signed types. */
8202 if (!TYPE_UNSIGNED (type
) && sgn_c
== UNSIGNED
)
8204 int prec
= GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c
))) - 1;
8205 if (prec
< TYPE_PRECISION (TREE_TYPE (c
)))
8207 /* When a tree_cst is converted to a wide-int, the precision
8208 is taken from the type. However, if the precision of the
8209 mode underneath the type is smaller than that, it is
8210 possible that the value will not fit. The test below
8211 fails if any bit is set between the sign bit of the
8212 underlying mode and the top bit of the type. */
8213 if (wi::zext (wi::to_wide (c
), prec
- 1) != wi::to_wide (c
))
8216 else if (wi::neg_p (wi::to_wide (c
)))
8220 /* If we haven't been able to decide at this point, there nothing more we
8221 can check ourselves here. Look at the base type if we have one and it
8222 has the same precision. */
8223 if (TREE_CODE (type
) == INTEGER_TYPE
8224 && TREE_TYPE (type
) != 0
8225 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (type
)))
8227 type
= TREE_TYPE (type
);
8231 /* Or to fits_to_tree_p, if nothing else. */
8232 return wi::fits_to_tree_p (wi::to_wide (c
), type
);
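
/* Illustrative sketch: whether a constant fits is a question about value,
   not about the constant's own type.  For instance, with
   c = build_int_cst (integer_type_node, 300):

     int_fits_type_p (c, unsigned_char_type_node)   => false (300 > 255)
     int_fits_type_p (c, short_integer_type_node)   => true

   Both go through the bound checks above rather than a plain precision
   comparison.  */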
8235 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8236 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8237 represented (assuming two's-complement arithmetic) within the bit
8238 precision of the type are returned instead. */
8241 get_type_static_bounds (const_tree type
, mpz_t min
, mpz_t max
)
8243 if (!POINTER_TYPE_P (type
) && TYPE_MIN_VALUE (type
)
8244 && TREE_CODE (TYPE_MIN_VALUE (type
)) == INTEGER_CST
)
8245 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type
)), min
, TYPE_SIGN (type
));
8248 if (TYPE_UNSIGNED (type
))
8249 mpz_set_ui (min
, 0);
8252 wide_int mn
= wi::min_value (TYPE_PRECISION (type
), SIGNED
);
8253 wi::to_mpz (mn
, min
, SIGNED
);
8257 if (!POINTER_TYPE_P (type
) && TYPE_MAX_VALUE (type
)
8258 && TREE_CODE (TYPE_MAX_VALUE (type
)) == INTEGER_CST
)
8259 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type
)), max
, TYPE_SIGN (type
));
8262 wide_int mn
= wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
8263 wi::to_mpz (mn
, max
, TYPE_SIGN (type
));
/* Return true if VAR is an automatic variable.  */

bool
auto_var_p (const_tree var)
{
  return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
	    || TREE_CODE (var) == PARM_DECL)
	   && ! TREE_STATIC (var))
	  || TREE_CODE (var) == RESULT_DECL);
}

/* Return true if VAR is an automatic variable defined in function FN.  */

bool
auto_var_in_fn_p (const_tree var, const_tree fn)
{
  return (DECL_P (var) && DECL_CONTEXT (var) == fn
	  && (auto_var_p (var)
	      || TREE_CODE (var) == LABEL_DECL));
}
8288 /* Subprogram of following function. Called by walk_tree.
8290 Return *TP if it is an automatic variable or parameter of the
8291 function passed in as DATA. */
8294 find_var_from_fn (tree
*tp
, int *walk_subtrees
, void *data
)
8296 tree fn
= (tree
) data
;
8301 else if (DECL_P (*tp
)
8302 && auto_var_in_fn_p (*tp
, fn
))
8308 /* Returns true if T is, contains, or refers to a type with variable
8309 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8310 arguments, but not the return type. If FN is nonzero, only return
8311 true if a modifier of the type or position of FN is a variable or
8312 parameter inside FN.
8314 This concept is more general than that of C99 'variably modified types':
8315 in C99, a struct type is never variably modified because a VLA may not
8316 appear as a structure member. However, in GNU C code like:
8318 struct S { int i[f()]; };
8320 is valid, and other languages may define similar constructs. */
8323 variably_modified_type_p (tree type
, tree fn
)
8327 /* Test if T is either variable (if FN is zero) or an expression containing
8328 a variable in FN. If TYPE isn't gimplified, return true also if
8329 gimplify_one_sizepos would gimplify the expression into a local
8331 #define RETURN_TRUE_IF_VAR(T) \
8332 do { tree _t = (T); \
8333 if (_t != NULL_TREE \
8334 && _t != error_mark_node \
8335 && !CONSTANT_CLASS_P (_t) \
8336 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8338 || (!TYPE_SIZES_GIMPLIFIED (type) \
8339 && (TREE_CODE (_t) != VAR_DECL \
8340 && !CONTAINS_PLACEHOLDER_P (_t))) \
8341 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8342 return true; } while (0)
8344 if (type
== error_mark_node
)
8347 /* If TYPE itself has variable size, it is variably modified. */
8348 RETURN_TRUE_IF_VAR (TYPE_SIZE (type
));
8349 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type
));
8351 switch (TREE_CODE (type
))
8354 case REFERENCE_TYPE
:
8356 /* Ada can have pointer types refering to themselves indirectly. */
8357 if (TREE_VISITED (type
))
8359 TREE_VISITED (type
) = true;
8360 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8362 TREE_VISITED (type
) = false;
8365 TREE_VISITED (type
) = false;
8370 /* If TYPE is a function type, it is variably modified if the
8371 return type is variably modified. */
8372 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8378 case FIXED_POINT_TYPE
:
8381 /* Scalar types are variably modified if their end points
8383 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type
));
8384 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type
));
8389 case QUAL_UNION_TYPE
:
8390 /* We can't see if any of the fields are variably-modified by the
8391 definition we normally use, since that would produce infinite
8392 recursion via pointers. */
8393 /* This is variably modified if some field's type is. */
8394 for (t
= TYPE_FIELDS (type
); t
; t
= DECL_CHAIN (t
))
8395 if (TREE_CODE (t
) == FIELD_DECL
)
8397 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t
));
8398 RETURN_TRUE_IF_VAR (DECL_SIZE (t
));
8399 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t
));
8401 /* If the type is a qualified union, then the DECL_QUALIFIER
8402 of fields can also be an expression containing a variable. */
8403 if (TREE_CODE (type
) == QUAL_UNION_TYPE
)
8404 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t
));
8406 /* If the field is a qualified union, then it's only a container
8407 for what's inside so we look into it. That's necessary in LTO
8408 mode because the sizes of the field tested above have been set
8409 to PLACEHOLDER_EXPRs by free_lang_data. */
8410 if (TREE_CODE (TREE_TYPE (t
)) == QUAL_UNION_TYPE
8411 && variably_modified_type_p (TREE_TYPE (t
), fn
))
8417 /* Do not call ourselves to avoid infinite recursion. This is
8418 variably modified if the element type is. */
8419 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type
)));
8420 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8427 /* The current language may have other cases to check, but in general,
8428 all other types are not variably modified. */
8429 return lang_hooks
.tree_inlining
.var_mod_type_p (type
, fn
);
8431 #undef RETURN_TRUE_IF_VAR
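
/* Illustrative example: given the GNU C declarations

     void f (int n) { struct S { int a[n]; } s; ... }

   the type of s.a and the type struct S are both variably modified within
   f, so variably_modified_type_p (TREE_TYPE (s_decl), f_decl) returns true,
   while for an ordinary fixed-size struct it returns false.  Here s_decl
   and f_decl stand for the corresponding VAR_DECL and FUNCTION_DECL.  */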
/* Given a DECL or TYPE, return the scope in which it was declared, or
   NULL_TREE if there is no containing scope.  */

tree
get_containing_scope (const_tree t)
{
  return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
}

/* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL.  */

const_tree
get_ultimate_context (const_tree decl)
{
  while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
    {
      if (TREE_CODE (decl) == BLOCK)
	decl = BLOCK_SUPERCONTEXT (decl);
      else
	decl = get_containing_scope (decl);
    }
  return decl;
}
8458 /* Return the innermost context enclosing DECL that is
8459 a FUNCTION_DECL, or zero if none. */
8462 decl_function_context (const_tree decl
)
8466 if (TREE_CODE (decl
) == ERROR_MARK
)
8469 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8470 where we look up the function at runtime. Such functions always take
8471 a first argument of type 'pointer to real context'.
8473 C++ should really be fixed to use DECL_CONTEXT for the real context,
8474 and use something else for the "virtual context". */
8475 else if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_VIRTUAL_P (decl
))
8478 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl
)))));
8480 context
= DECL_CONTEXT (decl
);
8482 while (context
&& TREE_CODE (context
) != FUNCTION_DECL
)
8484 if (TREE_CODE (context
) == BLOCK
)
8485 context
= BLOCK_SUPERCONTEXT (context
);
8487 context
= get_containing_scope (context
);
8493 /* Return the innermost context enclosing DECL that is
8494 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8495 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8498 decl_type_context (const_tree decl
)
8500 tree context
= DECL_CONTEXT (decl
);
8503 switch (TREE_CODE (context
))
8505 case NAMESPACE_DECL
:
8506 case TRANSLATION_UNIT_DECL
:
8511 case QUAL_UNION_TYPE
:
8516 context
= DECL_CONTEXT (context
);
8520 context
= BLOCK_SUPERCONTEXT (context
);
/* CALL is a CALL_EXPR.  Return the declaration for the function
   called, or NULL_TREE if the called function cannot be
   determined.  */

tree
get_callee_fndecl (const_tree call)
{
  tree addr;

  if (call == error_mark_node)
    return error_mark_node;

  /* It's invalid to call this function with anything but a
     CALL_EXPR.  */
  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  /* The first operand to the CALL is the address of the function
     called.  */
  addr = CALL_EXPR_FN (call);

  /* If there is no function, return early.  */
  if (addr == NULL_TREE)
    return NULL_TREE;

  STRIP_NOPS (addr);

  /* If this is a readonly function pointer, extract its initial value.  */
  if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
      && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
      && DECL_INITIAL (addr))
    addr = DECL_INITIAL (addr);

  /* If the address is just `&f' for some function `f', then we know
     that `f' is being called.  */
  if (TREE_CODE (addr) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
    return TREE_OPERAND (addr, 0);

  /* We couldn't figure out what was being called.  */
  return NULL_TREE;
}
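
/* Illustrative sketch: for a direct call the function expression is an
   ADDR_EXPR of the FUNCTION_DECL, so

     tree fndecl = get_callee_fndecl (call);

   returns that FUNCTION_DECL, whereas for an indirect call through a
   non-constant function pointer it returns NULL_TREE and the caller must
   fall back to the call's type alone.  */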
8572 /* Return true when STMTs arguments and return value match those of FNDECL,
8573 a decl of a builtin function. */
8576 tree_builtin_call_types_compatible_p (const_tree call
, tree fndecl
)
8578 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl
) != NOT_BUILT_IN
);
8580 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
8581 if (tree decl
= builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl
)))
8584 bool gimple_form
= (cfun
&& (cfun
->curr_properties
& PROP_gimple
)) != 0;
8586 ? !useless_type_conversion_p (TREE_TYPE (call
),
8587 TREE_TYPE (TREE_TYPE (fndecl
)))
8588 : (TYPE_MAIN_VARIANT (TREE_TYPE (call
))
8589 != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (fndecl
)))))
8592 tree targs
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
8593 unsigned nargs
= call_expr_nargs (call
);
8594 for (unsigned i
= 0; i
< nargs
; ++i
, targs
= TREE_CHAIN (targs
))
8596 /* Variadic args follow. */
8599 tree arg
= CALL_EXPR_ARG (call
, i
);
8600 tree type
= TREE_VALUE (targs
);
8602 ? !useless_type_conversion_p (type
, TREE_TYPE (arg
))
8603 : TYPE_MAIN_VARIANT (type
) != TYPE_MAIN_VARIANT (TREE_TYPE (arg
)))
8605 /* For pointer arguments be more forgiving, e.g. due to
8606 FILE * vs. fileptr_type_node, or say char * vs. const char *
8609 && POINTER_TYPE_P (type
)
8610 && POINTER_TYPE_P (TREE_TYPE (arg
))
8611 && tree_nop_conversion_p (type
, TREE_TYPE (arg
)))
8613 /* char/short integral arguments are promoted to int
8614 by several frontends if targetm.calls.promote_prototypes
8615 is true. Allow such promotion too. */
8616 if (INTEGRAL_TYPE_P (type
)
8617 && TYPE_PRECISION (type
) < TYPE_PRECISION (integer_type_node
)
8618 && INTEGRAL_TYPE_P (TREE_TYPE (arg
))
8619 && !TYPE_UNSIGNED (TREE_TYPE (arg
))
8620 && targetm
.calls
.promote_prototypes (TREE_TYPE (fndecl
))
8622 ? useless_type_conversion_p (integer_type_node
,
8624 : tree_nop_conversion_p (integer_type_node
,
8630 if (targs
&& !VOID_TYPE_P (TREE_VALUE (targs
)))
8635 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8636 return the associated function code, otherwise return CFN_LAST. */
8639 get_call_combined_fn (const_tree call
)
8641 /* It's invalid to call this function with anything but a CALL_EXPR. */
8642 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
8644 if (!CALL_EXPR_FN (call
))
8645 return as_combined_fn (CALL_EXPR_IFN (call
));
8647 tree fndecl
= get_callee_fndecl (call
);
8649 && fndecl_built_in_p (fndecl
, BUILT_IN_NORMAL
)
8650 && tree_builtin_call_types_compatible_p (call
, fndecl
))
8651 return as_combined_fn (DECL_FUNCTION_CODE (fndecl
));
/* Comparator of indices based on tree_node_counts.  */

static int
tree_nodes_cmp (const void *p1, const void *p2)
{
  const unsigned *n1 = (const unsigned *) p1;
  const unsigned *n2 = (const unsigned *) p2;

  return tree_node_counts[*n1] - tree_node_counts[*n2];
}

/* Comparator of indices based on tree_code_counts.  */

static int
tree_codes_cmp (const void *p1, const void *p2)
{
  const unsigned *n1 = (const unsigned *) p1;
  const unsigned *n2 = (const unsigned *) p2;

  return tree_code_counts[*n1] - tree_code_counts[*n2];
}
8678 #define TREE_MEM_USAGE_SPACES 40
8680 /* Print debugging information about tree nodes generated during the compile,
8681 and any language-specific information. */
8684 dump_tree_statistics (void)
8686 if (GATHER_STATISTICS
)
8688 uint64_t total_nodes
, total_bytes
;
8689 fprintf (stderr
, "\nKind Nodes Bytes\n");
8690 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8691 total_nodes
= total_bytes
= 0;
8694 auto_vec
<unsigned> indices (all_kinds
);
8695 for (unsigned i
= 0; i
< all_kinds
; i
++)
8696 indices
.quick_push (i
);
8697 indices
.qsort (tree_nodes_cmp
);
8699 for (unsigned i
= 0; i
< (int) all_kinds
; i
++)
8701 unsigned j
= indices
[i
];
8702 fprintf (stderr
, "%-20s %6" PRIu64
"%c %9" PRIu64
"%c\n",
8703 tree_node_kind_names
[j
], SIZE_AMOUNT (tree_node_counts
[j
]),
8704 SIZE_AMOUNT (tree_node_sizes
[j
]));
8705 total_nodes
+= tree_node_counts
[j
];
8706 total_bytes
+= tree_node_sizes
[j
];
8708 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8709 fprintf (stderr
, "%-20s %6" PRIu64
"%c %9" PRIu64
"%c\n", "Total",
8710 SIZE_AMOUNT (total_nodes
), SIZE_AMOUNT (total_bytes
));
8711 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8715 fprintf (stderr
, "Code Nodes\n");
8716 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8718 auto_vec
<unsigned> indices (MAX_TREE_CODES
);
8719 for (unsigned i
= 0; i
< MAX_TREE_CODES
; i
++)
8720 indices
.quick_push (i
);
8721 indices
.qsort (tree_codes_cmp
);
8723 for (unsigned i
= 0; i
< MAX_TREE_CODES
; i
++)
8725 unsigned j
= indices
[i
];
8726 fprintf (stderr
, "%-32s %6" PRIu64
"%c\n",
8727 get_tree_code_name ((enum tree_code
) j
),
8728 SIZE_AMOUNT (tree_code_counts
[j
]));
8730 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8731 fprintf (stderr
, "\n");
8732 ssanames_print_statistics ();
8733 fprintf (stderr
, "\n");
8734 phinodes_print_statistics ();
8735 fprintf (stderr
, "\n");
8739 fprintf (stderr
, "(No per-node statistics)\n");
8741 print_type_hash_statistics ();
8742 print_debug_expr_statistics ();
8743 print_value_expr_statistics ();
8744 lang_hooks
.print_statistics ();
8747 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
/* Generate a crc32 of the low BYTES bytes of VALUE.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* This relies on the raw feedback's top 4 bits being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
		     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
  static const unsigned syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };
#undef FEEDBACK
#undef SYNDROME

  value <<= (32 - bytes * 8);
  for (unsigned ix = bytes * 2; ix--; value <<= 4)
    {
      unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];

      chksum = (chksum << 4) ^ feedback;
    }

  return chksum;
}

/* Generate a crc32 of a string.  */

unsigned
crc32_string (unsigned chksum, const char *string)
{
  do
    chksum = crc32_byte (chksum, *string);
  while (*string++);
  return chksum;
}
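
/* Illustrative sketch: the checksum can be chained across several inputs by
   feeding each result back in as the starting value:

     unsigned chk = crc32_string (0, main_input_filename);
     chk = crc32_string (chk, "GLOBAL");

   get_file_function_name below relies on this when it mixes a file name
   with other data to manufacture pseudo-unique symbol names.  */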
8790 /* P is a string that will be used in a symbol. Mask out any characters
8791 that are not valid in that context. */
8794 clean_symbol_name (char *p
)
8798 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8801 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8808 static GTY(()) unsigned anon_cnt
= 0; /* Saved for PCH. */
8810 /* Create a unique anonymous identifier. The identifier is still a
8811 valid assembly label. */
8817 #if !defined (NO_DOT_IN_LABEL)
8819 #elif !defined (NO_DOLLAR_IN_LABEL)
8827 int len
= snprintf (buf
, sizeof (buf
), fmt
, anon_cnt
++);
8828 gcc_checking_assert (len
< int (sizeof (buf
)));
8830 tree id
= get_identifier_with_length (buf
, len
);
8831 IDENTIFIER_ANON_P (id
) = true;
8836 /* Generate a name for a special-purpose function.
8837 The generated name may need to be unique across the whole link.
8838 Changes to this function may also require corresponding changes to
8839 xstrdup_mask_random.
8840 TYPE is some string to identify the purpose of this function to the
8841 linker or collect2; it must start with an uppercase letter,
8843 I - for constructors
8845 N - for C++ anonymous namespaces
8846 F - for DWARF unwind frame information. */
8849 get_file_function_name (const char *type
)
8855 /* If we already have a name we know to be unique, just use that. */
8856 if (first_global_object_name
)
8857 p
= q
= ASTRDUP (first_global_object_name
);
8858 /* If the target is handling the constructors/destructors, they
8859 will be local to this file and the name is only necessary for
8861 We also assign sub_I and sub_D sufixes to constructors called from
8862 the global static constructors. These are always local. */
8863 else if (((type
[0] == 'I' || type
[0] == 'D') && targetm
.have_ctors_dtors
)
8864 || (startswith (type
, "sub_")
8865 && (type
[4] == 'I' || type
[4] == 'D')))
8867 const char *file
= main_input_filename
;
8869 file
= LOCATION_FILE (input_location
);
8870 /* Just use the file's basename, because the full pathname
8871 might be quite long. */
8872 p
= q
= ASTRDUP (lbasename (file
));
8876 /* Otherwise, the name must be unique across the entire link.
8877 We don't have anything that we know to be unique to this translation
8878 unit, so use what we do have and throw in some randomness. */
8880 const char *name
= weak_global_object_name
;
8881 const char *file
= main_input_filename
;
8886 file
= LOCATION_FILE (input_location
);
8888 len
= strlen (file
);
8889 q
= (char *) alloca (9 + 19 + len
+ 1);
8890 memcpy (q
, file
, len
+ 1);
8892 snprintf (q
+ len
, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX
,
8893 crc32_string (0, name
), get_random_seed (false));
8898 clean_symbol_name (q
);
8899 buf
= (char *) alloca (sizeof (FILE_FUNCTION_FORMAT
) + strlen (p
)
8902 /* Set up the name of the file-level functions we may need.
8903 Use a global object (which is already required to be unique over
8904 the program) rather than the file name (which imposes extra
8906 sprintf (buf
, FILE_FUNCTION_FORMAT
, type
, p
);
8908 return get_identifier (buf
);
8911 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8913 /* Complain that the tree code of NODE does not match the expected 0
8914 terminated list of trailing codes. The trailing code list can be
8915 empty, for a more vague error message. FILE, LINE, and FUNCTION
8916 are of the caller. */
8919 tree_check_failed (const_tree node
, const char *file
,
8920 int line
, const char *function
, ...)
8924 unsigned length
= 0;
8925 enum tree_code code
;
8927 va_start (args
, function
);
8928 while ((code
= (enum tree_code
) va_arg (args
, int)))
8929 length
+= 4 + strlen (get_tree_code_name (code
));
8934 va_start (args
, function
);
8935 length
+= strlen ("expected ");
8936 buffer
= tmp
= (char *) alloca (length
);
8938 while ((code
= (enum tree_code
) va_arg (args
, int)))
8940 const char *prefix
= length
? " or " : "expected ";
8942 strcpy (tmp
+ length
, prefix
);
8943 length
+= strlen (prefix
);
8944 strcpy (tmp
+ length
, get_tree_code_name (code
));
8945 length
+= strlen (get_tree_code_name (code
));
8950 buffer
= "unexpected node";
8952 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8953 buffer
, get_tree_code_name (TREE_CODE (node
)),
8954 function
, trim_filename (file
), line
);
8957 /* Complain that the tree code of NODE does match the expected 0
8958 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8962 tree_not_check_failed (const_tree node
, const char *file
,
8963 int line
, const char *function
, ...)
8967 unsigned length
= 0;
8968 enum tree_code code
;
8970 va_start (args
, function
);
8971 while ((code
= (enum tree_code
) va_arg (args
, int)))
8972 length
+= 4 + strlen (get_tree_code_name (code
));
8974 va_start (args
, function
);
8975 buffer
= (char *) alloca (length
);
8977 while ((code
= (enum tree_code
) va_arg (args
, int)))
8981 strcpy (buffer
+ length
, " or ");
8984 strcpy (buffer
+ length
, get_tree_code_name (code
));
8985 length
+= strlen (get_tree_code_name (code
));
8989 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8990 buffer
, get_tree_code_name (TREE_CODE (node
)),
8991 function
, trim_filename (file
), line
);
8994 /* Similar to tree_check_failed, except that we check for a class of tree
8995 code, given in CL. */
8998 tree_class_check_failed (const_tree node
, const enum tree_code_class cl
,
8999 const char *file
, int line
, const char *function
)
9002 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9003 TREE_CODE_CLASS_STRING (cl
),
9004 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
9005 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9008 /* Similar to tree_check_failed, except that instead of specifying a
9009 dozen codes, use the knowledge that they're all sequential. */
9012 tree_range_check_failed (const_tree node
, const char *file
, int line
,
9013 const char *function
, enum tree_code c1
,
9017 unsigned length
= 0;
9020 for (c
= c1
; c
<= c2
; ++c
)
9021 length
+= 4 + strlen (get_tree_code_name ((enum tree_code
) c
));
9023 length
+= strlen ("expected ");
9024 buffer
= (char *) alloca (length
);
9027 for (c
= c1
; c
<= c2
; ++c
)
9029 const char *prefix
= length
? " or " : "expected ";
9031 strcpy (buffer
+ length
, prefix
);
9032 length
+= strlen (prefix
);
9033 strcpy (buffer
+ length
, get_tree_code_name ((enum tree_code
) c
));
9034 length
+= strlen (get_tree_code_name ((enum tree_code
) c
));
9037 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9038 buffer
, get_tree_code_name (TREE_CODE (node
)),
9039 function
, trim_filename (file
), line
);
/* Similar to tree_check_failed, except that we check that a tree does
   not have the specified code, given in CL.  */

void
tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
                             const char *file, int line, const char *function)
{
  internal_error
    ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function,
     trim_filename (file), line);
}

/* Similar to tree_check_failed but applied to OMP_CLAUSE codes.  */

void
omp_clause_check_failed (const_tree node, const char *file, int line,
                         const char *function, enum omp_clause_code code)
{
  internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
                  "in %s, at %s:%d",
                  omp_clause_code_name[code],
                  get_tree_code_name (TREE_CODE (node)),
                  function, trim_filename (file), line);
}
/* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes.  */

void
omp_clause_range_check_failed (const_tree node, const char *file, int line,
                               const char *function, enum omp_clause_code c1,
                               enum omp_clause_code c2)
{
  char *buffer;
  unsigned length = 0;
  unsigned int c;

  for (c = c1; c <= c2; ++c)
    length += 4 + strlen (omp_clause_code_name[c]);

  length += strlen ("expected ");
  buffer = (char *) alloca (length);
  length = 0;

  for (c = c1; c <= c2; ++c)
    {
      const char *prefix = length ? " or " : "expected ";

      strcpy (buffer + length, prefix);
      length += strlen (prefix);
      strcpy (buffer + length, omp_clause_code_name[c]);
      length += strlen (omp_clause_code_name[c]);
    }

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
                  buffer, omp_clause_code_name[TREE_CODE (node)],
                  function, trim_filename (file), line);
}
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])

/* Similar to tree_class_check_failed, except that we check for
   whether CODE contains the tree structure identified by EN.  */

void
tree_contains_struct_check_failed (const_tree node,
                                   const enum tree_node_structure_enum en,
                                   const char *file, int line,
                                   const char *function)
{
  internal_error
    ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
     TS_ENUM_NAME (en),
     get_tree_code_name (TREE_CODE (node)), function,
     trim_filename (file), line);
}
/* Similar to above, except that the check is for the bounds of a
   TREE_INT_CST's (dynamically sized) vector of elements.  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
                               const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
     "at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}

/* Similar to above, except that the check is for the bounds of a TREE_VEC's
   (dynamically sized) vector.  */

void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
                           const char *function)
{
  internal_error
    ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
/* Similar to above, except that the check is for the bounds of the operand
   vector of an expression node EXP.  */

void
tree_operand_check_failed (int idx, const_tree exp, const char *file,
                           int line, const char *function)
{
  enum tree_code code = TREE_CODE (exp);
  internal_error
    ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
     idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
     function, trim_filename (file), line);
}

/* Similar to above, except that the check is for the number of
   operands of an OMP_CLAUSE node.  */

void
omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
                                 int line, const char *function)
{
  internal_error
    ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
     "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
     omp_clause_num_ops[OMP_CLAUSE_CODE (t)], function,
     trim_filename (file), line);
}

#endif /* ENABLE_TREE_CHECKING */
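
/* Illustrative sketch (guarded out, not compiled): the failure routines
   above are normally reached through the checking accessors in tree.h
   rather than called directly.  Assuming a build with tree checking
   enabled, the macro below expands to a helper that calls
   tree_check_failed with the list of acceptable codes if T does not
   have the requested code.  */
#if 0
static void
checking_example (tree t)
{
  /* With ENABLE_TREE_CHECKING, TREE_CHECK reports a mismatch through
     tree_check_failed; without it, this is just (t).  */
  tree cst = TREE_CHECK (t, INTEGER_CST);
  (void) cst;
}
#endif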
/* Create a new vector type node holding NUNITS units of type INNERTYPE,
   and mapped to the machine mode MODE.  Initialize its fields and build
   the information necessary for debugging output.  */

static tree
make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
{
  tree t;
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
            || mode != VOIDmode)
           && !VECTOR_BOOLEAN_TYPE_P (t))
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
                                              TYPE_ATTRIBUTES (innertype),
                                              TYPE_QUALS (innertype));

  return t;
}
9226 make_or_reuse_type (unsigned size
, int unsignedp
)
9230 if (size
== INT_TYPE_SIZE
)
9231 return unsignedp
? unsigned_type_node
: integer_type_node
;
9232 if (size
== CHAR_TYPE_SIZE
)
9233 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
9234 if (size
== SHORT_TYPE_SIZE
)
9235 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
9236 if (size
== LONG_TYPE_SIZE
)
9237 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
9238 if (size
== LONG_LONG_TYPE_SIZE
)
9239 return (unsignedp
? long_long_unsigned_type_node
9240 : long_long_integer_type_node
);
9242 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9243 if (size
== int_n_data
[i
].bitsize
9244 && int_n_enabled_p
[i
])
9245 return (unsignedp
? int_n_trees
[i
].unsigned_type
9246 : int_n_trees
[i
].signed_type
);
9249 return make_unsigned_type (size
);
9251 return make_signed_type (size
);
9254 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9257 make_or_reuse_fract_type (unsigned size
, int unsignedp
, int satp
)
9261 if (size
== SHORT_FRACT_TYPE_SIZE
)
9262 return unsignedp
? sat_unsigned_short_fract_type_node
9263 : sat_short_fract_type_node
;
9264 if (size
== FRACT_TYPE_SIZE
)
9265 return unsignedp
? sat_unsigned_fract_type_node
: sat_fract_type_node
;
9266 if (size
== LONG_FRACT_TYPE_SIZE
)
9267 return unsignedp
? sat_unsigned_long_fract_type_node
9268 : sat_long_fract_type_node
;
9269 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9270 return unsignedp
? sat_unsigned_long_long_fract_type_node
9271 : sat_long_long_fract_type_node
;
9275 if (size
== SHORT_FRACT_TYPE_SIZE
)
9276 return unsignedp
? unsigned_short_fract_type_node
9277 : short_fract_type_node
;
9278 if (size
== FRACT_TYPE_SIZE
)
9279 return unsignedp
? unsigned_fract_type_node
: fract_type_node
;
9280 if (size
== LONG_FRACT_TYPE_SIZE
)
9281 return unsignedp
? unsigned_long_fract_type_node
9282 : long_fract_type_node
;
9283 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9284 return unsignedp
? unsigned_long_long_fract_type_node
9285 : long_long_fract_type_node
;
9288 return make_fract_type (size
, unsignedp
, satp
);
9291 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9294 make_or_reuse_accum_type (unsigned size
, int unsignedp
, int satp
)
9298 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9299 return unsignedp
? sat_unsigned_short_accum_type_node
9300 : sat_short_accum_type_node
;
9301 if (size
== ACCUM_TYPE_SIZE
)
9302 return unsignedp
? sat_unsigned_accum_type_node
: sat_accum_type_node
;
9303 if (size
== LONG_ACCUM_TYPE_SIZE
)
9304 return unsignedp
? sat_unsigned_long_accum_type_node
9305 : sat_long_accum_type_node
;
9306 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9307 return unsignedp
? sat_unsigned_long_long_accum_type_node
9308 : sat_long_long_accum_type_node
;
9312 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9313 return unsignedp
? unsigned_short_accum_type_node
9314 : short_accum_type_node
;
9315 if (size
== ACCUM_TYPE_SIZE
)
9316 return unsignedp
? unsigned_accum_type_node
: accum_type_node
;
9317 if (size
== LONG_ACCUM_TYPE_SIZE
)
9318 return unsignedp
? unsigned_long_accum_type_node
9319 : long_accum_type_node
;
9320 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9321 return unsignedp
? unsigned_long_long_accum_type_node
9322 : long_long_accum_type_node
;
9325 return make_accum_type (size
, unsignedp
, satp
);
/* Create an atomic variant node for TYPE.  This routine is called
   during initialization of data types to create the 5 basic atomic
   types.  The generic build_variant_type function requires these to
   already be set up in order to function properly, so cannot be
   called from there.  If ALIGN is non-zero, then ensure alignment is
   overridden to this value.  */

static tree
build_atomic_base (tree type, unsigned int align)
{
  tree t;

  /* Make sure it's not already registered.  */
  if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
    return t;

  t = build_variant_type_copy (type);
  set_type_quals (t, TYPE_QUAL_ATOMIC);

  if (align)
    SET_TYPE_ALIGN (t, align);

  return t;
}
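
/* Minimal sketch (guarded out, not compiled): once the basic atomic
   variants above exist, a later request for an _Atomic-qualified type can
   go through the generic qualification machinery, which finds the
   registered variant instead of creating a new one.  */
#if 0
static tree
atomic_unsigned_int_example (void)
{
  /* Returns the previously registered atomic variant of unsigned int.  */
  return build_qualified_type (unsigned_type_node, TYPE_QUAL_ATOMIC);
}
#endif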
9354 /* Information about the _FloatN and _FloatNx types. This must be in
9355 the same order as the corresponding TI_* enum values. */
9356 const floatn_type_info floatn_nx_types
[NUM_FLOATN_NX_TYPES
] =
9368 /* Create nodes for all integer types (and error_mark_node) using the sizes
9369 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9372 build_common_tree_nodes (bool signed_char
)
9376 error_mark_node
= make_node (ERROR_MARK
);
9377 TREE_TYPE (error_mark_node
) = error_mark_node
;
9379 initialize_sizetypes ();
9381 /* Define both `signed char' and `unsigned char'. */
9382 signed_char_type_node
= make_signed_type (CHAR_TYPE_SIZE
);
9383 TYPE_STRING_FLAG (signed_char_type_node
) = 1;
9384 unsigned_char_type_node
= make_unsigned_type (CHAR_TYPE_SIZE
);
9385 TYPE_STRING_FLAG (unsigned_char_type_node
) = 1;
9387 /* Define `char', which is like either `signed char' or `unsigned char'
9388 but not the same as either. */
9391 ? make_signed_type (CHAR_TYPE_SIZE
)
9392 : make_unsigned_type (CHAR_TYPE_SIZE
));
9393 TYPE_STRING_FLAG (char_type_node
) = 1;
9395 short_integer_type_node
= make_signed_type (SHORT_TYPE_SIZE
);
9396 short_unsigned_type_node
= make_unsigned_type (SHORT_TYPE_SIZE
);
9397 integer_type_node
= make_signed_type (INT_TYPE_SIZE
);
9398 unsigned_type_node
= make_unsigned_type (INT_TYPE_SIZE
);
9399 long_integer_type_node
= make_signed_type (LONG_TYPE_SIZE
);
9400 long_unsigned_type_node
= make_unsigned_type (LONG_TYPE_SIZE
);
9401 long_long_integer_type_node
= make_signed_type (LONG_LONG_TYPE_SIZE
);
9402 long_long_unsigned_type_node
= make_unsigned_type (LONG_LONG_TYPE_SIZE
);
9404 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9406 int_n_trees
[i
].signed_type
= make_signed_type (int_n_data
[i
].bitsize
);
9407 int_n_trees
[i
].unsigned_type
= make_unsigned_type (int_n_data
[i
].bitsize
);
9409 if (int_n_enabled_p
[i
])
9411 integer_types
[itk_intN_0
+ i
* 2] = int_n_trees
[i
].signed_type
;
9412 integer_types
[itk_unsigned_intN_0
+ i
* 2] = int_n_trees
[i
].unsigned_type
;
9416 /* Define a boolean type. This type only represents boolean values but
9417 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9418 boolean_type_node
= make_unsigned_type (BOOL_TYPE_SIZE
);
9419 TREE_SET_CODE (boolean_type_node
, BOOLEAN_TYPE
);
9420 TYPE_PRECISION (boolean_type_node
) = 1;
9421 TYPE_MAX_VALUE (boolean_type_node
) = build_int_cst (boolean_type_node
, 1);
9423 /* Define what type to use for size_t. */
9424 if (strcmp (SIZE_TYPE
, "unsigned int") == 0)
9425 size_type_node
= unsigned_type_node
;
9426 else if (strcmp (SIZE_TYPE
, "long unsigned int") == 0)
9427 size_type_node
= long_unsigned_type_node
;
9428 else if (strcmp (SIZE_TYPE
, "long long unsigned int") == 0)
9429 size_type_node
= long_long_unsigned_type_node
;
9430 else if (strcmp (SIZE_TYPE
, "short unsigned int") == 0)
9431 size_type_node
= short_unsigned_type_node
;
9436 size_type_node
= NULL_TREE
;
9437 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9438 if (int_n_enabled_p
[i
])
9440 char name
[50], altname
[50];
9441 sprintf (name
, "__int%d unsigned", int_n_data
[i
].bitsize
);
9442 sprintf (altname
, "__int%d__ unsigned", int_n_data
[i
].bitsize
);
9444 if (strcmp (name
, SIZE_TYPE
) == 0
9445 || strcmp (altname
, SIZE_TYPE
) == 0)
9447 size_type_node
= int_n_trees
[i
].unsigned_type
;
9450 if (size_type_node
== NULL_TREE
)
9454 /* Define what type to use for ptrdiff_t. */
9455 if (strcmp (PTRDIFF_TYPE
, "int") == 0)
9456 ptrdiff_type_node
= integer_type_node
;
9457 else if (strcmp (PTRDIFF_TYPE
, "long int") == 0)
9458 ptrdiff_type_node
= long_integer_type_node
;
9459 else if (strcmp (PTRDIFF_TYPE
, "long long int") == 0)
9460 ptrdiff_type_node
= long_long_integer_type_node
;
9461 else if (strcmp (PTRDIFF_TYPE
, "short int") == 0)
9462 ptrdiff_type_node
= short_integer_type_node
;
9465 ptrdiff_type_node
= NULL_TREE
;
9466 for (int i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9467 if (int_n_enabled_p
[i
])
9469 char name
[50], altname
[50];
9470 sprintf (name
, "__int%d", int_n_data
[i
].bitsize
);
9471 sprintf (altname
, "__int%d__", int_n_data
[i
].bitsize
);
9473 if (strcmp (name
, PTRDIFF_TYPE
) == 0
9474 || strcmp (altname
, PTRDIFF_TYPE
) == 0)
9475 ptrdiff_type_node
= int_n_trees
[i
].signed_type
;
9477 if (ptrdiff_type_node
== NULL_TREE
)
9481 /* Fill in the rest of the sized types. Reuse existing type nodes
9483 intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 0);
9484 intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 0);
9485 intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 0);
9486 intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 0);
9487 intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 0);
9489 unsigned_intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 1);
9490 unsigned_intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 1);
9491 unsigned_intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 1);
9492 unsigned_intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 1);
9493 unsigned_intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 1);
9495 /* Don't call build_qualified type for atomics. That routine does
9496 special processing for atomics, and until they are initialized
9497 it's better not to make that call.
9499 Check to see if there is a target override for atomic types. */
9501 atomicQI_type_node
= build_atomic_base (unsigned_intQI_type_node
,
9502 targetm
.atomic_align_for_mode (QImode
));
9503 atomicHI_type_node
= build_atomic_base (unsigned_intHI_type_node
,
9504 targetm
.atomic_align_for_mode (HImode
));
9505 atomicSI_type_node
= build_atomic_base (unsigned_intSI_type_node
,
9506 targetm
.atomic_align_for_mode (SImode
));
9507 atomicDI_type_node
= build_atomic_base (unsigned_intDI_type_node
,
9508 targetm
.atomic_align_for_mode (DImode
));
9509 atomicTI_type_node
= build_atomic_base (unsigned_intTI_type_node
,
9510 targetm
.atomic_align_for_mode (TImode
));
9512 access_public_node
= get_identifier ("public");
9513 access_protected_node
= get_identifier ("protected");
9514 access_private_node
= get_identifier ("private");
  /* Define these next since types below may use them.  */
9517 integer_zero_node
= build_int_cst (integer_type_node
, 0);
9518 integer_one_node
= build_int_cst (integer_type_node
, 1);
9519 integer_three_node
= build_int_cst (integer_type_node
, 3);
9520 integer_minus_one_node
= build_int_cst (integer_type_node
, -1);
9522 size_zero_node
= size_int (0);
9523 size_one_node
= size_int (1);
9524 bitsize_zero_node
= bitsize_int (0);
9525 bitsize_one_node
= bitsize_int (1);
9526 bitsize_unit_node
= bitsize_int (BITS_PER_UNIT
);
9528 boolean_false_node
= TYPE_MIN_VALUE (boolean_type_node
);
9529 boolean_true_node
= TYPE_MAX_VALUE (boolean_type_node
);
9531 void_type_node
= make_node (VOID_TYPE
);
9532 layout_type (void_type_node
);
9534 /* We are not going to have real types in C with less than byte alignment,
9535 so we might as well not have any types that claim to have it. */
9536 SET_TYPE_ALIGN (void_type_node
, BITS_PER_UNIT
);
9537 TYPE_USER_ALIGN (void_type_node
) = 0;
9539 void_node
= make_node (VOID_CST
);
9540 TREE_TYPE (void_node
) = void_type_node
;
9542 void_list_node
= build_tree_list (NULL_TREE
, void_type_node
);
9544 null_pointer_node
= build_int_cst (build_pointer_type (void_type_node
), 0);
9545 layout_type (TREE_TYPE (null_pointer_node
));
9547 ptr_type_node
= build_pointer_type (void_type_node
);
9549 = build_pointer_type (build_type_variant (void_type_node
, 1, 0));
9550 for (unsigned i
= 0; i
< ARRAY_SIZE (builtin_structptr_types
); ++i
)
9551 builtin_structptr_types
[i
].node
= builtin_structptr_types
[i
].base
;
9553 pointer_sized_int_node
= build_nonstandard_integer_type (POINTER_SIZE
, 1);
9555 float_type_node
= make_node (REAL_TYPE
);
9556 TYPE_PRECISION (float_type_node
) = FLOAT_TYPE_SIZE
;
9557 layout_type (float_type_node
);
9559 double_type_node
= make_node (REAL_TYPE
);
9560 TYPE_PRECISION (double_type_node
) = DOUBLE_TYPE_SIZE
;
9561 layout_type (double_type_node
);
9563 long_double_type_node
= make_node (REAL_TYPE
);
9564 TYPE_PRECISION (long_double_type_node
) = LONG_DOUBLE_TYPE_SIZE
;
9565 layout_type (long_double_type_node
);
9567 for (i
= 0; i
< NUM_FLOATN_NX_TYPES
; i
++)
9569 int n
= floatn_nx_types
[i
].n
;
9570 bool extended
= floatn_nx_types
[i
].extended
;
9571 scalar_float_mode mode
;
9572 if (!targetm
.floatn_mode (n
, extended
).exists (&mode
))
9574 int precision
= GET_MODE_PRECISION (mode
);
9575 /* Work around the rs6000 KFmode having precision 113 not
9577 const struct real_format
*fmt
= REAL_MODE_FORMAT (mode
);
9578 gcc_assert (fmt
->b
== 2 && fmt
->emin
+ fmt
->emax
== 3);
9579 int min_precision
= fmt
->p
+ ceil_log2 (fmt
->emax
- fmt
->emin
);
9581 gcc_assert (min_precision
== n
);
9582 if (precision
< min_precision
)
9583 precision
= min_precision
;
9584 FLOATN_NX_TYPE_NODE (i
) = make_node (REAL_TYPE
);
9585 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i
)) = precision
;
9586 layout_type (FLOATN_NX_TYPE_NODE (i
));
9587 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i
), mode
);
9589 float128t_type_node
= float128_type_node
;
9591 if (REAL_MODE_FORMAT (BFmode
) == &arm_bfloat_half_format
9592 && targetm
.scalar_mode_supported_p (BFmode
)
9593 && targetm
.libgcc_floating_mode_supported_p (BFmode
))
9595 bfloat16_type_node
= make_node (REAL_TYPE
);
9596 TYPE_PRECISION (bfloat16_type_node
) = GET_MODE_PRECISION (BFmode
);
9597 layout_type (bfloat16_type_node
);
9598 SET_TYPE_MODE (bfloat16_type_node
, BFmode
);
9602 float_ptr_type_node
= build_pointer_type (float_type_node
);
9603 double_ptr_type_node
= build_pointer_type (double_type_node
);
9604 long_double_ptr_type_node
= build_pointer_type (long_double_type_node
);
9605 integer_ptr_type_node
= build_pointer_type (integer_type_node
);
9607 /* Fixed size integer types. */
9608 uint16_type_node
= make_or_reuse_type (16, 1);
9609 uint32_type_node
= make_or_reuse_type (32, 1);
9610 uint64_type_node
= make_or_reuse_type (64, 1);
9611 if (targetm
.scalar_mode_supported_p (TImode
))
9612 uint128_type_node
= make_or_reuse_type (128, 1);
9614 /* Decimal float types. */
9615 if (targetm
.decimal_float_supported_p ())
9617 dfloat32_type_node
= make_node (REAL_TYPE
);
9618 TYPE_PRECISION (dfloat32_type_node
) = DECIMAL32_TYPE_SIZE
;
9619 SET_TYPE_MODE (dfloat32_type_node
, SDmode
);
9620 layout_type (dfloat32_type_node
);
9622 dfloat64_type_node
= make_node (REAL_TYPE
);
9623 TYPE_PRECISION (dfloat64_type_node
) = DECIMAL64_TYPE_SIZE
;
9624 SET_TYPE_MODE (dfloat64_type_node
, DDmode
);
9625 layout_type (dfloat64_type_node
);
9627 dfloat128_type_node
= make_node (REAL_TYPE
);
9628 TYPE_PRECISION (dfloat128_type_node
) = DECIMAL128_TYPE_SIZE
;
9629 SET_TYPE_MODE (dfloat128_type_node
, TDmode
);
9630 layout_type (dfloat128_type_node
);
9633 complex_integer_type_node
= build_complex_type (integer_type_node
, true);
9634 complex_float_type_node
= build_complex_type (float_type_node
, true);
9635 complex_double_type_node
= build_complex_type (double_type_node
, true);
9636 complex_long_double_type_node
= build_complex_type (long_double_type_node
,
9639 for (i
= 0; i
< NUM_FLOATN_NX_TYPES
; i
++)
9641 if (FLOATN_NX_TYPE_NODE (i
) != NULL_TREE
)
9642 COMPLEX_FLOATN_NX_TYPE_NODE (i
)
9643 = build_complex_type (FLOATN_NX_TYPE_NODE (i
));
9646 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9647 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9648 sat_ ## KIND ## _type_node = \
9649 make_sat_signed_ ## KIND ## _type (SIZE); \
9650 sat_unsigned_ ## KIND ## _type_node = \
9651 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9652 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9653 unsigned_ ## KIND ## _type_node = \
9654 make_unsigned_ ## KIND ## _type (SIZE);
9656 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9657 sat_ ## WIDTH ## KIND ## _type_node = \
9658 make_sat_signed_ ## KIND ## _type (SIZE); \
9659 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9660 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9661 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9662 unsigned_ ## WIDTH ## KIND ## _type_node = \
9663 make_unsigned_ ## KIND ## _type (SIZE);
9665 /* Make fixed-point type nodes based on four different widths. */
9666 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9667 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9668 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9669 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9670 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9672 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9673 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9674 NAME ## _type_node = \
9675 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9676 u ## NAME ## _type_node = \
9677 make_or_reuse_unsigned_ ## KIND ## _type \
9678 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9679 sat_ ## NAME ## _type_node = \
9680 make_or_reuse_sat_signed_ ## KIND ## _type \
9681 (GET_MODE_BITSIZE (MODE ## mode)); \
9682 sat_u ## NAME ## _type_node = \
9683 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9684 (GET_MODE_BITSIZE (U ## MODE ## mode));
9686 /* Fixed-point type and mode nodes. */
9687 MAKE_FIXED_TYPE_NODE_FAMILY (fract
, FRACT
)
9688 MAKE_FIXED_TYPE_NODE_FAMILY (accum
, ACCUM
)
9689 MAKE_FIXED_MODE_NODE (fract
, qq
, QQ
)
9690 MAKE_FIXED_MODE_NODE (fract
, hq
, HQ
)
9691 MAKE_FIXED_MODE_NODE (fract
, sq
, SQ
)
9692 MAKE_FIXED_MODE_NODE (fract
, dq
, DQ
)
9693 MAKE_FIXED_MODE_NODE (fract
, tq
, TQ
)
9694 MAKE_FIXED_MODE_NODE (accum
, ha
, HA
)
9695 MAKE_FIXED_MODE_NODE (accum
, sa
, SA
)
9696 MAKE_FIXED_MODE_NODE (accum
, da
, DA
)
9697 MAKE_FIXED_MODE_NODE (accum
, ta
, TA
)
9700 tree t
= targetm
.build_builtin_va_list ();
9702 /* Many back-ends define record types without setting TYPE_NAME.
9703 If we copied the record type here, we'd keep the original
9704 record type without a name. This breaks name mangling. So,
9705 don't copy record types and let c_common_nodes_and_builtins()
9706 declare the type to be __builtin_va_list. */
9707 if (TREE_CODE (t
) != RECORD_TYPE
)
9708 t
= build_variant_type_copy (t
);
9710 va_list_type_node
= t
;
9713 /* SCEV analyzer global shared trees. */
9714 chrec_dont_know
= make_node (SCEV_NOT_KNOWN
);
9715 TREE_TYPE (chrec_dont_know
) = void_type_node
;
9716 chrec_known
= make_node (SCEV_KNOWN
);
9717 TREE_TYPE (chrec_known
) = void_type_node
;
9720 /* Modify DECL for given flags.
9721 TM_PURE attribute is set only on types, so the function will modify
9722 DECL's type when ECF_TM_PURE is used. */
9725 set_call_expr_flags (tree decl
, int flags
)
9727 if (flags
& ECF_NOTHROW
)
9728 TREE_NOTHROW (decl
) = 1;
9729 if (flags
& ECF_CONST
)
9730 TREE_READONLY (decl
) = 1;
9731 if (flags
& ECF_PURE
)
9732 DECL_PURE_P (decl
) = 1;
9733 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
9734 DECL_LOOPING_CONST_OR_PURE_P (decl
) = 1;
9735 if (flags
& ECF_NOVOPS
)
9736 DECL_IS_NOVOPS (decl
) = 1;
9737 if (flags
& ECF_NORETURN
)
9738 TREE_THIS_VOLATILE (decl
) = 1;
9739 if (flags
& ECF_MALLOC
)
9740 DECL_IS_MALLOC (decl
) = 1;
9741 if (flags
& ECF_RETURNS_TWICE
)
9742 DECL_IS_RETURNS_TWICE (decl
) = 1;
9743 if (flags
& ECF_LEAF
)
9744 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("leaf"),
9745 NULL
, DECL_ATTRIBUTES (decl
));
9746 if (flags
& ECF_COLD
)
9747 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("cold"),
9748 NULL
, DECL_ATTRIBUTES (decl
));
9749 if (flags
& ECF_RET1
)
9750 DECL_ATTRIBUTES (decl
)
9751 = tree_cons (get_identifier ("fn spec"),
9752 build_tree_list (NULL_TREE
, build_string (2, "1 ")),
9753 DECL_ATTRIBUTES (decl
));
9754 if ((flags
& ECF_TM_PURE
) && flag_tm
)
9755 apply_tm_attr (decl
, get_identifier ("transaction_pure"));
9756 if ((flags
& ECF_XTHROW
))
9757 DECL_ATTRIBUTES (decl
)
9758 = tree_cons (get_identifier ("expected_throw"),
9759 NULL
, DECL_ATTRIBUTES (decl
));
9760 /* Looping const or pure is implied by noreturn.
9761 There is currently no way to declare looping const or looping pure alone. */
9762 gcc_assert (!(flags
& ECF_LOOPING_CONST_OR_PURE
)
9763 || ((flags
& ECF_NORETURN
) && (flags
& (ECF_CONST
| ECF_PURE
))));
9767 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9770 local_define_builtin (const char *name
, tree type
, enum built_in_function code
,
9771 const char *library_name
, int ecf_flags
)
9775 decl
= add_builtin_function (name
, type
, code
, BUILT_IN_NORMAL
,
9776 library_name
, NULL_TREE
);
9777 set_call_expr_flags (decl
, ecf_flags
);
9779 set_builtin_decl (code
, decl
, true);
9782 /* Call this function after instantiating all builtins that the language
9783 front end cares about. This will build the rest of the builtins
9784 and internal functions that are relied upon by the tree optimizers and
9788 build_common_builtin_nodes (void)
9793 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING
))
9795 ftype
= build_function_type_list (void_type_node
,
9800 local_define_builtin ("__builtin_clear_padding", ftype
,
9801 BUILT_IN_CLEAR_PADDING
,
9802 "__builtin_clear_padding",
9803 ECF_LEAF
| ECF_NOTHROW
);
9806 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
)
9807 || !builtin_decl_explicit_p (BUILT_IN_TRAP
)
9808 || !builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP
)
9809 || !builtin_decl_explicit_p (BUILT_IN_ABORT
))
9811 ftype
= build_function_type (void_type_node
, void_list_node
);
9812 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
))
9813 local_define_builtin ("__builtin_unreachable", ftype
,
9814 BUILT_IN_UNREACHABLE
,
9815 "__builtin_unreachable",
9816 ECF_NOTHROW
| ECF_LEAF
| ECF_NORETURN
9817 | ECF_CONST
| ECF_COLD
);
9818 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE_TRAP
))
9819 local_define_builtin ("__builtin_unreachable trap", ftype
,
9820 BUILT_IN_UNREACHABLE_TRAP
,
9821 "__builtin_unreachable trap",
9822 ECF_NOTHROW
| ECF_LEAF
| ECF_NORETURN
9823 | ECF_CONST
| ECF_COLD
);
9824 if (!builtin_decl_explicit_p (BUILT_IN_ABORT
))
9825 local_define_builtin ("__builtin_abort", ftype
, BUILT_IN_ABORT
,
9827 ECF_LEAF
| ECF_NORETURN
| ECF_CONST
| ECF_COLD
);
9828 if (!builtin_decl_explicit_p (BUILT_IN_TRAP
))
9829 local_define_builtin ("__builtin_trap", ftype
, BUILT_IN_TRAP
,
9831 ECF_NORETURN
| ECF_NOTHROW
| ECF_LEAF
| ECF_COLD
);
9834 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
)
9835 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9837 ftype
= build_function_type_list (ptr_type_node
,
9838 ptr_type_node
, const_ptr_type_node
,
9839 size_type_node
, NULL_TREE
);
9841 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
))
9842 local_define_builtin ("__builtin_memcpy", ftype
, BUILT_IN_MEMCPY
,
9843 "memcpy", ECF_NOTHROW
| ECF_LEAF
);
9844 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9845 local_define_builtin ("__builtin_memmove", ftype
, BUILT_IN_MEMMOVE
,
9846 "memmove", ECF_NOTHROW
| ECF_LEAF
);
9849 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP
))
9851 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
9852 const_ptr_type_node
, size_type_node
,
9854 local_define_builtin ("__builtin_memcmp", ftype
, BUILT_IN_MEMCMP
,
9855 "memcmp", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9858 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET
))
9860 ftype
= build_function_type_list (ptr_type_node
,
9861 ptr_type_node
, integer_type_node
,
9862 size_type_node
, NULL_TREE
);
9863 local_define_builtin ("__builtin_memset", ftype
, BUILT_IN_MEMSET
,
9864 "memset", ECF_NOTHROW
| ECF_LEAF
);
9867 /* If we're checking the stack, `alloca' can throw. */
9868 const int alloca_flags
9869 = ECF_MALLOC
| ECF_LEAF
| (flag_stack_check
? 0 : ECF_NOTHROW
);
9871 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA
))
9873 ftype
= build_function_type_list (ptr_type_node
,
9874 size_type_node
, NULL_TREE
);
9875 local_define_builtin ("__builtin_alloca", ftype
, BUILT_IN_ALLOCA
,
9876 "alloca", alloca_flags
);
9879 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9880 size_type_node
, NULL_TREE
);
9881 local_define_builtin ("__builtin_alloca_with_align", ftype
,
9882 BUILT_IN_ALLOCA_WITH_ALIGN
,
9883 "__builtin_alloca_with_align",
9886 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9887 size_type_node
, size_type_node
, NULL_TREE
);
9888 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype
,
9889 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
,
9890 "__builtin_alloca_with_align_and_max",
9893 ftype
= build_function_type_list (void_type_node
,
9894 ptr_type_node
, ptr_type_node
,
9895 ptr_type_node
, NULL_TREE
);
9896 local_define_builtin ("__builtin_init_trampoline", ftype
,
9897 BUILT_IN_INIT_TRAMPOLINE
,
9898 "__builtin_init_trampoline", ECF_NOTHROW
| ECF_LEAF
);
9899 local_define_builtin ("__builtin_init_heap_trampoline", ftype
,
9900 BUILT_IN_INIT_HEAP_TRAMPOLINE
,
9901 "__builtin_init_heap_trampoline",
9902 ECF_NOTHROW
| ECF_LEAF
);
9903 local_define_builtin ("__builtin_init_descriptor", ftype
,
9904 BUILT_IN_INIT_DESCRIPTOR
,
9905 "__builtin_init_descriptor", ECF_NOTHROW
| ECF_LEAF
);
9907 ftype
= build_function_type_list (ptr_type_node
, ptr_type_node
, NULL_TREE
);
9908 local_define_builtin ("__builtin_adjust_trampoline", ftype
,
9909 BUILT_IN_ADJUST_TRAMPOLINE
,
9910 "__builtin_adjust_trampoline",
9911 ECF_CONST
| ECF_NOTHROW
);
9912 local_define_builtin ("__builtin_adjust_descriptor", ftype
,
9913 BUILT_IN_ADJUST_DESCRIPTOR
,
9914 "__builtin_adjust_descriptor",
9915 ECF_CONST
| ECF_NOTHROW
);
9917 ftype
= build_function_type_list (void_type_node
,
9918 ptr_type_node
, ptr_type_node
, NULL_TREE
);
9919 if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE
))
9920 local_define_builtin ("__builtin___clear_cache", ftype
,
9921 BUILT_IN_CLEAR_CACHE
,
9925 local_define_builtin ("__builtin_nonlocal_goto", ftype
,
9926 BUILT_IN_NONLOCAL_GOTO
,
9927 "__builtin_nonlocal_goto",
9928 ECF_NORETURN
| ECF_NOTHROW
);
9930 tree ptr_ptr_type_node
= build_pointer_type (ptr_type_node
);
9932 ftype
= build_function_type_list (void_type_node
,
9933 ptr_type_node
, // void *chain
9934 ptr_type_node
, // void *func
9935 ptr_ptr_type_node
, // void **dst
9937 local_define_builtin ("__builtin_nested_func_ptr_created", ftype
,
9938 BUILT_IN_NESTED_PTR_CREATED
,
9939 "__builtin_nested_func_ptr_created", ECF_NOTHROW
);
9941 ftype
= build_function_type_list (void_type_node
,
9943 local_define_builtin ("__builtin_nested_func_ptr_deleted", ftype
,
9944 BUILT_IN_NESTED_PTR_DELETED
,
9945 "__builtin_nested_func_ptr_deleted", ECF_NOTHROW
);
9947 ftype
= build_function_type_list (void_type_node
,
9948 ptr_type_node
, ptr_type_node
, NULL_TREE
);
9949 local_define_builtin ("__builtin_setjmp_setup", ftype
,
9950 BUILT_IN_SETJMP_SETUP
,
9951 "__builtin_setjmp_setup", ECF_NOTHROW
);
9953 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9954 local_define_builtin ("__builtin_setjmp_receiver", ftype
,
9955 BUILT_IN_SETJMP_RECEIVER
,
9956 "__builtin_setjmp_receiver", ECF_NOTHROW
| ECF_LEAF
);
9958 ftype
= build_function_type_list (ptr_type_node
, NULL_TREE
);
9959 local_define_builtin ("__builtin_stack_save", ftype
, BUILT_IN_STACK_SAVE
,
9960 "__builtin_stack_save", ECF_NOTHROW
| ECF_LEAF
);
9962 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9963 local_define_builtin ("__builtin_stack_restore", ftype
,
9964 BUILT_IN_STACK_RESTORE
,
9965 "__builtin_stack_restore", ECF_NOTHROW
| ECF_LEAF
);
9967 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
9968 const_ptr_type_node
, size_type_node
,
9970 local_define_builtin ("__builtin_memcmp_eq", ftype
, BUILT_IN_MEMCMP_EQ
,
9971 "__builtin_memcmp_eq",
9972 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9974 local_define_builtin ("__builtin_strncmp_eq", ftype
, BUILT_IN_STRNCMP_EQ
,
9975 "__builtin_strncmp_eq",
9976 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9978 local_define_builtin ("__builtin_strcmp_eq", ftype
, BUILT_IN_STRCMP_EQ
,
9979 "__builtin_strcmp_eq",
9980 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9982 /* If there's a possibility that we might use the ARM EABI, build the
9983 alternate __cxa_end_cleanup node used to resume from C++. */
9984 if (targetm
.arm_eabi_unwinder
)
9986 ftype
= build_function_type_list (void_type_node
, NULL_TREE
);
9987 local_define_builtin ("__builtin_cxa_end_cleanup", ftype
,
9988 BUILT_IN_CXA_END_CLEANUP
,
9989 "__cxa_end_cleanup",
9990 ECF_NORETURN
| ECF_XTHROW
| ECF_LEAF
);
9993 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9994 local_define_builtin ("__builtin_unwind_resume", ftype
,
9995 BUILT_IN_UNWIND_RESUME
,
9996 ((targetm_common
.except_unwind_info (&global_options
)
9998 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9999 ECF_NORETURN
| ECF_XTHROW
);
10001 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS
) == NULL_TREE
)
10003 ftype
= build_function_type_list (ptr_type_node
, integer_type_node
,
10005 local_define_builtin ("__builtin_return_address", ftype
,
10006 BUILT_IN_RETURN_ADDRESS
,
10007 "__builtin_return_address",
10011 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
)
10012 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
10014 ftype
= build_function_type_list (void_type_node
, ptr_type_node
,
10015 ptr_type_node
, NULL_TREE
);
10016 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
))
10017 local_define_builtin ("__cyg_profile_func_enter", ftype
,
10018 BUILT_IN_PROFILE_FUNC_ENTER
,
10019 "__cyg_profile_func_enter", 0);
10020 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
10021 local_define_builtin ("__cyg_profile_func_exit", ftype
,
10022 BUILT_IN_PROFILE_FUNC_EXIT
,
10023 "__cyg_profile_func_exit", 0);
10026 /* The exception object and filter values from the runtime. The argument
10027 must be zero before exception lowering, i.e. from the front end. After
10028 exception lowering, it will be the region number for the exception
10029 landing pad. These functions are PURE instead of CONST to prevent
10030 them from being hoisted past the exception edge that will initialize
10031 its value in the landing pad. */
10032 ftype
= build_function_type_list (ptr_type_node
,
10033 integer_type_node
, NULL_TREE
);
10034 ecf_flags
= ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
;
10035 /* Only use TM_PURE if we have TM language support. */
10036 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1
))
10037 ecf_flags
|= ECF_TM_PURE
;
10038 local_define_builtin ("__builtin_eh_pointer", ftype
, BUILT_IN_EH_POINTER
,
10039 "__builtin_eh_pointer", ecf_flags
);
10041 tmp
= lang_hooks
.types
.type_for_mode (targetm
.eh_return_filter_mode (), 0);
10042 ftype
= build_function_type_list (tmp
, integer_type_node
, NULL_TREE
);
10043 local_define_builtin ("__builtin_eh_filter", ftype
, BUILT_IN_EH_FILTER
,
10044 "__builtin_eh_filter", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
10046 ftype
= build_function_type_list (void_type_node
,
10047 integer_type_node
, integer_type_node
,
10049 local_define_builtin ("__builtin_eh_copy_values", ftype
,
10050 BUILT_IN_EH_COPY_VALUES
,
10051 "__builtin_eh_copy_values", ECF_NOTHROW
);
10053 /* Complex multiplication and division. These are handled as builtins
10054 rather than optabs because emit_library_call_value doesn't support
10055 complex. Further, we can do slightly better with folding these
10056 beasties if the real and complex parts of the arguments are separate. */
10060 for (mode
= MIN_MODE_COMPLEX_FLOAT
; mode
<= MAX_MODE_COMPLEX_FLOAT
; ++mode
)
10062 char mode_name_buf
[4], *q
;
10064 enum built_in_function mcode
, dcode
;
10065 tree type
, inner_type
;
10066 const char *prefix
= "__";
10068 if (targetm
.libfunc_gnu_prefix
)
10071 type
= lang_hooks
.types
.type_for_mode ((machine_mode
) mode
, 0);
10074 inner_type
= TREE_TYPE (type
);
10076 ftype
= build_function_type_list (type
, inner_type
, inner_type
,
10077 inner_type
, inner_type
, NULL_TREE
);
10079 mcode
= ((enum built_in_function
)
10080 (BUILT_IN_COMPLEX_MUL_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
10081 dcode
= ((enum built_in_function
)
10082 (BUILT_IN_COMPLEX_DIV_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
10084 for (p
= GET_MODE_NAME (mode
), q
= mode_name_buf
; *p
; p
++, q
++)
10088 /* For -ftrapping-math these should throw from a former
10089 -fnon-call-exception stmt. */
10090 built_in_names
[mcode
] = concat (prefix
, "mul", mode_name_buf
, "3",
10092 local_define_builtin (built_in_names
[mcode
], ftype
, mcode
,
10093 built_in_names
[mcode
],
10094 ECF_CONST
| ECF_LEAF
);
10096 built_in_names
[dcode
] = concat (prefix
, "div", mode_name_buf
, "3",
10098 local_define_builtin (built_in_names
[dcode
], ftype
, dcode
,
10099 built_in_names
[dcode
],
10100 ECF_CONST
| ECF_LEAF
);
10104 init_internal_fns ();
/* HACK.  GROSS.  This is absolutely disgusting.  I wish there was a
   better way.

   If we requested a pointer to a vector, build up the pointers that
   we stripped off while looking for the inner type.  Similarly for
   return values from functions.

   The argument TYPE is the top of the chain, and BOTTOM is the
   new type which we will point to.  */
10118 reconstruct_complex_type (tree type
, tree bottom
)
10122 if (TREE_CODE (type
) == POINTER_TYPE
)
10124 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10125 outer
= build_pointer_type_for_mode (inner
, TYPE_MODE (type
),
10126 TYPE_REF_CAN_ALIAS_ALL (type
));
10128 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
10130 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10131 outer
= build_reference_type_for_mode (inner
, TYPE_MODE (type
),
10132 TYPE_REF_CAN_ALIAS_ALL (type
));
10134 else if (TREE_CODE (type
) == ARRAY_TYPE
)
10136 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10137 outer
= build_array_type (inner
, TYPE_DOMAIN (type
));
10139 else if (TREE_CODE (type
) == FUNCTION_TYPE
)
10141 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10142 outer
= build_function_type (inner
, TYPE_ARG_TYPES (type
),
10143 TYPE_NO_NAMED_ARGS_STDARG_P (type
));
10145 else if (TREE_CODE (type
) == METHOD_TYPE
)
10147 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
      /* The build_method_type_directly() routine prepends 'this' to the
         argument list, so we must compensate by getting rid of it.  */
10151 = build_method_type_directly
10152 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type
))),
10154 TREE_CHAIN (TYPE_ARG_TYPES (type
)));
10156 else if (TREE_CODE (type
) == OFFSET_TYPE
)
10158 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10159 outer
= build_offset_type (TYPE_OFFSET_BASETYPE (type
), inner
);
10164 return build_type_attribute_qual_variant (outer
, TYPE_ATTRIBUTES (type
),
10165 TYPE_QUALS (type
));
10168 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10171 build_vector_type_for_mode (tree innertype
, machine_mode mode
)
10174 unsigned int bitsize
;
10176 switch (GET_MODE_CLASS (mode
))
10178 case MODE_VECTOR_BOOL
:
10179 case MODE_VECTOR_INT
:
10180 case MODE_VECTOR_FLOAT
:
10181 case MODE_VECTOR_FRACT
:
10182 case MODE_VECTOR_UFRACT
:
10183 case MODE_VECTOR_ACCUM
:
10184 case MODE_VECTOR_UACCUM
:
10185 nunits
= GET_MODE_NUNITS (mode
);
10189 /* Check that there are no leftover bits. */
10190 bitsize
= GET_MODE_BITSIZE (as_a
<scalar_int_mode
> (mode
));
10191 gcc_assert (bitsize
% TREE_INT_CST_LOW (TYPE_SIZE (innertype
)) == 0);
10192 nunits
= bitsize
/ TREE_INT_CST_LOW (TYPE_SIZE (innertype
));
10196 gcc_unreachable ();
10199 return make_vector_type (innertype
, nunits
, mode
);
10202 /* Similarly, but takes the inner type and number of units, which must be
10206 build_vector_type (tree innertype
, poly_int64 nunits
)
10208 return make_vector_type (innertype
, nunits
, VOIDmode
);
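
/* Minimal sketch (guarded out, not compiled): building a 4-element vector
   of the 32-bit unsigned type.  The mode is left as VOIDmode, so
   layout_type inside make_vector_type picks a suitable vector mode (or
   BLKmode) for the target.  */
#if 0
static tree
uint32x4_example (void)
{
  return build_vector_type (uint32_type_node, 4);
}
#endif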
10211 /* Build a truth vector with NUNITS units, giving it mode MASK_MODE. */
10214 build_truth_vector_type_for_mode (poly_uint64 nunits
, machine_mode mask_mode
)
10216 gcc_assert (mask_mode
!= BLKmode
);
10218 unsigned HOST_WIDE_INT esize
;
10219 if (VECTOR_MODE_P (mask_mode
))
10221 poly_uint64 vsize
= GET_MODE_PRECISION (mask_mode
);
10222 esize
= vector_element_size (vsize
, nunits
);
10227 tree bool_type
= build_nonstandard_boolean_type (esize
);
10229 return make_vector_type (bool_type
, nunits
, mask_mode
);
10232 /* Build a vector type that holds one boolean result for each element of
10233 vector type VECTYPE. The public interface for this operation is
10237 build_truth_vector_type_for (tree vectype
)
10239 machine_mode vector_mode
= TYPE_MODE (vectype
);
10240 poly_uint64 nunits
= TYPE_VECTOR_SUBPARTS (vectype
);
10242 machine_mode mask_mode
;
10243 if (VECTOR_MODE_P (vector_mode
)
10244 && targetm
.vectorize
.get_mask_mode (vector_mode
).exists (&mask_mode
))
10245 return build_truth_vector_type_for_mode (nunits
, mask_mode
);
10247 poly_uint64 vsize
= tree_to_poly_uint64 (TYPE_SIZE (vectype
));
10248 unsigned HOST_WIDE_INT esize
= vector_element_size (vsize
, nunits
);
10249 tree bool_type
= build_nonstandard_boolean_type (esize
);
10251 return make_vector_type (bool_type
, nunits
, VOIDmode
);
10254 /* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
10258 build_opaque_vector_type (tree innertype
, poly_int64 nunits
)
10260 tree t
= make_vector_type (innertype
, nunits
, VOIDmode
);
10262 /* We always build the non-opaque variant before the opaque one,
10263 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10264 cand
= TYPE_NEXT_VARIANT (t
);
10266 && TYPE_VECTOR_OPAQUE (cand
)
10267 && check_qualified_type (cand
, t
, TYPE_QUALS (t
)))
  /* Otherwise build a variant type and make sure to queue it after
     the non-opaque type.  */
10271 cand
= build_distinct_type_copy (t
);
10272 TYPE_VECTOR_OPAQUE (cand
) = true;
10273 TYPE_CANONICAL (cand
) = TYPE_CANONICAL (t
);
10274 TYPE_NEXT_VARIANT (cand
) = TYPE_NEXT_VARIANT (t
);
10275 TYPE_NEXT_VARIANT (t
) = cand
;
10276 TYPE_MAIN_VARIANT (cand
) = TYPE_MAIN_VARIANT (t
);
10277 /* Type variants have no alias set defined. */
10278 TYPE_ALIAS_SET (cand
) = -1;
10282 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10284 static poly_wide_int
10285 vector_cst_int_elt (const_tree t
, unsigned int i
)
10287 /* First handle elements that are directly encoded. */
10288 unsigned int encoded_nelts
= vector_cst_encoded_nelts (t
);
10289 if (i
< encoded_nelts
)
10290 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t
, i
));
10292 /* Identify the pattern that contains element I and work out the index of
10293 the last encoded element for that pattern. */
10294 unsigned int npatterns
= VECTOR_CST_NPATTERNS (t
);
10295 unsigned int pattern
= i
% npatterns
;
10296 unsigned int count
= i
/ npatterns
;
10297 unsigned int final_i
= encoded_nelts
- npatterns
+ pattern
;
10299 /* If there are no steps, the final encoded value is the right one. */
10300 if (!VECTOR_CST_STEPPED_P (t
))
10301 return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t
, final_i
));
10303 /* Otherwise work out the value from the last two encoded elements. */
10304 tree v1
= VECTOR_CST_ENCODED_ELT (t
, final_i
- npatterns
);
10305 tree v2
= VECTOR_CST_ENCODED_ELT (t
, final_i
);
10306 poly_wide_int diff
= wi::to_poly_wide (v2
) - wi::to_poly_wide (v1
);
10307 return wi::to_poly_wide (v2
) + (count
- 2) * diff
;
10310 /* Return the value of element I of VECTOR_CST T. */
10313 vector_cst_elt (const_tree t
, unsigned int i
)
10315 /* First handle elements that are directly encoded. */
10316 unsigned int encoded_nelts
= vector_cst_encoded_nelts (t
);
10317 if (i
< encoded_nelts
)
10318 return VECTOR_CST_ENCODED_ELT (t
, i
);
10320 /* If there are no steps, the final encoded value is the right one. */
10321 if (!VECTOR_CST_STEPPED_P (t
))
10323 /* Identify the pattern that contains element I and work out the index of
10324 the last encoded element for that pattern. */
10325 unsigned int npatterns
= VECTOR_CST_NPATTERNS (t
);
10326 unsigned int pattern
= i
% npatterns
;
10327 unsigned int final_i
= encoded_nelts
- npatterns
+ pattern
;
10328 return VECTOR_CST_ENCODED_ELT (t
, final_i
);
10331 /* Otherwise work out the value from the last two encoded elements. */
10332 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t
)),
10333 vector_cst_int_elt (t
, i
));
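
/* Worked example (guarded out, not compiled) of the VECTOR_CST encoding
   used above: a stepped series { 0, 1, 2, 3, ... } is stored with one
   pattern and three encoded elements (0, 1, 2).  Element I beyond the
   encoded ones is recovered as v2 + (count - 2) * (v2 - v1), i.e.
   2 + (I - 2) * 1 here, so element 5 is computed as 5 without being
   stored.  STEPPED_CST below is assumed to be such a VECTOR_CST.  */
#if 0
static void
vector_cst_example (tree stepped_cst)
{
  tree elt5 = vector_cst_elt (stepped_cst, 5);
  gcc_assert (tree_to_uhwi (elt5) == 5);
}
#endif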
10336 /* Given an initializer INIT, return TRUE if INIT is zero or some
10337 aggregate of zeros. Otherwise return FALSE. If NONZERO is not
10338 null, set *NONZERO if and only if INIT is known not to be all
10339 zeros. The combination of return value of false and *NONZERO
10340 false implies that INIT may but need not be all zeros. Other
10341 combinations indicate definitive answers. */
10344 initializer_zerop (const_tree init
, bool *nonzero
/* = NULL */)
10350 /* Conservatively clear NONZERO and set it only if INIT is definitely
10356 unsigned HOST_WIDE_INT off
= 0;
10358 switch (TREE_CODE (init
))
10361 if (integer_zerop (init
))
10368 /* ??? Note that this is not correct for C4X float formats. There,
10369 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10370 negative exponent. */
10371 if (real_zerop (init
)
10372 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init
)))
10379 if (fixed_zerop (init
))
10386 if (integer_zerop (init
)
10387 || (real_zerop (init
)
10388 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init
)))
10389 && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init
)))))
10396 if (VECTOR_CST_NPATTERNS (init
) == 1
10397 && VECTOR_CST_DUPLICATE_P (init
)
10398 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init
, 0)))
10406 if (TREE_CLOBBER_P (init
))
10409 unsigned HOST_WIDE_INT idx
;
10412 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init
), idx
, elt
)
10413 if (!initializer_zerop (elt
, nonzero
))
10421 tree arg
= TREE_OPERAND (init
, 0);
10422 if (TREE_CODE (arg
) != ADDR_EXPR
)
10424 tree offset
= TREE_OPERAND (init
, 1);
10425 if (TREE_CODE (offset
) != INTEGER_CST
10426 || !tree_fits_uhwi_p (offset
))
10428 off
= tree_to_uhwi (offset
);
10431 arg
= TREE_OPERAND (arg
, 0);
10432 if (TREE_CODE (arg
) != STRING_CST
)
10436 /* Fall through. */
10440 gcc_assert (off
<= INT_MAX
);
10443 int n
= TREE_STRING_LENGTH (init
);
10447 /* We need to loop through all elements to handle cases like
10448 "\0" and "\0foobar". */
10449 for (i
= 0; i
< n
; ++i
)
10450 if (TREE_STRING_POINTER (init
)[i
] != '\0')
10464 /* Return true if EXPR is an initializer expression in which every element
10465 is a constant that is numerically equal to 0 or 1. The elements do not
10466 need to be equal to each other. */
10469 initializer_each_zero_or_onep (const_tree expr
)
10471 STRIP_ANY_LOCATION_WRAPPER (expr
);
10473 switch (TREE_CODE (expr
))
10476 return integer_zerop (expr
) || integer_onep (expr
);
10479 return real_zerop (expr
) || real_onep (expr
);
10483 unsigned HOST_WIDE_INT nelts
= vector_cst_encoded_nelts (expr
);
10484 if (VECTOR_CST_STEPPED_P (expr
)
10485 && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr
)).is_constant (&nelts
))
10488 for (unsigned int i
= 0; i
< nelts
; ++i
)
10490 tree elt
= vector_cst_elt (expr
, i
);
10491 if (!initializer_each_zero_or_onep (elt
))
10503 /* Check if vector VEC consists of all the equal elements and
10504 that the number of elements corresponds to the type of VEC.
10505 The function returns first element of the vector
10506 or NULL_TREE if the vector is not uniform. */
10508 uniform_vector_p (const_tree vec
)
10511 unsigned HOST_WIDE_INT i
, nelts
;
10513 if (vec
== NULL_TREE
)
10516 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec
)));
10518 if (TREE_CODE (vec
) == VEC_DUPLICATE_EXPR
)
10519 return TREE_OPERAND (vec
, 0);
10521 else if (TREE_CODE (vec
) == VECTOR_CST
)
10523 if (VECTOR_CST_NPATTERNS (vec
) == 1 && VECTOR_CST_DUPLICATE_P (vec
))
10524 return VECTOR_CST_ENCODED_ELT (vec
, 0);
10528 else if (TREE_CODE (vec
) == CONSTRUCTOR
10529 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec
)).is_constant (&nelts
))
10531 first
= error_mark_node
;
10533 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec
), i
, t
)
10540 if (!operand_equal_p (first
, t
, 0))
10546 if (TREE_CODE (first
) == CONSTRUCTOR
|| TREE_CODE (first
) == VECTOR_CST
)
10547 return uniform_vector_p (first
);
10554 /* If the argument is INTEGER_CST, return it. If the argument is vector
10555 with all elements the same INTEGER_CST, return that INTEGER_CST. Otherwise
10557 Look through location wrappers. */
10560 uniform_integer_cst_p (tree t
)
10562 STRIP_ANY_LOCATION_WRAPPER (t
);
10564 if (TREE_CODE (t
) == INTEGER_CST
)
10567 if (VECTOR_TYPE_P (TREE_TYPE (t
)))
10569 t
= uniform_vector_p (t
);
10570 if (t
&& TREE_CODE (t
) == INTEGER_CST
)
10577 /* Checks to see if T is a constant or a constant vector and if each element E
10578 adheres to ~E + 1 == pow2 then return ~E otherwise NULL_TREE. */
10581 bitmask_inv_cst_vector_p (tree t
)
10584 tree_code code
= TREE_CODE (t
);
10585 tree type
= TREE_TYPE (t
);
10587 if (!INTEGRAL_TYPE_P (type
)
10588 && !VECTOR_INTEGER_TYPE_P (type
))
10591 unsigned HOST_WIDE_INT nelts
= 1;
10593 unsigned int idx
= 0;
10594 bool uniform
= uniform_integer_cst_p (t
);
10595 tree newtype
= unsigned_type_for (type
);
10596 tree_vector_builder builder
;
10597 if (code
== INTEGER_CST
)
10601 if (!VECTOR_CST_NELTS (t
).is_constant (&nelts
))
10604 cst
= vector_cst_elt (t
, 0);
10605 builder
.new_vector (newtype
, nelts
, 1);
10608 tree ty
= unsigned_type_for (TREE_TYPE (cst
));
10613 cst
= vector_cst_elt (t
, idx
);
10614 wide_int icst
= wi::to_wide (cst
);
10615 wide_int inv
= wi::bit_not (icst
);
10616 icst
= wi::add (1, inv
);
10617 if (wi::popcount (icst
) != 1)
10620 tree newcst
= wide_int_to_tree (ty
, inv
);
10623 return build_uniform_cst (newtype
, newcst
);
10625 builder
.quick_push (newcst
);
10627 while (++idx
< nelts
);
10629 return builder
.build ();
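
/* Worked example (guarded out, not compiled): for E = 0xfffffff0 in a
   32-bit unsigned type, ~E = 0xf and ~E + 1 = 0x10 is a power of two, so
   the routine above returns ~E in the unsigned variant of E's type; for
   E = 0xfffffff2, ~E + 1 = 0xe fails the popcount test and NULL_TREE is
   returned.  */
#if 0
static void
bitmask_inv_example (void)
{
  tree mask = build_int_cst (unsigned_type_node, 0xfffffff0);
  tree inv = bitmask_inv_cst_vector_p (mask);
  gcc_assert (inv && tree_to_uhwi (inv) == 0xf);
}
#endif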
10632 /* If VECTOR_CST T has a single nonzero element, return the index of that
10633 element, otherwise return -1. */
10636 single_nonzero_element (const_tree t
)
10638 unsigned HOST_WIDE_INT nelts
;
10639 unsigned int repeat_nelts
;
10640 if (VECTOR_CST_NELTS (t
).is_constant (&nelts
))
10641 repeat_nelts
= nelts
;
10642 else if (VECTOR_CST_NELTS_PER_PATTERN (t
) == 2)
10644 nelts
= vector_cst_encoded_nelts (t
);
10645 repeat_nelts
= VECTOR_CST_NPATTERNS (t
);
10651 for (unsigned int i
= 0; i
< nelts
; ++i
)
10653 tree elt
= vector_cst_elt (t
, i
);
10654 if (!integer_zerop (elt
) && !real_zerop (elt
))
10656 if (res
>= 0 || i
>= repeat_nelts
)
/* Build an empty statement at location LOC.  */

tree
build_empty_stmt (location_t loc)
{
  tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
  SET_EXPR_LOCATION (t, loc);

  return t;
}
/* Build an OMP clause with code CODE.  LOC is the location of the
   clause.  */

tree
build_omp_clause (location_t loc, enum omp_clause_code code)
{
  tree t;
  int size, length;

  length = omp_clause_num_ops[code];
  size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));

  record_node_allocation_statistics (OMP_CLAUSE, size);

  t = (tree) ggc_internal_alloc (size);
  memset (t, 0, size);
  TREE_SET_CODE (t, OMP_CLAUSE);
  OMP_CLAUSE_SET_CODE (t, code);
  OMP_CLAUSE_LOCATION (t) = loc;

  return t;
}
/* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
   includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
   Except for the CODE and operand count field, other storage for the
   object is initialized to zeros.  */

tree
build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
{
  tree t;
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);

  gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
  gcc_assert (len >= 1);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, code);

  /* Can't use TREE_OPERAND to store the length because if checking is
     enabled, it will try to check the length before we store it.  :-P  */
  t->exp.operands[0] = build_int_cst (sizetype, len);

  return t;
}
/* Helper function for build_call_* functions; build a CALL_EXPR with
   indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
   the argument slots.  */

static tree
build_call_1 (tree return_type, tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  TREE_TYPE (t) = return_type;
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}
10742 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10743 FN and a null static chain slot. NARGS is the number of call arguments
10744 which are specified as "..." arguments. */
10747 build_call_nary (tree return_type
, tree fn
, int nargs
, ...)
10751 va_start (args
, nargs
);
10752 ret
= build_call_valist (return_type
, fn
, nargs
, args
);
10757 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10758 FN and a null static chain slot. NARGS is the number of call arguments
10759 which are specified as a va_list ARGS. */
10762 build_call_valist (tree return_type
, tree fn
, int nargs
, va_list args
)
10767 t
= build_call_1 (return_type
, fn
, nargs
);
10768 for (i
= 0; i
< nargs
; i
++)
10769 CALL_EXPR_ARG (t
, i
) = va_arg (args
, tree
);
10770 process_call_operands (t
);
10774 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10775 FN and a null static chain slot. NARGS is the number of call arguments
10776 which are specified as a tree array ARGS. */
10779 build_call_array_loc (location_t loc
, tree return_type
, tree fn
,
10780 int nargs
, const tree
*args
)
10785 t
= build_call_1 (return_type
, fn
, nargs
);
10786 for (i
= 0; i
< nargs
; i
++)
10787 CALL_EXPR_ARG (t
, i
) = args
[i
];
10788 process_call_operands (t
);
10789 SET_EXPR_LOCATION (t
, loc
);
10793 /* Like build_call_array, but takes a vec. */
10796 build_call_vec (tree return_type
, tree fn
, const vec
<tree
, va_gc
> *args
)
10801 ret
= build_call_1 (return_type
, fn
, vec_safe_length (args
));
10802 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
10803 CALL_EXPR_ARG (ret
, ix
) = t
;
10804 process_call_operands (ret
);
10808 /* Conveniently construct a function call expression. FNDECL names the
10809 function to be called and N arguments are passed in the array
10813 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
10815 tree fntype
= TREE_TYPE (fndecl
);
10816 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10818 return fold_build_call_array_loc (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
10821 /* Conveniently construct a function call expression. FNDECL names the
10822 function to be called and the arguments are passed in the vector
10826 build_call_expr_loc_vec (location_t loc
, tree fndecl
, vec
<tree
, va_gc
> *vec
)
10828 return build_call_expr_loc_array (loc
, fndecl
, vec_safe_length (vec
),
10829 vec_safe_address (vec
));
10833 /* Conveniently construct a function call expression. FNDECL names the
10834 function to be called, N is the number of arguments, and the "..."
10835 parameters are the argument expressions. */
10838 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
10841 tree
*argarray
= XALLOCAVEC (tree
, n
);
10845 for (i
= 0; i
< n
; i
++)
10846 argarray
[i
] = va_arg (ap
, tree
);
10848 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
/* Like build_call_expr_loc (UNKNOWN_LOCATION, ...).  Duplicated because
   varargs macros aren't supported by all bootstrap compilers.  */

tree
build_call_expr (tree fndecl, int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
}
/* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
   type TYPE.  This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
   It will get gimplified later into an ordinary internal function.  */

tree
build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
                                    tree type, int n, const tree *args)
{
  tree t = build_call_1 (type, NULL_TREE, n);
  for (int i = 0; i < n; ++i)
    CALL_EXPR_ARG (t, i) = args[i];
  SET_EXPR_LOCATION (t, loc);
  CALL_EXPR_IFN (t) = ifn;
  process_call_operands (t);
  return t;
}
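
/* Unlike the builtin-based helpers above, internal-function calls carry no
   FUNCTION_DECL: CALL_EXPR_FN is NULL_TREE and the callee is identified by
   CALL_EXPR_IFN.  A hypothetical sketch (the argument tree X is assumed):

     tree args[1] = { x };
     tree call = build_call_expr_internal_loc_array (loc, IFN_SQRT,
                                                     TREE_TYPE (x), 1, args);

   builds a call that is later gimplified into a GIMPLE internal call.  */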
/* Build internal call expression.  This is just like CALL_EXPR, except
   its CALL_EXPR_FN is NULL.  It will get gimplified later into an ordinary
   internal function.  */

tree
build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
                              tree type, int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
}
10904 /* Return a function call to FN, if the target is guaranteed to support it,
10907 N is the number of arguments, passed in the "...", and TYPE is the
10908 type of the return value. */
10911 maybe_build_call_expr_loc (location_t loc
, combined_fn fn
, tree type
,
10915 tree
*argarray
= XALLOCAVEC (tree
, n
);
10919 for (i
= 0; i
< n
; i
++)
10920 argarray
[i
] = va_arg (ap
, tree
);
10922 if (internal_fn_p (fn
))
10924 internal_fn ifn
= as_internal_fn (fn
);
10925 if (direct_internal_fn_p (ifn
))
10927 tree_pair types
= direct_internal_fn_types (ifn
, type
, argarray
);
10928 if (!direct_internal_fn_supported_p (ifn
, types
,
10929 OPTIMIZE_FOR_BOTH
))
10932 return build_call_expr_internal_loc_array (loc
, ifn
, type
, n
, argarray
);
10936 tree fndecl
= builtin_decl_implicit (as_builtin_fn (fn
));
10939 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
/* Return a function call to the appropriate builtin alloca variant.

   SIZE is the size to be allocated.  ALIGN, if non-zero, is the requested
   alignment of the allocated area.  MAX_SIZE, if non-negative, is an upper
   bound for SIZE in case it is not a fixed value.  */

tree
build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
{
  if (max_size >= 0)
    {
      tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
      return
        build_call_expr (t, 3, size, size_int (align), size_int (max_size));
    }
  else if (align > 0)
    {
      tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
      return build_call_expr (t, 2, size, size_int (align));
    }
  else
    {
      tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
      return build_call_expr (t, 1, size);
    }
}
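
/* For example (illustrative only): build_alloca_call_expr (size, 0, -1)
   degenerates to a plain __builtin_alloca (size) call, a nonzero ALIGN
   selects __builtin_alloca_with_align, and additionally passing a
   non-negative MAX_SIZE selects __builtin_alloca_with_align_and_max.  */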
/* The built-in decl to use to mark code points believed to be unreachable.
   Typically __builtin_unreachable, but __builtin_trap if
   -fsanitize=unreachable -fsanitize-trap=unreachable.  If only
   -fsanitize=unreachable, we rely on sanopt to replace calls with the
   appropriate ubsan function.  When building a call directly, use
   {gimple_,}build_builtin_unreachable instead.  */

tree
builtin_decl_unreachable ()
{
  enum built_in_function fncode = BUILT_IN_UNREACHABLE;

  if (sanitize_flags_p (SANITIZE_UNREACHABLE)
      ? (flag_sanitize_trap & SANITIZE_UNREACHABLE)
      : flag_unreachable_traps)
    fncode = BUILT_IN_UNREACHABLE_TRAP;
  /* For non-trapping sanitize, we will rewrite __builtin_unreachable () later,
     in the sanopt pass.  */

  return builtin_decl_explicit (fncode);
}
/* Build a call to __builtin_unreachable, possibly rewritten by
   -fsanitize=unreachable.  Use this rather than the above when practical.  */

tree
build_builtin_unreachable (location_t loc)
{
  tree data = NULL_TREE;
  tree fn = sanitize_unreachable_fn (&data, loc);
  return build_call_expr_loc (loc, fn, data != NULL_TREE, data);
}
/* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
   if SIZE == -1) and return a tree node representing char* pointer to
   it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)).  When STR is nonnull
   the STRING_CST value is the LEN bytes at STR (the representation
   of the string, which may be wide).  Otherwise it's all zeros.  */

tree
build_string_literal (unsigned len, const char *str /* = NULL */,
                      tree eltype /* = char_type_node */,
                      unsigned HOST_WIDE_INT size /* = -1 */)
{
  tree t = build_string (len, str);
  /* Set the maximum valid index based on the string length or SIZE.  */
  unsigned HOST_WIDE_INT maxidx
    = (size == HOST_WIDE_INT_M1U ? len : size) - 1;
  tree index = build_index_type (size_int (maxidx));
  eltype = build_type_variant (eltype, 1, 0);
  tree type = build_array_type (eltype, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  type = build_pointer_type (eltype);
  t = build1 (ADDR_EXPR, type,
              build4 (ARRAY_REF, eltype,
                      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
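
/* Hypothetical usage sketch (assumed names): the default arguments make the
   common case short, e.g.

     tree p = build_string_literal (strlen ("hi") + 1, "hi");

   yields an ADDR_EXPR of pointer-to-const-char type pointing at a static
   STRING_CST holding the three bytes "hi\0".  */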
/* Return true if T (assumed to be a DECL) must be assigned a memory
   location.  */

bool
needs_to_live_in_memory (const_tree t)
{
  return (TREE_ADDRESSABLE (t)
          || is_global_var (t)
          || (TREE_CODE (t) == RESULT_DECL
              && !DECL_BY_REFERENCE (t)
              && aggregate_value_p (t, current_function_decl)));
}
/* Return value of a constant X and sign-extend it.  */

HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
        val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
        val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }

  return val;
}
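
/* Worked example (illustrative): for an 8-bit signed type holding the bit
   pattern 0xff, BITS is 8 and the sign bit is set, so VAL is widened with
   one bits and the function returns -1; for the pattern 0x7f it returns
   127.  */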
/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is unsigned iff UNSIGNEDP is true, or itself
   if TYPE is already an integer type of signedness UNSIGNEDP.
   If TYPE is a floating-point type, return an integer type with the same
   bitsize and with the signedness given by UNSIGNEDP; this is useful
   when doing bit-level operations on a floating-point value.  */

tree
signed_or_unsigned_type_for (int unsignedp, tree type)
{
  if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
    return type;

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      tree inner = TREE_TYPE (type);
      tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
      if (!inner2)
        return NULL_TREE;
      if (inner == inner2)
        return type;
      machine_mode new_mode;
      if (VECTOR_MODE_P (TYPE_MODE (type))
          && related_int_vector_mode (TYPE_MODE (type)).exists (&new_mode))
        return build_vector_type_for_mode (inner2, new_mode);
      return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
    }

  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree inner = TREE_TYPE (type);
      tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
      if (!inner2)
        return NULL_TREE;
      if (inner == inner2)
        return type;
      return build_complex_type (inner2);
    }

  unsigned int bits;
  if (INTEGRAL_TYPE_P (type)
      || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    bits = TYPE_PRECISION (type);
  else if (TREE_CODE (type) == REAL_TYPE)
    bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
  else
    return NULL_TREE;

  if (TREE_CODE (type) == BITINT_TYPE && (unsignedp || bits > 1))
    return build_bitint_type (bits, unsignedp);
  return build_nonstandard_integer_type (bits, unsignedp);
}
/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is unsigned, or itself if TYPE is already an
   unsigned integer type.  If TYPE is a floating-point type, return an
   unsigned integer type with the same bitsize as TYPE.  */

tree
unsigned_type_for (tree type)
{
  return signed_or_unsigned_type_for (1, type);
}

/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is signed, or itself if TYPE is already a
   signed integer type.  If TYPE is a floating-point type, return a
   signed integer type with the same bitsize as TYPE.  */

tree
signed_type_for (tree type)
{
  return signed_or_unsigned_type_for (0, type);
}
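
/* For instance (illustrative): unsigned_type_for applied to a 32-bit signed
   integer type returns an unsigned integer type of the same 32-bit
   precision, and applied to a 32-bit float type it returns a 32-bit
   unsigned integer type, which is what bit-level twiddling of
   floating-point values wants.  */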
/* - For VECTOR_TYPEs:
   - The truth type must be a VECTOR_BOOLEAN_TYPE.
   - The number of elements must match (known_eq).
   - targetm.vectorize.get_mask_mode exists, and exactly
     the same mode as the truth type.
   - Otherwise, the truth type must be a BOOLEAN_TYPE
     or useless_type_conversion_p to BOOLEAN_TYPE.  */

bool
is_truth_type_for (tree type, tree truth_type)
{
  machine_mode mask_mode = TYPE_MODE (truth_type);
  machine_mode vmode = TYPE_MODE (type);
  machine_mode tmask_mode;

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if (VECTOR_BOOLEAN_TYPE_P (truth_type)
          && known_eq (TYPE_VECTOR_SUBPARTS (type),
                       TYPE_VECTOR_SUBPARTS (truth_type))
          && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
          && tmask_mode == mask_mode)
        return true;

      return false;
    }

  return useless_type_conversion_p (boolean_type_node, truth_type);
}
/* If TYPE is a vector type, return a signed integer vector type with the
   same width and number of subparts.  Otherwise return boolean_type_node.  */

tree
truth_type_for (tree type)
{
  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if (VECTOR_BOOLEAN_TYPE_P (type))
        return type;
      return build_truth_vector_type_for (type);
    }
  else
    return boolean_type_node;
}
11193 /* Returns the largest value obtainable by casting something in INNER type to
11197 upper_bound_in_type (tree outer
, tree inner
)
11199 unsigned int det
= 0;
11200 unsigned oprec
= TYPE_PRECISION (outer
);
11201 unsigned iprec
= TYPE_PRECISION (inner
);
11204 /* Compute a unique number for every combination. */
11205 det
|= (oprec
> iprec
) ? 4 : 0;
11206 det
|= TYPE_UNSIGNED (outer
) ? 2 : 0;
11207 det
|= TYPE_UNSIGNED (inner
) ? 1 : 0;
11209 /* Determine the exponent to use. */
11214 /* oprec <= iprec, outer: signed, inner: don't care. */
11219 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11223 /* oprec > iprec, outer: signed, inner: signed. */
11227 /* oprec > iprec, outer: signed, inner: unsigned. */
11231 /* oprec > iprec, outer: unsigned, inner: signed. */
11235 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11239 gcc_unreachable ();
11242 return wide_int_to_tree (outer
,
11243 wi::mask (prec
, false, TYPE_PRECISION (outer
)));
/* Returns the smallest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
lower_bound_in_type (tree outer, tree inner)
{
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);

  /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
     and obtain 0.  */
  if (TYPE_UNSIGNED (outer)
      /* If we are widening something of an unsigned type, OUTER type
         contains all values of INNER type.  In particular, both INNER
         and OUTER types have zero in common.  */
      || (oprec > iprec && TYPE_UNSIGNED (inner)))
    return build_int_cst (outer, 0);
  else
    {
      /* If we are widening a signed type to another signed type, we
         want to obtain -2^^(iprec-1).  If we are keeping the
         precision or narrowing to a signed type, we want to obtain
         -2^^(oprec-1).  */
      unsigned prec = oprec > iprec ? iprec : oprec;
      return wide_int_to_tree (outer,
                               wi::mask (prec - 1, true,
                                         TYPE_PRECISION (outer)));
    }
}
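
/* Worked example (illustrative): casting values of a signed 8-bit INNER
   type to a signed 32-bit OUTER type can produce at most 127 and at least
   -128, so upper_bound_in_type returns 127 and lower_bound_in_type returns
   -128; if OUTER is unsigned the lower bound is simply 0.  */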
/* Return true if two operands that are suitable for PHI nodes are
   necessarily equal.  Specifically, both ARG0 and ARG1 must be either
   SSA_NAME or invariant.  Note that this is strictly an optimization.
   That is, callers of this function can directly call operand_equal_p
   and get the same result, only slower.  */

bool
operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
{
  if (arg0 == arg1)
    return true;
  if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
    return false;

  return operand_equal_p (arg0, arg1, 0);
}
/* Returns number of zeros at the end of binary representation of X.  */

tree
num_ending_zeros (const_tree x)
{
  return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
}
11301 #define WALK_SUBTREE(NODE) \
11304 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11310 /* This is a subroutine of walk_tree that walks field of TYPE that are to
11311 be walked whenever a type is seen in the tree. Rest of operands and return
11312 value are as for walk_tree. */
11315 walk_type_fields (tree type
, walk_tree_fn func
, void *data
,
11316 hash_set
<tree
> *pset
, walk_tree_lh lh
)
11318 tree result
= NULL_TREE
;
11320 switch (TREE_CODE (type
))
11323 case REFERENCE_TYPE
:
11325 /* We have to worry about mutually recursive pointers. These can't
11326 be written in C. They can in Ada. It's pathological, but
11327 there's an ACATS test (c38102a) that checks it. Deal with this
11328 by checking if we're pointing to another pointer, that one
11329 points to another pointer, that one does too, and we have no htab.
11330 If so, get a hash table. We check three levels deep to avoid
11331 the cost of the hash table if we don't need one. */
11332 if (POINTER_TYPE_P (TREE_TYPE (type
))
11333 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type
)))
11334 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type
))))
11337 result
= walk_tree_without_duplicates (&TREE_TYPE (type
),
11348 WALK_SUBTREE (TREE_TYPE (type
));
11352 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type
));
11354 /* Fall through. */
11356 case FUNCTION_TYPE
:
11357 WALK_SUBTREE (TREE_TYPE (type
));
11361 /* We never want to walk into default arguments. */
11362 for (arg
= TYPE_ARG_TYPES (type
); arg
; arg
= TREE_CHAIN (arg
))
11363 WALK_SUBTREE (TREE_VALUE (arg
));
      /* Don't follow this node's type if a pointer for fear that
11369 we'll have infinite recursion. If we have a PSET, then we
11372 || (!POINTER_TYPE_P (TREE_TYPE (type
))
11373 && TREE_CODE (TREE_TYPE (type
)) != OFFSET_TYPE
))
11374 WALK_SUBTREE (TREE_TYPE (type
));
11375 WALK_SUBTREE (TYPE_DOMAIN (type
));
11379 WALK_SUBTREE (TREE_TYPE (type
));
11380 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type
));
11390 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11391 called with the DATA and the address of each sub-tree. If FUNC returns a
11392 non-NULL value, the traversal is stopped, and the value returned by FUNC
11393 is returned. If PSET is non-NULL it is used to record the nodes visited,
11394 and to avoid visiting a node more than once. */
11397 walk_tree_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11398 hash_set
<tree
> *pset
, walk_tree_lh lh
)
11400 #define WALK_SUBTREE_TAIL(NODE) \
11404 goto tail_recurse; \
11409 /* Skip empty subtrees. */
11413 /* Don't walk the same tree twice, if the user has requested
11414 that we avoid doing so. */
11415 if (pset
&& pset
->add (*tp
))
11418 /* Call the function. */
11419 int walk_subtrees
= 1;
11420 tree result
= (*func
) (tp
, &walk_subtrees
, data
);
11422 /* If we found something, return it. */
11427 tree_code code
= TREE_CODE (t
);
11429 /* Even if we didn't, FUNC may have decided that there was nothing
11430 interesting below this point in the tree. */
11431 if (!walk_subtrees
)
11433 /* But we still need to check our siblings. */
11434 if (code
== TREE_LIST
)
11435 WALK_SUBTREE_TAIL (TREE_CHAIN (t
));
11436 else if (code
== OMP_CLAUSE
)
11437 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (t
));
11444 result
= (*lh
) (tp
, &walk_subtrees
, func
, data
, pset
);
11445 if (result
|| !walk_subtrees
)
11452 case IDENTIFIER_NODE
:
11458 case PLACEHOLDER_EXPR
:
11462 /* None of these have subtrees other than those already walked
11467 WALK_SUBTREE (TREE_VALUE (t
));
11468 WALK_SUBTREE_TAIL (TREE_CHAIN (t
));
11472 int len
= TREE_VEC_LENGTH (t
);
11477 /* Walk all elements but the last. */
11478 for (int i
= 0; i
< len
- 1; ++i
)
11479 WALK_SUBTREE (TREE_VEC_ELT (t
, i
));
11481 /* Now walk the last one as a tail call. */
11482 WALK_SUBTREE_TAIL (TREE_VEC_ELT (t
, len
- 1));
11487 unsigned len
= vector_cst_encoded_nelts (t
);
11490 /* Walk all elements but the last. */
11491 for (unsigned i
= 0; i
< len
- 1; ++i
)
11492 WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (t
, i
));
11493 /* Now walk the last one as a tail call. */
11494 WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (t
, len
- 1));
11498 WALK_SUBTREE (TREE_REALPART (t
));
11499 WALK_SUBTREE_TAIL (TREE_IMAGPART (t
));
11503 unsigned HOST_WIDE_INT idx
;
11504 constructor_elt
*ce
;
11506 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t
), idx
, &ce
);
11508 WALK_SUBTREE (ce
->value
);
11513 WALK_SUBTREE_TAIL (TREE_OPERAND (t
, 0));
11518 for (decl
= BIND_EXPR_VARS (t
); decl
; decl
= DECL_CHAIN (decl
))
11520 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11521 into declarations that are just mentioned, rather than
11522 declared; they don't really belong to this part of the tree.
11523 And, we can see cycles: the initializer for a declaration
11524 can refer to the declaration itself. */
11525 WALK_SUBTREE (DECL_INITIAL (decl
));
11526 WALK_SUBTREE (DECL_SIZE (decl
));
11527 WALK_SUBTREE (DECL_SIZE_UNIT (decl
));
11529 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (t
));
11532 case STATEMENT_LIST
:
11534 tree_stmt_iterator i
;
11535 for (i
= tsi_start (t
); !tsi_end_p (i
); tsi_next (&i
))
11536 WALK_SUBTREE (*tsi_stmt_ptr (i
));
11542 int len
= omp_clause_num_ops
[OMP_CLAUSE_CODE (t
)];
11543 for (int i
= 0; i
< len
; i
++)
11544 WALK_SUBTREE (OMP_CLAUSE_OPERAND (t
, i
));
11545 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (t
));
11552 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11553 But, we only want to walk once. */
11554 len
= (TREE_OPERAND (t
, 3) == TREE_OPERAND (t
, 1)) ? 2 : 3;
11555 for (i
= 0; i
< len
; ++i
)
11556 WALK_SUBTREE (TREE_OPERAND (t
, i
));
11557 WALK_SUBTREE_TAIL (TREE_OPERAND (t
, len
));
11561 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11562 defining. We only want to walk into these fields of a type in this
11563 case and not in the general case of a mere reference to the type.
11565 The criterion is as follows: if the field can be an expression, it
11566 must be walked only here. This should be in keeping with the fields
11567 that are directly gimplified in gimplify_type_sizes in order for the
11568 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11569 variable-sized types.
11571 Note that DECLs get walked as part of processing the BIND_EXPR. */
11572 if (TREE_CODE (DECL_EXPR_DECL (t
)) == TYPE_DECL
)
11574 /* Call the function for the decl so e.g. copy_tree_body_r can
11575 replace it with the remapped one. */
11576 result
= (*func
) (&DECL_EXPR_DECL (t
), &walk_subtrees
, data
);
11577 if (result
|| !walk_subtrees
)
11580 tree
*type_p
= &TREE_TYPE (DECL_EXPR_DECL (t
));
11581 if (TREE_CODE (*type_p
) == ERROR_MARK
)
11584 /* Call the function for the type. See if it returns anything or
11585 doesn't want us to continue. If we are to continue, walk both
11586 the normal fields and those for the declaration case. */
11587 result
= (*func
) (type_p
, &walk_subtrees
, data
);
11588 if (result
|| !walk_subtrees
)
11591 tree type
= *type_p
;
11593 /* But do not walk a pointed-to type since it may itself need to
11594 be walked in the declaration case if it isn't anonymous. */
11595 if (!POINTER_TYPE_P (type
))
11597 result
= walk_type_fields (type
, func
, data
, pset
, lh
);
11602 /* If this is a record type, also walk the fields. */
11603 if (RECORD_OR_UNION_TYPE_P (type
))
11607 for (field
= TYPE_FIELDS (type
); field
;
11608 field
= DECL_CHAIN (field
))
11610 /* We'd like to look at the type of the field, but we can
11611 easily get infinite recursion. So assume it's pointed
11612 to elsewhere in the tree. Also, ignore things that
11614 if (TREE_CODE (field
) != FIELD_DECL
)
11617 WALK_SUBTREE (DECL_FIELD_OFFSET (field
));
11618 WALK_SUBTREE (DECL_SIZE (field
));
11619 WALK_SUBTREE (DECL_SIZE_UNIT (field
));
11620 if (TREE_CODE (type
) == QUAL_UNION_TYPE
)
11621 WALK_SUBTREE (DECL_QUALIFIER (field
));
11625 /* Same for scalar types. */
11626 else if (TREE_CODE (type
) == BOOLEAN_TYPE
11627 || TREE_CODE (type
) == ENUMERAL_TYPE
11628 || TREE_CODE (type
) == INTEGER_TYPE
11629 || TREE_CODE (type
) == FIXED_POINT_TYPE
11630 || TREE_CODE (type
) == REAL_TYPE
)
11632 WALK_SUBTREE (TYPE_MIN_VALUE (type
));
11633 WALK_SUBTREE (TYPE_MAX_VALUE (type
));
11636 WALK_SUBTREE (TYPE_SIZE (type
));
11637 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (type
));
11642 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
11646 /* Walk over all the sub-trees of this operand. */
11647 len
= TREE_OPERAND_LENGTH (t
);
11649 /* Go through the subtrees. We need to do this in forward order so
11650 that the scope of a FOR_EXPR is handled properly. */
11653 for (i
= 0; i
< len
- 1; ++i
)
11654 WALK_SUBTREE (TREE_OPERAND (t
, i
));
11655 WALK_SUBTREE_TAIL (TREE_OPERAND (t
, len
- 1));
11658 /* If this is a type, walk the needed fields in the type. */
11659 else if (TYPE_P (t
))
11660 return walk_type_fields (t
, func
, data
, pset
, lh
);
11664 /* We didn't find what we were looking for. */
11667 #undef WALK_SUBTREE_TAIL
11669 #undef WALK_SUBTREE
11671 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11674 walk_tree_without_duplicates_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11679 hash_set
<tree
> pset
;
11680 result
= walk_tree_1 (tp
, func
, data
, &pset
, lh
);
tree
tree_block (tree t)
{
  const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));

  if (IS_EXPR_CODE_CLASS (c))
    return LOCATION_BLOCK (t->exp.locus);
  gcc_unreachable ();
  return NULL;
}

void
tree_set_block (tree t, tree b)
{
  const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));

  if (IS_EXPR_CODE_CLASS (c))
    t->exp.locus = set_block (t->exp.locus, b);
  else
    gcc_unreachable ();
}
/* Create a nameless artificial label and put it in the current
   function context.  The label has a location of LOC.  Returns the
   newly created label.  */

tree
create_artificial_label (location_t loc)
{
  tree lab = build_decl (loc,
                         LABEL_DECL, NULL_TREE, void_type_node);

  DECL_ARTIFICIAL (lab) = 1;
  DECL_IGNORED_P (lab) = 1;
  DECL_CONTEXT (lab) = current_function_decl;
  return lab;
}
11725 /* Given a tree, try to return a useful variable name that we can use
11726 to prefix a temporary that is being assigned the value of the tree.
11727 I.E. given <temp> = &A, return A. */
11732 tree stripped_decl
;
11735 STRIP_NOPS (stripped_decl
);
11736 if (DECL_P (stripped_decl
) && DECL_NAME (stripped_decl
))
11737 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl
));
11738 else if (TREE_CODE (stripped_decl
) == SSA_NAME
)
11740 tree name
= SSA_NAME_IDENTIFIER (stripped_decl
);
11743 return IDENTIFIER_POINTER (name
);
11747 switch (TREE_CODE (stripped_decl
))
11750 return get_name (TREE_OPERAND (stripped_decl
, 0));
11757 /* Return true if TYPE has a variable argument list. */
11760 stdarg_p (const_tree fntype
)
11762 function_args_iterator args_iter
;
11763 tree n
= NULL_TREE
, t
;
11768 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype
))
11771 FOREACH_FUNCTION_ARGS (fntype
, t
, args_iter
)
11776 return n
!= NULL_TREE
&& n
!= void_type_node
;
11779 /* Return true if TYPE has a prototype. */
11782 prototype_p (const_tree fntype
)
11786 gcc_assert (fntype
!= NULL_TREE
);
11788 if (TYPE_NO_NAMED_ARGS_STDARG_P (fntype
))
11791 t
= TYPE_ARG_TYPES (fntype
);
11792 return (t
!= NULL_TREE
);
11795 /* If BLOCK is inlined from an __attribute__((__artificial__))
11796 routine, return pointer to location from where it has been
11799 block_nonartificial_location (tree block
)
11801 location_t
*ret
= NULL
;
11803 while (block
&& TREE_CODE (block
) == BLOCK
11804 && BLOCK_ABSTRACT_ORIGIN (block
))
11806 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11807 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11809 /* If AO is an artificial inline, point RET to the
11810 call site locus at which it has been inlined and continue
11811 the loop, in case AO's caller is also an artificial
11813 if (DECL_DECLARED_INLINE_P (ao
)
11814 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao
)))
11815 ret
= &BLOCK_SOURCE_LOCATION (block
);
11819 else if (TREE_CODE (ao
) != BLOCK
)
11822 block
= BLOCK_SUPERCONTEXT (block
);
11828 /* If EXP is inlined from an __attribute__((__artificial__))
11829 function, return the location of the original call expression. */
11832 tree_nonartificial_location (tree exp
)
11834 location_t
*loc
= block_nonartificial_location (TREE_BLOCK (exp
));
11839 return EXPR_LOCATION (exp
);
11842 /* Return the location into which EXP has been inlined. Analogous
11843 to tree_nonartificial_location() above but not limited to artificial
11844 functions declared inline. If SYSTEM_HEADER is true, return
11845 the macro expansion point of the location if it's in a system header */
11848 tree_inlined_location (tree exp
, bool system_header
/* = true */)
11850 location_t loc
= UNKNOWN_LOCATION
;
11852 tree block
= TREE_BLOCK (exp
);
11854 while (block
&& TREE_CODE (block
) == BLOCK
11855 && BLOCK_ABSTRACT_ORIGIN (block
))
11857 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11858 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11859 loc
= BLOCK_SOURCE_LOCATION (block
);
11860 else if (TREE_CODE (ao
) != BLOCK
)
11863 block
= BLOCK_SUPERCONTEXT (block
);
11866 if (loc
== UNKNOWN_LOCATION
)
11868 loc
= EXPR_LOCATION (exp
);
11870 /* Only consider macro expansion when the block traversal failed
11871 to find a location. Otherwise it's not relevant. */
11872 return expansion_point_location_if_in_system_header (loc
);
11878 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11881 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11884 cl_option_hasher::hash (tree x
)
11886 const_tree
const t
= x
;
11888 if (TREE_CODE (t
) == OPTIMIZATION_NODE
)
11889 return cl_optimization_hash (TREE_OPTIMIZATION (t
));
11890 else if (TREE_CODE (t
) == TARGET_OPTION_NODE
)
11891 return cl_target_option_hash (TREE_TARGET_OPTION (t
));
11893 gcc_unreachable ();
11896 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11897 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11901 cl_option_hasher::equal (tree x
, tree y
)
11903 const_tree
const xt
= x
;
11904 const_tree
const yt
= y
;
11906 if (TREE_CODE (xt
) != TREE_CODE (yt
))
11909 if (TREE_CODE (xt
) == OPTIMIZATION_NODE
)
11910 return cl_optimization_option_eq (TREE_OPTIMIZATION (xt
),
11911 TREE_OPTIMIZATION (yt
));
11912 else if (TREE_CODE (xt
) == TARGET_OPTION_NODE
)
11913 return cl_target_option_eq (TREE_TARGET_OPTION (xt
),
11914 TREE_TARGET_OPTION (yt
));
11916 gcc_unreachable ();
11919 /* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET. */
11922 build_optimization_node (struct gcc_options
*opts
,
11923 struct gcc_options
*opts_set
)
11927 /* Use the cache of optimization nodes. */
11929 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node
),
11932 tree
*slot
= cl_option_hash_table
->find_slot (cl_optimization_node
, INSERT
);
11936 /* Insert this one into the hash table. */
11937 t
= cl_optimization_node
;
11940 /* Make a new node for next time round. */
11941 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
11947 /* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET. */
11950 build_target_option_node (struct gcc_options
*opts
,
11951 struct gcc_options
*opts_set
)
11955 /* Use the cache of optimization nodes. */
11957 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node
),
11960 tree
*slot
= cl_option_hash_table
->find_slot (cl_target_option_node
, INSERT
);
11964 /* Insert this one into the hash table. */
11965 t
= cl_target_option_node
;
11968 /* Make a new node for next time round. */
11969 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
11975 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11976 so that they aren't saved during PCH writing. */
11979 prepare_target_option_nodes_for_pch (void)
11981 hash_table
<cl_option_hasher
>::iterator iter
= cl_option_hash_table
->begin ();
11982 for (; iter
!= cl_option_hash_table
->end (); ++iter
)
11983 if (TREE_CODE (*iter
) == TARGET_OPTION_NODE
)
11984 TREE_TARGET_GLOBALS (*iter
) = NULL
;
11987 /* Determine the "ultimate origin" of a block. */
11990 block_ultimate_origin (const_tree block
)
11992 tree origin
= BLOCK_ABSTRACT_ORIGIN (block
);
11994 if (origin
== NULL_TREE
)
11998 gcc_checking_assert ((DECL_P (origin
)
11999 && DECL_ORIGIN (origin
) == origin
)
12000 || BLOCK_ORIGIN (origin
) == origin
);
/* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
   no instruction.  */

bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Do not strip casts into or out of differing address spaces.  */
  if (POINTER_TYPE_P (outer_type)
      && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
    {
      if (!POINTER_TYPE_P (inner_type)
          || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
              != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
        return false;
    }
  else if (POINTER_TYPE_P (inner_type)
           && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
    {
      /* We already know that outer_type is not a pointer with
         a non-generic address space.  */
      return false;
    }

  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
          || POINTER_TYPE_P (inner_type)
          || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}
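
/* For example (illustrative): a cast between 'int' and 'unsigned int' is a
   nop conversion here because both have the same precision, whereas a cast
   from 'short' to 'int' is not, and a cast between pointers into different
   named address spaces is never stripped.  */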
12043 /* Return true iff conversion in EXP generates no instruction. Mark
12044 it inline so that we fully inline into the stripping functions even
12045 though we have two uses of this function. */
12048 tree_nop_conversion (const_tree exp
)
12050 tree outer_type
, inner_type
;
12052 if (location_wrapper_p (exp
))
12054 if (!CONVERT_EXPR_P (exp
)
12055 && TREE_CODE (exp
) != NON_LVALUE_EXPR
)
12058 outer_type
= TREE_TYPE (exp
);
12059 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
12060 if (!inner_type
|| inner_type
== error_mark_node
)
12063 return tree_nop_conversion_p (outer_type
, inner_type
);
12066 /* Return true iff conversion in EXP generates no instruction. Don't
12067 consider conversions changing the signedness. */
12070 tree_sign_nop_conversion (const_tree exp
)
12072 tree outer_type
, inner_type
;
12074 if (!tree_nop_conversion (exp
))
12077 outer_type
= TREE_TYPE (exp
);
12078 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
12080 return (TYPE_UNSIGNED (outer_type
) == TYPE_UNSIGNED (inner_type
)
12081 && POINTER_TYPE_P (outer_type
) == POINTER_TYPE_P (inner_type
));
12084 /* Strip conversions from EXP according to tree_nop_conversion and
12085 return the resulting expression. */
12088 tree_strip_nop_conversions (tree exp
)
12090 while (tree_nop_conversion (exp
))
12091 exp
= TREE_OPERAND (exp
, 0);
12095 /* Strip conversions from EXP according to tree_sign_nop_conversion
12096 and return the resulting expression. */
12099 tree_strip_sign_nop_conversions (tree exp
)
12101 while (tree_sign_nop_conversion (exp
))
12102 exp
= TREE_OPERAND (exp
, 0);
12106 /* Avoid any floating point extensions from EXP. */
12108 strip_float_extensions (tree exp
)
12110 tree sub
, expt
, subt
;
12112 /* For floating point constant look up the narrowest type that can hold
12113 it properly and handle it like (type)(narrowest_type)constant.
12114 This way we can optimize for instance a=a*2.0 where "a" is float
12115 but 2.0 is double constant. */
12116 if (TREE_CODE (exp
) == REAL_CST
&& !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp
)))
12118 REAL_VALUE_TYPE orig
;
12121 orig
= TREE_REAL_CST (exp
);
12122 if (TYPE_PRECISION (TREE_TYPE (exp
)) > TYPE_PRECISION (float_type_node
)
12123 && exact_real_truncate (TYPE_MODE (float_type_node
), &orig
))
12124 type
= float_type_node
;
12125 else if (TYPE_PRECISION (TREE_TYPE (exp
))
12126 > TYPE_PRECISION (double_type_node
)
12127 && exact_real_truncate (TYPE_MODE (double_type_node
), &orig
))
12128 type
= double_type_node
;
12130 return build_real_truncate (type
, orig
);
12133 if (!CONVERT_EXPR_P (exp
))
12136 sub
= TREE_OPERAND (exp
, 0);
12137 subt
= TREE_TYPE (sub
);
12138 expt
= TREE_TYPE (exp
);
12140 if (!FLOAT_TYPE_P (subt
))
12143 if (DECIMAL_FLOAT_TYPE_P (expt
) != DECIMAL_FLOAT_TYPE_P (subt
))
12146 if (element_precision (subt
) > element_precision (expt
))
12149 return strip_float_extensions (sub
);
12152 /* Strip out all handled components that produce invariant
12156 strip_invariant_refs (const_tree op
)
12158 while (handled_component_p (op
))
12160 switch (TREE_CODE (op
))
12163 case ARRAY_RANGE_REF
:
12164 if (!is_gimple_constant (TREE_OPERAND (op
, 1))
12165 || TREE_OPERAND (op
, 2) != NULL_TREE
12166 || TREE_OPERAND (op
, 3) != NULL_TREE
)
12170 case COMPONENT_REF
:
12171 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
12177 op
= TREE_OPERAND (op
, 0);
12183 /* Strip handled components with zero offset from OP. */
12186 strip_zero_offset_components (tree op
)
12188 while (TREE_CODE (op
) == COMPONENT_REF
12189 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (op
, 1)))
12190 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (op
, 1))))
12191 op
= TREE_OPERAND (op
, 0);
12195 static GTY(()) tree gcc_eh_personality_decl
;
12197 /* Return the GCC personality function decl. */
12200 lhd_gcc_personality (void)
12202 if (!gcc_eh_personality_decl
)
12203 gcc_eh_personality_decl
= build_personality_function ("gcc");
12204 return gcc_eh_personality_decl
;
12207 /* TARGET is a call target of GIMPLE call statement
12208 (obtained by gimple_call_fn). Return true if it is
   OBJ_TYPE_REF representing a virtual call of C++ method.
12210 (As opposed to OBJ_TYPE_REF representing objc calls
12211 through a cast where middle-end devirtualization machinery
12212 can't apply.) FOR_DUMP_P is true when being called from
12213 the dump routines. */
12216 virtual_method_call_p (const_tree target
, bool for_dump_p
)
12218 if (TREE_CODE (target
) != OBJ_TYPE_REF
)
12220 tree t
= TREE_TYPE (target
);
12221 gcc_checking_assert (TREE_CODE (t
) == POINTER_TYPE
);
12223 if (TREE_CODE (t
) == FUNCTION_TYPE
)
12225 gcc_checking_assert (TREE_CODE (t
) == METHOD_TYPE
);
12226 /* If we do not have BINFO associated, it means that type was built
12227 without devirtualization enabled. Do not consider this a virtual
12229 if (!TYPE_BINFO (obj_type_ref_class (target
, for_dump_p
)))
12234 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12237 lookup_binfo_at_offset (tree binfo
, tree type
, HOST_WIDE_INT pos
)
12240 tree base_binfo
, b
;
12242 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
12243 if (pos
== tree_to_shwi (BINFO_OFFSET (base_binfo
))
12244 && types_same_for_odr (TREE_TYPE (base_binfo
), type
))
12246 else if ((b
= lookup_binfo_at_offset (base_binfo
, type
, pos
)) != NULL
)
12251 /* Try to find a base info of BINFO that would have its field decl at offset
12252 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12253 found, return, otherwise return NULL_TREE. */
12256 get_binfo_at_offset (tree binfo
, poly_int64 offset
, tree expected_type
)
12258 tree type
= BINFO_TYPE (binfo
);
12262 HOST_WIDE_INT pos
, size
;
12266 if (types_same_for_odr (type
, expected_type
))
12268 if (maybe_lt (offset
, 0))
12271 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
12273 if (TREE_CODE (fld
) != FIELD_DECL
|| !DECL_ARTIFICIAL (fld
))
12276 pos
= int_bit_position (fld
);
12277 size
= tree_to_uhwi (DECL_SIZE (fld
));
12278 if (known_in_range_p (offset
, pos
, size
))
12281 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
12284 /* Offset 0 indicates the primary base, whose vtable contents are
12285 represented in the binfo for the derived class. */
12286 else if (maybe_ne (offset
, 0))
12288 tree found_binfo
= NULL
, base_binfo
;
12289 /* Offsets in BINFO are in bytes relative to the whole structure
12290 while POS is in bits relative to the containing field. */
12291 int binfo_offset
= (tree_to_shwi (BINFO_OFFSET (binfo
)) + pos
12294 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
12295 if (tree_to_shwi (BINFO_OFFSET (base_binfo
)) == binfo_offset
12296 && types_same_for_odr (TREE_TYPE (base_binfo
), TREE_TYPE (fld
)))
12298 found_binfo
= base_binfo
;
12302 binfo
= found_binfo
;
12304 binfo
= lookup_binfo_at_offset (binfo
, TREE_TYPE (fld
),
12308 type
= TREE_TYPE (fld
);
12313 /* PR 84195: Replace control characters in "unescaped" with their
12314 escaped equivalents. Allow newlines if -fmessage-length has
12315 been set to a non-zero value. This is done here, rather than
12316 where the attribute is recorded as the message length can
12317 change between these two locations. */
12320 escaped_string::escape (const char *unescaped
)
12323 size_t i
, new_i
, len
;
12328 m_str
= const_cast<char *> (unescaped
);
12331 if (unescaped
== NULL
|| *unescaped
== 0)
12334 len
= strlen (unescaped
);
12338 for (i
= 0; i
< len
; i
++)
12340 char c
= unescaped
[i
];
12345 escaped
[new_i
++] = c
;
12349 if (c
!= '\n' || !pp_is_wrapping_line (global_dc
->printer
))
12351 if (escaped
== NULL
)
12353 /* We only allocate space for a new string if we
12354 actually encounter a control character that
12355 needs replacing. */
12356 escaped
= (char *) xmalloc (len
* 2 + 1);
12357 strncpy (escaped
, unescaped
, i
);
12361 escaped
[new_i
++] = '\\';
12365 case '\a': escaped
[new_i
++] = 'a'; break;
12366 case '\b': escaped
[new_i
++] = 'b'; break;
12367 case '\f': escaped
[new_i
++] = 'f'; break;
12368 case '\n': escaped
[new_i
++] = 'n'; break;
12369 case '\r': escaped
[new_i
++] = 'r'; break;
12370 case '\t': escaped
[new_i
++] = 't'; break;
12371 case '\v': escaped
[new_i
++] = 'v'; break;
12372 default: escaped
[new_i
++] = '?'; break;
12376 escaped
[new_i
++] = c
;
12381 escaped
[new_i
] = 0;
12387 /* Warn about a use of an identifier which was marked deprecated. Returns
12388 whether a warning was given. */
12391 warn_deprecated_use (tree node
, tree attr
)
12393 escaped_string msg
;
12395 if (node
== 0 || !warn_deprecated_decl
)
12401 attr
= DECL_ATTRIBUTES (node
);
12402 else if (TYPE_P (node
))
12404 tree decl
= TYPE_STUB_DECL (node
);
12406 attr
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
12407 else if ((decl
= TYPE_STUB_DECL (TYPE_MAIN_VARIANT (node
)))
12410 node
= TREE_TYPE (decl
);
12411 attr
= TYPE_ATTRIBUTES (node
);
12417 attr
= lookup_attribute ("deprecated", attr
);
12420 msg
.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
12425 auto_diagnostic_group d
;
12427 w
= warning (OPT_Wdeprecated_declarations
,
12428 "%qD is deprecated: %s", node
, (const char *) msg
);
12430 w
= warning (OPT_Wdeprecated_declarations
,
12431 "%qD is deprecated", node
);
12433 inform (DECL_SOURCE_LOCATION (node
), "declared here");
12435 else if (TYPE_P (node
))
12437 tree what
= NULL_TREE
;
12438 tree decl
= TYPE_STUB_DECL (node
);
12440 if (TYPE_NAME (node
))
12442 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
12443 what
= TYPE_NAME (node
);
12444 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
12445 && DECL_NAME (TYPE_NAME (node
)))
12446 what
= DECL_NAME (TYPE_NAME (node
));
12449 auto_diagnostic_group d
;
12453 w
= warning (OPT_Wdeprecated_declarations
,
12454 "%qE is deprecated: %s", what
, (const char *) msg
);
12456 w
= warning (OPT_Wdeprecated_declarations
,
12457 "%qE is deprecated", what
);
12462 w
= warning (OPT_Wdeprecated_declarations
,
12463 "type is deprecated: %s", (const char *) msg
);
12465 w
= warning (OPT_Wdeprecated_declarations
,
12466 "type is deprecated");
12470 inform (DECL_SOURCE_LOCATION (decl
), "declared here");
12476 /* Error out with an identifier which was marked 'unavailable'. */
12478 error_unavailable_use (tree node
, tree attr
)
12480 escaped_string msg
;
12488 attr
= DECL_ATTRIBUTES (node
);
12489 else if (TYPE_P (node
))
12491 tree decl
= TYPE_STUB_DECL (node
);
12493 attr
= lookup_attribute ("unavailable",
12494 TYPE_ATTRIBUTES (TREE_TYPE (decl
)));
12499 attr
= lookup_attribute ("unavailable", attr
);
12502 msg
.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
12506 auto_diagnostic_group d
;
12508 error ("%qD is unavailable: %s", node
, (const char *) msg
);
12510 error ("%qD is unavailable", node
);
12511 inform (DECL_SOURCE_LOCATION (node
), "declared here");
12513 else if (TYPE_P (node
))
12515 tree what
= NULL_TREE
;
12516 tree decl
= TYPE_STUB_DECL (node
);
12518 if (TYPE_NAME (node
))
12520 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
12521 what
= TYPE_NAME (node
);
12522 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
12523 && DECL_NAME (TYPE_NAME (node
)))
12524 what
= DECL_NAME (TYPE_NAME (node
));
12527 auto_diagnostic_group d
;
12531 error ("%qE is unavailable: %s", what
, (const char *) msg
);
12533 error ("%qE is unavailable", what
);
12538 error ("type is unavailable: %s", (const char *) msg
);
12540 error ("type is unavailable");
12544 inform (DECL_SOURCE_LOCATION (decl
), "declared here");
12548 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12549 somewhere in it. */
12552 contains_bitfld_component_ref_p (const_tree ref
)
12554 while (handled_component_p (ref
))
12556 if (TREE_CODE (ref
) == COMPONENT_REF
12557 && DECL_BIT_FIELD (TREE_OPERAND (ref
, 1)))
12559 ref
= TREE_OPERAND (ref
, 0);
12565 /* Try to determine whether a TRY_CATCH expression can fall through.
12566 This is a subroutine of block_may_fallthru. */
12569 try_catch_may_fallthru (const_tree stmt
)
12571 tree_stmt_iterator i
;
12573 /* If the TRY block can fall through, the whole TRY_CATCH can
12575 if (block_may_fallthru (TREE_OPERAND (stmt
, 0)))
12578 switch (TREE_CODE (TREE_OPERAND (stmt
, 1)))
12582 return block_may_fallthru (CATCH_BODY (TREE_OPERAND (stmt
, 1)));
12584 case EH_FILTER_EXPR
:
12586 return block_may_fallthru (EH_FILTER_FAILURE (TREE_OPERAND (stmt
, 1)));
12588 case STATEMENT_LIST
:
12596 i
= tsi_start (TREE_OPERAND (stmt
, 1));
12597 switch (TREE_CODE (tsi_stmt (i
)))
12600 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12601 catch expression and a body. The whole TRY_CATCH may fall
12602 through iff any of the catch bodies falls through. */
12603 for (; !tsi_end_p (i
); tsi_next (&i
))
12605 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i
))))
12610 case EH_FILTER_EXPR
:
12611 /* The exception filter expression only matters if there is an
12612 exception. If the exception does not match EH_FILTER_TYPES,
12613 we will execute EH_FILTER_FAILURE, and we will fall through
12614 if that falls through. If the exception does match
12615 EH_FILTER_TYPES, the stack unwinder will continue up the
12616 stack, so we will not fall through. We don't know whether we
12617 will throw an exception which matches EH_FILTER_TYPES or not,
12618 so we just ignore EH_FILTER_TYPES and assume that we might
12619 throw an exception which doesn't match. */
12620 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i
)));
12623 /* This case represents statements to be executed when an
12624 exception occurs. Those statements are implicitly followed
12625 by a RESX statement to resume execution after the exception.
12626 So in this case the TRY_CATCH never falls through. */
12631 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12632 need not be 100% accurate; simply be conservative and return true if we
12633 don't know. This is used only to avoid stupidly generating extra code.
12634 If we're wrong, we'll just delete the extra code later. */
12637 block_may_fallthru (const_tree block
)
12639 /* This CONST_CAST is okay because expr_last returns its argument
12640 unmodified and we assign it to a const_tree. */
12641 const_tree stmt
= expr_last (CONST_CAST_TREE (block
));
12643 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
12647 /* Easy cases. If the last statement of the block implies
12648 control transfer, then we can't fall through. */
12652 /* If there is a default: label or case labels cover all possible
12653 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12654 to some case label in all cases and all we care is whether the
12655 SWITCH_BODY falls through. */
12656 if (SWITCH_ALL_CASES_P (stmt
))
12657 return block_may_fallthru (SWITCH_BODY (stmt
));
12661 if (block_may_fallthru (COND_EXPR_THEN (stmt
)))
12663 return block_may_fallthru (COND_EXPR_ELSE (stmt
));
12666 return block_may_fallthru (BIND_EXPR_BODY (stmt
));
12668 case TRY_CATCH_EXPR
:
12669 return try_catch_may_fallthru (stmt
);
12671 case TRY_FINALLY_EXPR
:
12672 /* The finally clause is always executed after the try clause,
12673 so if it does not fall through, then the try-finally will not
12674 fall through. Otherwise, if the try clause does not fall
12675 through, then when the finally clause falls through it will
12676 resume execution wherever the try clause was going. So the
12677 whole try-finally will only fall through if both the try
12678 clause and the finally clause fall through. */
12679 return (block_may_fallthru (TREE_OPERAND (stmt
, 0))
12680 && block_may_fallthru (TREE_OPERAND (stmt
, 1)));
12683 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12686 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
12687 stmt
= TREE_OPERAND (stmt
, 1);
12693 /* Functions that do not return do not fall through. */
12694 return (call_expr_flags (stmt
) & ECF_NORETURN
) == 0;
12696 case CLEANUP_POINT_EXPR
:
12697 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12700 return block_may_fallthru (TREE_OPERAND (stmt
, 1));
12706 return lang_hooks
.block_may_fallthru (stmt
);
/* True if we are using EH to handle cleanups.  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}

/* Query whether EH is used for cleanups.  */

bool
using_eh_for_cleanups_p (void)
{
  return using_eh_for_cleanups_flag;
}
/* Wrapper for tree_code_name to ensure that tree code is valid.  */

const char *
get_tree_code_name (enum tree_code code)
{
  const char *invalid = "<invalid tree code>";

  /* The tree_code enum promotes to signed, but we could be getting
     invalid values, so force an unsigned comparison.  */
  if (unsigned (code) >= MAX_TREE_CODES)
    {
      if ((unsigned) code == 0xa5a5)
        return "ggc_freed";
      return invalid;
    }

  return tree_code_name[code];
}
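
/* Hypothetical usage sketch (assumed context): diagnostics and dump routines
   call this instead of indexing tree_code_name directly, e.g.

     fprintf (dump_file, "unexpected %s\n",
              get_tree_code_name (TREE_CODE (t)));

   so that a wild code -- including the 0xa5a5 pattern left behind by ggc
   freeing -- prints something readable instead of reading past the table.  */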
12746 /* Drops the TREE_OVERFLOW flag from T. */
12749 drop_tree_overflow (tree t
)
12751 gcc_checking_assert (TREE_OVERFLOW (t
));
12753 /* For tree codes with a sharing machinery re-build the result. */
12754 if (poly_int_tree_p (t
))
12755 return wide_int_to_tree (TREE_TYPE (t
), wi::to_poly_wide (t
));
12757 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12758 and canonicalize the result. */
12759 if (TREE_CODE (t
) == VECTOR_CST
)
12761 tree_vector_builder builder
;
12762 builder
.new_unary_operation (TREE_TYPE (t
), t
, true);
12763 unsigned int count
= builder
.encoded_nelts ();
12764 for (unsigned int i
= 0; i
< count
; ++i
)
12766 tree elt
= VECTOR_CST_ELT (t
, i
);
12767 if (TREE_OVERFLOW (elt
))
12768 elt
= drop_tree_overflow (elt
);
12769 builder
.quick_push (elt
);
12771 return builder
.build ();
12774 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12775 and drop the flag. */
12777 TREE_OVERFLOW (t
) = 0;
12779 /* For constants that contain nested constants, drop the flag
12780 from those as well. */
12781 if (TREE_CODE (t
) == COMPLEX_CST
)
12783 if (TREE_OVERFLOW (TREE_REALPART (t
)))
12784 TREE_REALPART (t
) = drop_tree_overflow (TREE_REALPART (t
));
12785 if (TREE_OVERFLOW (TREE_IMAGPART (t
)))
12786 TREE_IMAGPART (t
) = drop_tree_overflow (TREE_IMAGPART (t
));
/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if ((TREE_CODE (t) == MEM_REF
       || TREE_CODE (t) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);

  return t;
}
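
/* For instance (illustrative): for the reference 'a.b[i].c' the function
   returns the VAR_DECL 'a', and for '(*&x).f' the MEM_REF of an ADDR_EXPR
   is collapsed so the VAR_DECL 'x' itself is returned.  */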
12817 /* Return a tree of sizetype representing the size, in bytes, of the element
12818 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12821 array_ref_element_size (tree exp
)
12823 tree aligned_size
= TREE_OPERAND (exp
, 3);
12824 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12825 location_t loc
= EXPR_LOCATION (exp
);
12827 /* If a size was specified in the ARRAY_REF, it's the size measured
12828 in alignment units of the element type. So multiply by that value. */
12831 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12832 sizetype from another type of the same width and signedness. */
12833 if (TREE_TYPE (aligned_size
) != sizetype
)
12834 aligned_size
= fold_convert_loc (loc
, sizetype
, aligned_size
);
12835 return size_binop_loc (loc
, MULT_EXPR
, aligned_size
,
12836 size_int (TYPE_ALIGN_UNIT (elmt_type
)));
12839 /* Otherwise, take the size from that of the element type. Substitute
12840 any PLACEHOLDER_EXPR that we have. */
12842 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type
), exp
);
12845 /* Return a tree representing the lower bound of the array mentioned in
12846 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12849 array_ref_low_bound (tree exp
)
12851 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12853 /* If a lower bound is specified in EXP, use it. */
12854 if (TREE_OPERAND (exp
, 2))
12855 return TREE_OPERAND (exp
, 2);
12857 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12858 substituting for a PLACEHOLDER_EXPR as needed. */
12859 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
12860 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type
), exp
);
12862 /* Otherwise, return a zero of the appropriate type. */
12863 tree idxtype
= TREE_TYPE (TREE_OPERAND (exp
, 1));
12864 return (idxtype
== error_mark_node
12865 ? integer_zero_node
: build_int_cst (idxtype
, 0));
12868 /* Return a tree representing the upper bound of the array mentioned in
12869 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12872 array_ref_up_bound (tree exp
)
12874 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12876 /* If there is a domain type and it has an upper bound, use it, substituting
12877 for a PLACEHOLDER_EXPR as needed. */
12878 if (domain_type
&& TYPE_MAX_VALUE (domain_type
))
12879 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type
), exp
);
12881 /* Otherwise fail. */
12885 /* Returns true if REF is an array reference, a component reference,
12886 or a memory reference to an array whose actual size might be larger
12887 than its upper bound implies, there are multiple cases:
12888 A. a ref to a flexible array member at the end of a structure;
12889 B. a ref to an array with a different type against the original decl;
12892 short a[16] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 };
12893 (*((char(*)[16])&a[0]))[i+8]
12895 C. a ref to an array that was passed as a parameter;
12898 int test (uint8_t *p, uint32_t t[1][1], int n) {
12899 for (int i = 0; i < 4; i++, p++)
12902 If non-null, set IS_TRAILING_ARRAY to true if the ref is the above case A.
12906 array_ref_flexible_size_p (tree ref
, bool *is_trailing_array
/* = NULL */)
  /* The TYPE for this array reference.  */
12909 tree atype
= NULL_TREE
;
12910 /* The FIELD_DECL for the array field in the containing structure. */
12911 tree afield_decl
= NULL_TREE
;
12912 /* Whether this array is the trailing array of a structure. */
12913 bool is_trailing_array_tmp
= false;
12914 if (!is_trailing_array
)
12915 is_trailing_array
= &is_trailing_array_tmp
;
12917 if (TREE_CODE (ref
) == ARRAY_REF
12918 || TREE_CODE (ref
) == ARRAY_RANGE_REF
)
12920 atype
= TREE_TYPE (TREE_OPERAND (ref
, 0));
12921 ref
= TREE_OPERAND (ref
, 0);
12923 else if (TREE_CODE (ref
) == COMPONENT_REF
12924 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 1))) == ARRAY_TYPE
)
12926 atype
= TREE_TYPE (TREE_OPERAND (ref
, 1));
12927 afield_decl
= TREE_OPERAND (ref
, 1);
12929 else if (TREE_CODE (ref
) == MEM_REF
)
12931 tree arg
= TREE_OPERAND (ref
, 0);
12932 if (TREE_CODE (arg
) == ADDR_EXPR
)
12933 arg
= TREE_OPERAND (arg
, 0);
12934 tree argtype
= TREE_TYPE (arg
);
12935 if (TREE_CODE (argtype
) == RECORD_TYPE
)
12937 if (tree fld
= last_field (argtype
))
12939 atype
= TREE_TYPE (fld
);
12941 if (TREE_CODE (atype
) != ARRAY_TYPE
)
12943 if (VAR_P (arg
) && DECL_SIZE (fld
))
12955 if (TREE_CODE (ref
) == STRING_CST
)
12958 tree ref_to_array
= ref
;
12959 while (handled_component_p (ref
))
12961 /* If the reference chain contains a component reference to a
12962 non-union type and there follows another field the reference
12963 is not at the end of a structure. */
12964 if (TREE_CODE (ref
) == COMPONENT_REF
)
12966 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 0))) == RECORD_TYPE
)
12968 tree nextf
= DECL_CHAIN (TREE_OPERAND (ref
, 1));
12969 while (nextf
&& TREE_CODE (nextf
) != FIELD_DECL
)
12970 nextf
= DECL_CHAIN (nextf
);
12975 /* If we have a multi-dimensional array we do not consider
12976 a non-innermost dimension as flex array if the whole
12977 multi-dimensional array is at struct end.
12978 Same for an array of aggregates with a trailing array
12980 else if (TREE_CODE (ref
) == ARRAY_REF
)
12982 else if (TREE_CODE (ref
) == ARRAY_RANGE_REF
)
12984 /* If we view an underlying object as sth else then what we
12985 gathered up to now is what we have to rely on. */
12986 else if (TREE_CODE (ref
) == VIEW_CONVERT_EXPR
)
12989 gcc_unreachable ();
12991 ref
= TREE_OPERAND (ref
, 0);
12994 gcc_assert (!afield_decl
12995 || (afield_decl
&& TREE_CODE (afield_decl
) == FIELD_DECL
));
12997 /* The array now is at struct end. Treat flexible array member as
12998 always subject to extend, even into just padding constrained by
12999 an underlying decl. */
13000 if (! TYPE_SIZE (atype
)
13001 || ! TYPE_DOMAIN (atype
)
13002 || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype
)))
13004 *is_trailing_array
= afield_decl
&& TREE_CODE (afield_decl
) == FIELD_DECL
;
13005 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
13008 /* If the reference is based on a declared entity, the size of the array
13009 is constrained by its given domain. (Do not trust commons PR/69368). */
13010 ref
= get_base_address (ref
);
13013 && !(flag_unconstrained_commons
13014 && VAR_P (ref
) && DECL_COMMON (ref
))
13015 && DECL_SIZE_UNIT (ref
)
13016 && TREE_CODE (DECL_SIZE_UNIT (ref
)) == INTEGER_CST
)
13018 /* If the object itself is the array it is not at struct end. */
13019 if (DECL_P (ref_to_array
))
13022 /* Check whether the array domain covers all of the available
13025 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype
))) != INTEGER_CST
13026 || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype
))) != INTEGER_CST
13027 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype
))) != INTEGER_CST
)
13030 = afield_decl
&& TREE_CODE (afield_decl
) == FIELD_DECL
;
13031 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
13033 if (! get_addr_base_and_unit_offset (ref_to_array
, &offset
))
13036 = afield_decl
&& TREE_CODE (afield_decl
) == FIELD_DECL
;
13037 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
13040 /* If at least one extra element fits it is a flexarray. */
13041 if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype
)))
13042 - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype
)))
13044 * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype
))),
13045 wi::to_offset (DECL_SIZE_UNIT (ref
)) - offset
))
13048 = afield_decl
&& TREE_CODE (afield_decl
) == FIELD_DECL
;
13049 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
13055 *is_trailing_array
= afield_decl
&& TREE_CODE (afield_decl
) == FIELD_DECL
;
13056 return afield_decl
? !DECL_NOT_FLEXARRAY (afield_decl
) : true;
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);
  location_t loc = EXPR_LOCATION (exp);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
			     size_int (DECL_OFFSET_ALIGN (field)
				       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
/* Given the initializer INIT, return the initializer for the field
   DECL if it exists, otherwise null.  Used to obtain the initializer
   for a flexible array member and determine its size.  */

static tree
get_initializer_for (tree init, tree decl)
{
  STRIP_NOPS (init);

  tree fld, fld_init;
  unsigned HOST_WIDE_INT i;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
    {
      if (decl == fld)
	return fld_init;

      if (TREE_CODE (fld) == CONSTRUCTOR)
	{
	  fld_init = get_initializer_for (fld_init, decl);
	  if (fld_init)
	    return fld_init;
	}
    }

  return NULL_TREE;
}
13117 /* Determines the special array member type for the array reference REF. */
13118 special_array_member
13119 component_ref_sam_type (tree ref
)
13121 special_array_member sam_type
= special_array_member::none
;
13123 tree member
= TREE_OPERAND (ref
, 1);
13124 tree memsize
= DECL_SIZE_UNIT (member
);
13127 tree memtype
= TREE_TYPE (member
);
13128 if (TREE_CODE (memtype
) != ARRAY_TYPE
)
13131 bool trailing
= false;
13132 (void) array_ref_flexible_size_p (ref
, &trailing
);
13133 bool zero_elts
= integer_zerop (memsize
);
13134 if (zero_elts
&& integer_zerop (TYPE_SIZE_UNIT (TREE_TYPE (memtype
))))
13136 /* If array element has zero size, verify if it is a flexible
13137 array member or zero length array. Clear zero_elts if
13138 it has one or more members or is a VLA member. */
13139 if (tree dom
= TYPE_DOMAIN (memtype
))
13140 if (tree min
= TYPE_MIN_VALUE (dom
))
13141 if (tree max
= TYPE_MAX_VALUE (dom
))
13142 if (TREE_CODE (min
) != INTEGER_CST
13143 || TREE_CODE (max
) != INTEGER_CST
13144 || !((integer_zerop (min
) && integer_all_onesp (max
))
13145 || tree_int_cst_lt (max
, min
)))
13148 if (!trailing
&& !zero_elts
)
13149 /* MEMBER is an interior array with more than one element. */
13150 return special_array_member::int_n
;
13155 return special_array_member::trail_0
;
13157 return special_array_member::int_0
;
13161 if (tree dom
= TYPE_DOMAIN (memtype
))
13162 if (tree min
= TYPE_MIN_VALUE (dom
))
13163 if (tree max
= TYPE_MAX_VALUE (dom
))
13164 if (TREE_CODE (min
) == INTEGER_CST
13165 && TREE_CODE (max
) == INTEGER_CST
)
13167 offset_int minidx
= wi::to_offset (min
);
13168 offset_int maxidx
= wi::to_offset (max
);
13169 offset_int neltsm1
= maxidx
- minidx
;
13171 /* MEMBER is a trailing array with more than
13173 return special_array_member::trail_n
;
13176 return special_array_member::trail_1
;
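/* For illustration (assumed mapping to the special_array_member values
   declared in tree-core.h):

     struct A { int n; char a[0]; int x; };   // a: int_0   (interior, zero size)
     struct B { int n; char b[2]; int x; };   // b: int_n   (interior, 2+ elements)
     struct C { int n; char c[0]; };          // c: trail_0
     struct D { int n; char d[1]; };          // d: trail_1
     struct E { int n; char e[2]; };          // e: trail_n
     struct F { int n; char f[]; };           // f: none    (true flexible member)  */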
13183 /* Determines the size of the member referenced by the COMPONENT_REF
13184 REF, using its initializer expression if necessary in order to
13185 determine the size of an initialized flexible array member.
13186 If non-null, set *SAM to the type of special array member.
13187 Returns the size as sizetype (which might be zero for an object
13188 with an uninitialized flexible array member) or null if the size
13189 cannot be determined. */
13192 component_ref_size (tree ref
, special_array_member
*sam
/* = NULL */)
13194 gcc_assert (TREE_CODE (ref
) == COMPONENT_REF
);
13196 special_array_member sambuf
;
13199 *sam
= component_ref_sam_type (ref
);
13201 /* The object/argument referenced by the COMPONENT_REF and its type. */
13202 tree arg
= TREE_OPERAND (ref
, 0);
13203 tree argtype
= TREE_TYPE (arg
);
13204 /* The referenced member. */
13205 tree member
= TREE_OPERAND (ref
, 1);
13207 tree memsize
= DECL_SIZE_UNIT (member
);
13210 tree memtype
= TREE_TYPE (member
);
13211 if (TREE_CODE (memtype
) != ARRAY_TYPE
)
13212 /* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
13213 to the type of a class with a virtual base which doesn't
13214 reflect the size of the virtual's members (see pr97595).
13215 If that's the case fail for now and implement something
13216 more robust in the future. */
13217 return (tree_int_cst_equal (memsize
, TYPE_SIZE_UNIT (memtype
))
13218 ? memsize
: NULL_TREE
);
  /* Arrays of two or more elements are treated as ordinary arrays by default.  */
13221 if (*sam
== special_array_member::int_n
13222 || *sam
== special_array_member::trail_n
)
13225 tree afield_decl
= TREE_OPERAND (ref
, 1);
13226 gcc_assert (TREE_CODE (afield_decl
) == FIELD_DECL
);
  /* If the trailing array is not a flexible array member, treat it as
     a normal array.  */
13229 if (DECL_NOT_FLEXARRAY (afield_decl
)
13230 && *sam
!= special_array_member::int_0
)
13233 if (*sam
== special_array_member::int_0
)
13234 memsize
= NULL_TREE
;
13236 /* For a reference to a flexible array member of a union
13237 use the size of the union instead of the size of the member. */
13238 if (TREE_CODE (argtype
) == UNION_TYPE
)
13239 memsize
= TYPE_SIZE_UNIT (argtype
);
13242 /* MEMBER is either a bona fide flexible array member, or a zero-elements
13243 array member, or an array of length one treated as such. */
13245 /* If the reference is to a declared object and the member a true
13246 flexible array, try to determine its size from its initializer. */
13247 poly_int64 baseoff
= 0;
13248 tree base
= get_addr_base_and_unit_offset (ref
, &baseoff
);
13249 if (!base
|| !VAR_P (base
))
13251 if (*sam
!= special_array_member::int_0
)
13254 if (TREE_CODE (arg
) != COMPONENT_REF
)
13258 while (TREE_CODE (base
) == COMPONENT_REF
)
13259 base
= TREE_OPERAND (base
, 0);
13260 baseoff
= tree_to_poly_int64 (byte_position (TREE_OPERAND (ref
, 1)));
13263 /* BASE is the declared object of which MEMBER is either a member
13264 or that is cast to ARGTYPE (e.g., a char buffer used to store
13265 an ARGTYPE object). */
13266 tree basetype
= TREE_TYPE (base
);
13268 /* Determine the base type of the referenced object. If it's
13269 the same as ARGTYPE and MEMBER has a known size, return it. */
13270 tree bt
= basetype
;
13271 if (*sam
!= special_array_member::int_0
)
13272 while (TREE_CODE (bt
) == ARRAY_TYPE
)
13273 bt
= TREE_TYPE (bt
);
13274 bool typematch
= useless_type_conversion_p (argtype
, bt
);
13275 if (memsize
&& typematch
)
13278 memsize
= NULL_TREE
;
13281 /* MEMBER is a true flexible array member. Compute its size from
13282 the initializer of the BASE object if it has one. */
13283 if (tree init
= DECL_P (base
) ? DECL_INITIAL (base
) : NULL_TREE
)
13284 if (init
!= error_mark_node
)
13286 init
= get_initializer_for (init
, member
);
13289 memsize
= TYPE_SIZE_UNIT (TREE_TYPE (init
));
13290 if (tree refsize
= TYPE_SIZE_UNIT (argtype
))
13292 /* Use the larger of the initializer size and the tail
13293 padding in the enclosing struct. */
13294 poly_int64 rsz
= tree_to_poly_int64 (refsize
);
13296 if (known_lt (tree_to_poly_int64 (memsize
), rsz
))
13297 memsize
= wide_int_to_tree (TREE_TYPE (memsize
), rsz
);
13309 && DECL_EXTERNAL (base
)
13311 && *sam
!= special_array_member::int_0
)
13312 /* The size of a flexible array member of an extern struct
13313 with no initializer cannot be determined (it's defined
13314 in another translation unit and can have an initializer
13315 with an arbitrary number of elements). */
13318 /* Use the size of the base struct or, for interior zero-length
13319 arrays, the size of the enclosing type. */
13320 memsize
= TYPE_SIZE_UNIT (bt
);
13322 else if (DECL_P (base
))
13323 /* Use the size of the BASE object (possibly an array of some
13324 other type such as char used to store the struct). */
13325 memsize
= DECL_SIZE_UNIT (base
);
13330 /* If the flexible array member has a known size use the greater
13331 of it and the tail padding in the enclosing struct.
13332 Otherwise, when the size of the flexible array member is unknown
13333 and the referenced object is not a struct, use the size of its
13334 type when known. This detects sizes of array buffers when cast
13335 to struct types with flexible array members. */
13338 if (!tree_fits_poly_int64_p (memsize
))
13340 poly_int64 memsz64
= memsize
? tree_to_poly_int64 (memsize
) : 0;
13341 if (known_lt (baseoff
, memsz64
))
13343 memsz64
-= baseoff
;
13344 return wide_int_to_tree (TREE_TYPE (memsize
), memsz64
);
13346 return size_zero_node
;
13349 /* Return "don't know" for an external non-array object since its
13350 flexible array member can be initialized to have any number of
13351 elements. Otherwise, return zero because the flexible array
13352 member has no elements. */
13353 return (DECL_P (base
)
13354 && DECL_EXTERNAL (base
)
13356 || TREE_CODE (basetype
) != ARRAY_TYPE
)
13357 ? NULL_TREE
: size_zero_node
);
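/* For illustration (hypothetical example): given

     struct S { int n; char a[]; };
     static struct S s = { 3, "abc" };

   component_ref_size on s.a would yield 4 (the size of the flexible
   member's initializer, including the terminating nul), whereas for an
   extern declaration of S with no initializer the size cannot be
   determined and null is returned.  */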
/* Return the machine mode of T.  For vectors, returns the mode of the
   inner type.  The main use case is to feed the result to HONOR_NANS,
   avoiding the BLKmode that a direct TYPE_MODE (T) might return.  */

machine_mode
element_mode (const_tree t)
{
  if (!TYPE_P (t))
    t = TREE_TYPE (t);
  if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    t = TREE_TYPE (t);
  return TYPE_MODE (t);
}

/* Vector types need to re-check the target flags each time we report
   the machine mode.  We need to do this because attribute target can
   change the result of vector_mode_supported_p and have_regs_of_mode
   on a per-function basis.  Thus the TYPE_MODE of a VECTOR_TYPE can
   change on a per-function basis.  */
/* ??? Possibly a better solution is to run through all the types
   referenced by a function and re-compute the TYPE_MODE once, rather
   than make the TYPE_MODE macro call a function.  */

machine_mode
vector_type_mode (const_tree t)
{
  machine_mode mode;

  gcc_assert (TREE_CODE (t) == VECTOR_TYPE);

  mode = t->type_common.mode;
  if (VECTOR_MODE_P (mode)
      && (!targetm.vector_mode_supported_p (mode)
	  || !have_regs_of_mode[mode]))
    {
      scalar_int_mode innermode;

      /* For integers, try mapping it to a same-sized scalar mode.  */
      if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
	{
	  poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
			     * GET_MODE_BITSIZE (innermode));
	  scalar_int_mode mode;
	  if (int_mode_for_size (size, 0).exists (&mode)
	      && have_regs_of_mode[mode])
	    return mode;
	}

      return BLKmode;
    }

  return mode;
}
/* Return the size in bits of each element of vector type TYPE.  */

unsigned int
vector_element_bits (const_tree type)
{
  gcc_checking_assert (VECTOR_TYPE_P (type));
  if (VECTOR_BOOLEAN_TYPE_P (type))
    return TYPE_PRECISION (TREE_TYPE (type));
  return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
}

/* Calculate the size in bits of each element of vector type TYPE
   and return the result as a tree of type bitsizetype.  */

tree
vector_element_bits_tree (const_tree type)
{
  gcc_checking_assert (VECTOR_TYPE_P (type));
  if (VECTOR_BOOLEAN_TYPE_P (type))
    return bitsize_int (vector_element_bits (type));
  return TYPE_SIZE (TREE_TYPE (type));
}
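/* For illustration: for a vector type such as V4SF (four 32-bit floats)
   vector_element_bits returns 32; for a vector-boolean type it returns the
   precision of the element type instead, which may be as small as 1 bit.  */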
/* Verify that basic properties of T match TV and thus T can be a variant of
   TV.  TV should be the more specified variant (i.e. the main variant).  */

static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variants can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT
       and ENCODE_QUAL_ADDR_SPACE.
     - the main variant may be TYPE_COMPLETE_P and variant types
       !TYPE_COMPLETE_P; in this case some values may not be set in the
       variant types (see the TYPE_COMPLETE_P checks).
     - it is possible to have a TYPE_ARTIFICIAL variant of a non-artificial
       type.
     - TYPE_NAME and attributes (i.e. when the variant originates from a
       typedef).
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants).
     - the alignment: TYPE_ALIGN and TYPE_USER_ALIGN.
     - during LTO, TYPE_CONTEXT if the type is TYPE_FILE_SCOPE_P; this is
       necessary to make it possible to merge types from different TUs.
     - arrays, pointers and references may have a TREE_TYPE that is a
       variant of the TREE_TYPE of their main variants.
     - aggregates may have a new TYPE_FIELDS list that lists variants of
       the main variant's TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE.  */
13463 /* Convenience macro for matching individual fields. */
13464 #define verify_variant_match(flag) \
13466 if (flag (tv) != flag (t)) \
13468 error ("type variant differs by %s", #flag); \
13474 /* tree_base checks. */
13476 verify_variant_match (TREE_CODE
);
13477 /* FIXME: Ada builds non-artificial variants of artificial types. */
13479 if (TYPE_ARTIFICIAL (tv
))
13480 verify_variant_match (TYPE_ARTIFICIAL
);
13482 if (POINTER_TYPE_P (tv
))
13483 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL
);
13484 /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build. */
13485 verify_variant_match (TYPE_UNSIGNED
);
13486 verify_variant_match (TYPE_PACKED
);
13487 if (TREE_CODE (t
) == REFERENCE_TYPE
)
13488 verify_variant_match (TYPE_REF_IS_RVALUE
);
13489 if (AGGREGATE_TYPE_P (t
))
13490 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER
);
13492 verify_variant_match (TYPE_SATURATING
);
13493 /* FIXME: This check trigger during libstdc++ build. */
13495 if (RECORD_OR_UNION_TYPE_P (t
) && COMPLETE_TYPE_P (t
))
13496 verify_variant_match (TYPE_FINAL_P
);
13499 /* tree_type_common checks. */
13501 if (COMPLETE_TYPE_P (t
))
13503 verify_variant_match (TYPE_MODE
);
13504 if (TREE_CODE (TYPE_SIZE (t
)) != PLACEHOLDER_EXPR
13505 && TREE_CODE (TYPE_SIZE (tv
)) != PLACEHOLDER_EXPR
)
13506 verify_variant_match (TYPE_SIZE
);
13507 if (TREE_CODE (TYPE_SIZE_UNIT (t
)) != PLACEHOLDER_EXPR
13508 && TREE_CODE (TYPE_SIZE_UNIT (tv
)) != PLACEHOLDER_EXPR
13509 && TYPE_SIZE_UNIT (t
) != TYPE_SIZE_UNIT (tv
))
13511 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t
),
13512 TYPE_SIZE_UNIT (tv
), 0));
13513 error ("type variant has different %<TYPE_SIZE_UNIT%>");
13515 error ("type variant%'s %<TYPE_SIZE_UNIT%>");
13516 debug_tree (TYPE_SIZE_UNIT (tv
));
13517 error ("type%'s %<TYPE_SIZE_UNIT%>");
13518 debug_tree (TYPE_SIZE_UNIT (t
));
13521 verify_variant_match (TYPE_NEEDS_CONSTRUCTING
);
13523 verify_variant_match (TYPE_PRECISION_RAW
);
13524 if (RECORD_OR_UNION_TYPE_P (t
))
13525 verify_variant_match (TYPE_TRANSPARENT_AGGR
);
13526 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13527 verify_variant_match (TYPE_NONALIASED_COMPONENT
);
  /* During LTO we merge variant lists from different translation units
     that may differ by TYPE_CONTEXT, which in turn may point
     to TRANSLATION_UNIT_DECL.
     Ada also builds variants of types with different TYPE_CONTEXT.  */
13533 if (!in_lto_p
|| !TYPE_FILE_SCOPE_P (t
))
13534 verify_variant_match (TYPE_CONTEXT
);
13536 if (TREE_CODE (t
) == ARRAY_TYPE
|| TREE_CODE (t
) == INTEGER_TYPE
)
13537 verify_variant_match (TYPE_STRING_FLAG
);
13538 if (TREE_CODE (t
) == RECORD_TYPE
|| TREE_CODE (t
) == UNION_TYPE
)
13539 verify_variant_match (TYPE_CXX_ODR_P
);
13540 if (TYPE_ALIAS_SET_KNOWN_P (t
))
13542 error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
13547 /* tree_type_non_common checks. */
  /* FIXME: the C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
     and dangles the pointer from time to time.  */
13551 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_VFIELD (t
) != TYPE_VFIELD (tv
)
13552 && (in_lto_p
|| !TYPE_VFIELD (tv
)
13553 || TREE_CODE (TYPE_VFIELD (tv
)) != TREE_LIST
))
13555 error ("type variant has different %<TYPE_VFIELD%>");
13559 if ((TREE_CODE (t
) == ENUMERAL_TYPE
&& COMPLETE_TYPE_P (t
))
13560 || TREE_CODE (t
) == INTEGER_TYPE
13561 || TREE_CODE (t
) == BOOLEAN_TYPE
13562 || TREE_CODE (t
) == BITINT_TYPE
13563 || SCALAR_FLOAT_TYPE_P (t
)
13564 || FIXED_POINT_TYPE_P (t
))
13566 verify_variant_match (TYPE_MAX_VALUE
);
13567 verify_variant_match (TYPE_MIN_VALUE
);
13569 if (TREE_CODE (t
) == METHOD_TYPE
)
13570 verify_variant_match (TYPE_METHOD_BASETYPE
);
13571 if (TREE_CODE (t
) == OFFSET_TYPE
)
13572 verify_variant_match (TYPE_OFFSET_BASETYPE
);
13573 if (TREE_CODE (t
) == ARRAY_TYPE
)
13574 verify_variant_match (TYPE_ARRAY_MAX_SIZE
);
  /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
     or even the type's main variant.  This is needed to make bootstrap pass
     and the bug seems new in GCC 5.
     The C++ FE should be updated to make this consistent and we should check
     that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
     is a match with the main variant.

     Also disable the check for Java for now because of a parser hack that
     builds first a dummy BINFO and then sometimes replaces it by a real
     BINFO in some cases.  */
13585 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_BINFO (t
) && TYPE_BINFO (tv
)
13586 && TYPE_BINFO (t
) != TYPE_BINFO (tv
)
      /* FIXME: Java sometimes keeps dummy TYPE_BINFOs on variant types.
	 Since there is no cheap way to tell C++ and Java types apart without
	 LTO, do the checking at LTO time only.  */
13590 && (in_lto_p
&& odr_type_p (t
)))
13592 error ("type variant has different %<TYPE_BINFO%>");
13594 error ("type variant%'s %<TYPE_BINFO%>");
13595 debug_tree (TYPE_BINFO (tv
));
13596 error ("type%'s %<TYPE_BINFO%>");
13597 debug_tree (TYPE_BINFO (t
));
13601 /* Check various uses of TYPE_VALUES_RAW. */
13602 if (TREE_CODE (t
) == ENUMERAL_TYPE
13603 && TYPE_VALUES (t
))
13604 verify_variant_match (TYPE_VALUES
);
13605 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13606 verify_variant_match (TYPE_DOMAIN
);
13607 /* Permit incomplete variants of complete type. While FEs may complete
13608 all variants, this does not happen for C++ templates in all cases. */
13609 else if (RECORD_OR_UNION_TYPE_P (t
)
13610 && COMPLETE_TYPE_P (t
)
13611 && TYPE_FIELDS (t
) != TYPE_FIELDS (tv
))
      /* Fortran builds qualified variants as new records with items of
	 qualified type.  Verify that they look the same.  */
13617 for (f1
= TYPE_FIELDS (t
), f2
= TYPE_FIELDS (tv
);
13619 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
13620 if (TREE_CODE (f1
) != FIELD_DECL
|| TREE_CODE (f2
) != FIELD_DECL
13621 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1
))
13622 != TYPE_MAIN_VARIANT (TREE_TYPE (f2
))
13623 /* FIXME: gfc_nonrestricted_type builds all types as variants
13624 with exception of pointer types. It deeply copies the type
13625 which means that we may end up with a variant type
13626 referring non-variant pointer. We may change it to
13627 produce types as variants, too, like
13628 objc_get_protocol_qualified_type does. */
13629 && !POINTER_TYPE_P (TREE_TYPE (f1
)))
13630 || DECL_FIELD_OFFSET (f1
) != DECL_FIELD_OFFSET (f2
)
13631 || DECL_FIELD_BIT_OFFSET (f1
) != DECL_FIELD_BIT_OFFSET (f2
))
13635 error ("type variant has different %<TYPE_FIELDS%>");
13637 error ("first mismatch is field");
13639 error ("and field");
13644 else if (FUNC_OR_METHOD_TYPE_P (t
))
13645 verify_variant_match (TYPE_ARG_TYPES
);
13646 /* For C++ the qualified variant of array type is really an array type
13647 of qualified TREE_TYPE.
13648 objc builds variants of pointer where pointer to type is a variant, too
13649 in objc_get_protocol_qualified_type. */
13650 if (TREE_TYPE (t
) != TREE_TYPE (tv
)
13651 && ((TREE_CODE (t
) != ARRAY_TYPE
13652 && !POINTER_TYPE_P (t
))
13653 || TYPE_MAIN_VARIANT (TREE_TYPE (t
))
13654 != TYPE_MAIN_VARIANT (TREE_TYPE (tv
))))
13656 error ("type variant has different %<TREE_TYPE%>");
13658 error ("type variant%'s %<TREE_TYPE%>");
13659 debug_tree (TREE_TYPE (tv
));
13660 error ("type%'s %<TREE_TYPE%>");
13661 debug_tree (TREE_TYPE (t
));
13664 if (type_with_alias_set_p (t
)
13665 && !gimple_canonical_types_compatible_p (t
, tv
, false))
13667 error ("type is not compatible with its variant");
13669 error ("type variant%'s %<TREE_TYPE%>");
13670 debug_tree (TREE_TYPE (tv
));
13671 error ("type%'s %<TREE_TYPE%>");
13672 debug_tree (TREE_TYPE (t
));
13676 #undef verify_variant_match
13680 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13681 the middle-end types_compatible_p function. It needs to avoid
13682 claiming types are different for types that should be treated
13683 the same with respect to TBAA. Canonical types are also used
13684 for IL consistency checks via the useless_type_conversion_p
13685 predicate which does not handle all type kinds itself but falls
13686 back to pointer-comparison of TYPE_CANONICAL for aggregates
/* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
   type calculation because we need to allow inter-operability between signed
   and unsigned variants.  */

bool
type_with_interoperable_signedness (const_tree type)
{
  /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with
     both signed char and unsigned char.  Similarly the Fortran FE builds
     C_SIZE_T as a signed type, while C defines it as unsigned.  */

  return tree_code_for_canonical_type_merging (TREE_CODE (type))
	   == INTEGER_TYPE
	 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
	     || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
}
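/* For illustration (an assumed consequence of the above): on a target where
   size_t is 64 bits, a 64-bit signed integer type and a 64-bit unsigned
   integer type fall into the same canonical equivalence class, so Fortran's
   C_SIZE_T interoperates with C's size_t for TBAA purposes; the same holds
   for signed char versus unsigned char via C_SIGNED_CHAR.  */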
/* Return true iff T1 and T2 are structurally identical for what
   TBAA is concerned.
   This function is used both by lto.cc canonical type merging and by the
   verifier.  If TRUST_TYPE_CANONICAL we do not look into structure of types
   that have TYPE_CANONICAL defined and assume them equivalent.  This is
   useful only for LTO because only in these cases TYPE_CANONICAL equivalence
   corresponds to the one defined by gimple_canonical_types_compatible_p.  */
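/* For illustration (behavioural sketch, not an exhaustive specification):
   under this predicate
     - "int *" and "float *" compare equal (all pointers in the same address
       space are globbed together, see the C_PTR comment below),
     - "const int" and "int" compare equal (qualifiers are ignored),
     - "int [4]" and "int [5]" compare unequal (the array domains differ).  */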
13715 gimple_canonical_types_compatible_p (const_tree t1
, const_tree t2
,
13716 bool trust_type_canonical
)
13718 /* Type variants should be same as the main variant. When not doing sanity
13719 checking to verify this fact, go to main variants and save some work. */
13720 if (trust_type_canonical
)
13722 t1
= TYPE_MAIN_VARIANT (t1
);
13723 t2
= TYPE_MAIN_VARIANT (t2
);
13726 /* Check first for the obvious case of pointer identity. */
13730 /* Check that we have two types to compare. */
13731 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
  /* We consider complete types always compatible with incomplete types.
     This does not make sense for canonical type calculation and thus we
     need to ensure that we are never called on it.

     FIXME: For more correctness the function probably should have three modes
       1) mode assuming that types are complete matching their structure
       2) mode allowing incomplete types but producing equivalence classes
	  and thus ignoring all info from complete types
       3) mode allowing incomplete types to match complete but checking
	  compatibility between complete types.

     1 and 2 can be used for canonical type calculation.  3 is the real
     definition of type compatibility that can be used e.g. for warnings
     during declaration merging.  */
13749 gcc_assert (!trust_type_canonical
13750 || (type_with_alias_set_p (t1
) && type_with_alias_set_p (t2
)));
13752 /* If the types have been previously registered and found equal
13755 if (TYPE_CANONICAL (t1
) && TYPE_CANONICAL (t2
)
13756 && trust_type_canonical
)
      /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
	 they are always NULL, but they are set to non-NULL for types
	 constructed by build_pointer_type and variants.  In this case the
	 TYPE_CANONICAL is more fine grained than the equivalence we test
	 (where all pointers are considered equal).  Be sure not to return
	 false negatives.  */
13764 gcc_checking_assert (canonical_type_used_p (t1
)
13765 && canonical_type_used_p (t2
));
13766 return TYPE_CANONICAL (t1
) == TYPE_CANONICAL (t2
);
  /* For types where we do ODR based TBAA the canonical type is always
     set correctly, so we know that types are different if their
     canonical types do not match.  */
13772 if (trust_type_canonical
13773 && (odr_type_p (t1
) && odr_based_tbaa_p (t1
))
13774 != (odr_type_p (t2
) && odr_based_tbaa_p (t2
)))
13777 /* Can't be the same type if the types don't have the same code. */
13778 enum tree_code code
= tree_code_for_canonical_type_merging (TREE_CODE (t1
));
13779 if (code
!= tree_code_for_canonical_type_merging (TREE_CODE (t2
)))
13782 /* Qualifiers do not matter for canonical type comparison purposes. */
13784 /* Void types and nullptr types are always the same. */
13785 if (VOID_TYPE_P (t1
)
13786 || TREE_CODE (t1
) == NULLPTR_TYPE
)
13789 /* Can't be the same type if they have different mode. */
13790 if (TYPE_MODE (t1
) != TYPE_MODE (t2
))
13793 /* Non-aggregate types can be handled cheaply. */
13794 if (INTEGRAL_TYPE_P (t1
)
13795 || SCALAR_FLOAT_TYPE_P (t1
)
13796 || FIXED_POINT_TYPE_P (t1
)
13797 || VECTOR_TYPE_P (t1
)
13798 || TREE_CODE (t1
) == COMPLEX_TYPE
13799 || TREE_CODE (t1
) == OFFSET_TYPE
13800 || POINTER_TYPE_P (t1
))
13802 /* Can't be the same type if they have different precision. */
13803 if (TYPE_PRECISION_RAW (t1
) != TYPE_PRECISION_RAW (t2
))
13806 /* In some cases the signed and unsigned types are required to be
13808 if (TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
)
13809 && !type_with_interoperable_signedness (t1
))
13812 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13813 interoperable with "signed char". Unless all frontends are revisited
13814 to agree on these types, we must ignore the flag completely. */
  /* The Fortran standard defines a C_PTR type that is compatible with every
     C pointer.  For this reason we need to glob all pointers into one.
     Still, pointers in different address spaces are not compatible.  */
13819 if (POINTER_TYPE_P (t1
))
13821 if (TYPE_ADDR_SPACE (TREE_TYPE (t1
))
13822 != TYPE_ADDR_SPACE (TREE_TYPE (t2
)))
13826 /* Tail-recurse to components. */
13827 if (VECTOR_TYPE_P (t1
)
13828 || TREE_CODE (t1
) == COMPLEX_TYPE
)
13829 return gimple_canonical_types_compatible_p (TREE_TYPE (t1
),
13831 trust_type_canonical
);
13836 /* Do type-specific comparisons. */
13837 switch (TREE_CODE (t1
))
13840 /* Array types are the same if the element types are the same and
13841 the number of elements are the same. */
13842 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
13843 trust_type_canonical
)
13844 || TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
)
13845 || TYPE_REVERSE_STORAGE_ORDER (t1
) != TYPE_REVERSE_STORAGE_ORDER (t2
)
13846 || TYPE_NONALIASED_COMPONENT (t1
) != TYPE_NONALIASED_COMPONENT (t2
))
13850 tree i1
= TYPE_DOMAIN (t1
);
13851 tree i2
= TYPE_DOMAIN (t2
);
13853 /* For an incomplete external array, the type domain can be
13854 NULL_TREE. Check this condition also. */
13855 if (i1
== NULL_TREE
&& i2
== NULL_TREE
)
13857 else if (i1
== NULL_TREE
|| i2
== NULL_TREE
)
13861 tree min1
= TYPE_MIN_VALUE (i1
);
13862 tree min2
= TYPE_MIN_VALUE (i2
);
13863 tree max1
= TYPE_MAX_VALUE (i1
);
13864 tree max2
= TYPE_MAX_VALUE (i2
);
13866 /* The minimum/maximum values have to be the same. */
13869 && ((TREE_CODE (min1
) == PLACEHOLDER_EXPR
13870 && TREE_CODE (min2
) == PLACEHOLDER_EXPR
)
13871 || operand_equal_p (min1
, min2
, 0))))
13874 && ((TREE_CODE (max1
) == PLACEHOLDER_EXPR
13875 && TREE_CODE (max2
) == PLACEHOLDER_EXPR
)
13876 || operand_equal_p (max1
, max2
, 0)))))
13884 case FUNCTION_TYPE
:
13885 /* Function types are the same if the return type and arguments types
13887 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
13888 trust_type_canonical
))
13891 if (TYPE_ARG_TYPES (t1
) == TYPE_ARG_TYPES (t2
)
13892 && (TYPE_NO_NAMED_ARGS_STDARG_P (t1
)
13893 == TYPE_NO_NAMED_ARGS_STDARG_P (t2
)))
13897 tree parms1
, parms2
;
13899 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
13901 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
))
13903 if (!gimple_canonical_types_compatible_p
13904 (TREE_VALUE (parms1
), TREE_VALUE (parms2
),
13905 trust_type_canonical
))
13909 if (parms1
|| parms2
)
13917 case QUAL_UNION_TYPE
:
13921 /* Don't try to compare variants of an incomplete type, before
13922 TYPE_FIELDS has been copied around. */
13923 if (!COMPLETE_TYPE_P (t1
) && !COMPLETE_TYPE_P (t2
))
13927 if (TYPE_REVERSE_STORAGE_ORDER (t1
) != TYPE_REVERSE_STORAGE_ORDER (t2
))
13930 /* For aggregate types, all the fields must be the same. */
13931 for (f1
= TYPE_FIELDS (t1
), f2
= TYPE_FIELDS (t2
);
13933 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
13935 /* Skip non-fields and zero-sized fields. */
13936 while (f1
&& (TREE_CODE (f1
) != FIELD_DECL
13938 && integer_zerop (DECL_SIZE (f1
)))))
13939 f1
= TREE_CHAIN (f1
);
13940 while (f2
&& (TREE_CODE (f2
) != FIELD_DECL
13942 && integer_zerop (DECL_SIZE (f2
)))))
13943 f2
= TREE_CHAIN (f2
);
13946 /* The fields must have the same name, offset and type. */
13947 if (DECL_NONADDRESSABLE_P (f1
) != DECL_NONADDRESSABLE_P (f2
)
13948 || !gimple_compare_field_offset (f1
, f2
)
13949 || !gimple_canonical_types_compatible_p
13950 (TREE_TYPE (f1
), TREE_TYPE (f2
),
13951 trust_type_canonical
))
13955 /* If one aggregate has more fields than the other, they
13956 are not the same. */
13964 /* Consider all types with language specific trees in them mutually
13965 compatible. This is executed only from verify_type and false
13966 positives can be tolerated. */
13967 gcc_assert (!in_lto_p
);
13972 /* For OPAQUE_TYPE T, it should have only size and alignment information
13973 and its mode should be of class MODE_OPAQUE. This function verifies
13974 these properties of T match TV which is the main variant of T and TC
13975 which is the canonical of T. */
13978 verify_opaque_type (const_tree t
, tree tv
, tree tc
)
13980 gcc_assert (OPAQUE_TYPE_P (t
));
13981 gcc_assert (tv
&& tv
== TYPE_MAIN_VARIANT (tv
));
13982 gcc_assert (tc
&& tc
== TYPE_CANONICAL (tc
));
13984 /* For an opaque type T1, check if some of its properties match
13985 the corresponding ones of the other opaque type T2, emit some
13986 error messages for those inconsistent ones. */
13987 auto check_properties_for_opaque_type
= [](const_tree t1
, tree t2
,
13988 const char *kind_msg
)
13990 if (!OPAQUE_TYPE_P (t2
))
13992 error ("type %s is not an opaque type", kind_msg
);
13996 if (!OPAQUE_MODE_P (TYPE_MODE (t2
)))
13998 error ("type %s is not with opaque mode", kind_msg
);
14002 if (TYPE_MODE (t1
) != TYPE_MODE (t2
))
14004 error ("type %s differs by %<TYPE_MODE%>", kind_msg
);
14008 poly_uint64 t1_size
= tree_to_poly_uint64 (TYPE_SIZE (t1
));
14009 poly_uint64 t2_size
= tree_to_poly_uint64 (TYPE_SIZE (t2
));
14010 if (maybe_ne (t1_size
, t2_size
))
14012 error ("type %s differs by %<TYPE_SIZE%>", kind_msg
);
14016 if (TYPE_ALIGN (t1
) != TYPE_ALIGN (t2
))
14018 error ("type %s differs by %<TYPE_ALIGN%>", kind_msg
);
14022 if (TYPE_USER_ALIGN (t1
) != TYPE_USER_ALIGN (t2
))
14024 error ("type %s differs by %<TYPE_USER_ALIGN%>", kind_msg
);
14031 check_properties_for_opaque_type (t
, tv
, "variant");
14034 check_properties_for_opaque_type (t
, tc
, "canonical");
14037 /* Verify type T. */
14040 verify_type (const_tree t
)
14042 bool error_found
= false;
14043 tree mv
= TYPE_MAIN_VARIANT (t
);
14044 tree ct
= TYPE_CANONICAL (t
);
14046 if (OPAQUE_TYPE_P (t
))
14048 verify_opaque_type (t
, mv
, ct
);
14054 error ("main variant is not defined");
14055 error_found
= true;
14057 else if (mv
!= TYPE_MAIN_VARIANT (mv
))
14059 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
14061 error_found
= true;
14063 else if (t
!= mv
&& !verify_type_variant (t
, mv
))
14064 error_found
= true;
14068 else if (TYPE_CANONICAL (ct
) != ct
)
14070 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
14072 error_found
= true;
14074 /* Method and function types cannot be used to address memory and thus
14075 TYPE_CANONICAL really matters only for determining useless conversions.
     FIXME: the C++ FE produces declarations of builtin functions that are
     not compatible with main variants.  */
14079 else if (TREE_CODE (t
) == FUNCTION_TYPE
)
14082 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
14083 with variably sized arrays because their sizes possibly
14084 gimplified to different variables. */
14085 && !variably_modified_type_p (ct
, NULL
)
14086 && !gimple_canonical_types_compatible_p (t
, ct
, false)
14087 && COMPLETE_TYPE_P (t
))
14089 error ("%<TYPE_CANONICAL%> is not compatible");
14091 error_found
= true;
14094 if (COMPLETE_TYPE_P (t
) && TYPE_CANONICAL (t
)
14095 && TYPE_MODE (t
) != TYPE_MODE (TYPE_CANONICAL (t
)))
14097 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
14099 error_found
= true;
14101 if (TYPE_MAIN_VARIANT (t
) == t
&& ct
&& TYPE_MAIN_VARIANT (ct
) != ct
)
14103 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
14105 debug_tree (TYPE_MAIN_VARIANT (ct
));
14106 error_found
= true;
14110 /* Check various uses of TYPE_MIN_VALUE_RAW. */
14111 if (RECORD_OR_UNION_TYPE_P (t
))
      /* FIXME: the C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
	 and dangles the pointer from time to time.  */
14115 if (TYPE_VFIELD (t
)
14116 && TREE_CODE (TYPE_VFIELD (t
)) != FIELD_DECL
14117 && TREE_CODE (TYPE_VFIELD (t
)) != TREE_LIST
)
14119 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
14120 debug_tree (TYPE_VFIELD (t
));
14121 error_found
= true;
14124 else if (TREE_CODE (t
) == POINTER_TYPE
)
14126 if (TYPE_NEXT_PTR_TO (t
)
14127 && TREE_CODE (TYPE_NEXT_PTR_TO (t
)) != POINTER_TYPE
)
14129 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
14130 debug_tree (TYPE_NEXT_PTR_TO (t
));
14131 error_found
= true;
14134 else if (TREE_CODE (t
) == REFERENCE_TYPE
)
14136 if (TYPE_NEXT_REF_TO (t
)
14137 && TREE_CODE (TYPE_NEXT_REF_TO (t
)) != REFERENCE_TYPE
)
14139 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
14140 debug_tree (TYPE_NEXT_REF_TO (t
));
14141 error_found
= true;
14144 else if (INTEGRAL_TYPE_P (t
) || SCALAR_FLOAT_TYPE_P (t
)
14145 || FIXED_POINT_TYPE_P (t
))
14147 /* FIXME: The following check should pass:
14148 useless_type_conversion_p (const_cast <tree> (t),
14149 TREE_TYPE (TYPE_MIN_VALUE (t))
14150 but does not for C sizetypes in LTO. */
14153 /* Check various uses of TYPE_MAXVAL_RAW. */
14154 if (RECORD_OR_UNION_TYPE_P (t
))
14156 if (!TYPE_BINFO (t
))
14158 else if (TREE_CODE (TYPE_BINFO (t
)) != TREE_BINFO
)
14160 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
14161 debug_tree (TYPE_BINFO (t
));
14162 error_found
= true;
14164 else if (TREE_TYPE (TYPE_BINFO (t
)) != TYPE_MAIN_VARIANT (t
))
14166 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
14167 debug_tree (TREE_TYPE (TYPE_BINFO (t
)));
14168 error_found
= true;
14171 else if (FUNC_OR_METHOD_TYPE_P (t
))
14173 if (TYPE_METHOD_BASETYPE (t
)
14174 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != RECORD_TYPE
14175 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != UNION_TYPE
)
14177 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
14178 debug_tree (TYPE_METHOD_BASETYPE (t
));
14179 error_found
= true;
14182 else if (TREE_CODE (t
) == OFFSET_TYPE
)
14184 if (TYPE_OFFSET_BASETYPE (t
)
14185 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != RECORD_TYPE
14186 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != UNION_TYPE
)
14188 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
14189 debug_tree (TYPE_OFFSET_BASETYPE (t
));
14190 error_found
= true;
14193 else if (INTEGRAL_TYPE_P (t
) || SCALAR_FLOAT_TYPE_P (t
)
14194 || FIXED_POINT_TYPE_P (t
))
14196 /* FIXME: The following check should pass:
14197 useless_type_conversion_p (const_cast <tree> (t),
14198 TREE_TYPE (TYPE_MAX_VALUE (t))
14199 but does not for C sizetypes in LTO. */
14201 else if (TREE_CODE (t
) == ARRAY_TYPE
)
14203 if (TYPE_ARRAY_MAX_SIZE (t
)
14204 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t
)) != INTEGER_CST
)
14206 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
14207 debug_tree (TYPE_ARRAY_MAX_SIZE (t
));
14208 error_found
= true;
14211 else if (TYPE_MAX_VALUE_RAW (t
))
14213 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
14214 debug_tree (TYPE_MAX_VALUE_RAW (t
));
14215 error_found
= true;
14218 if (TYPE_LANG_SLOT_1 (t
) && in_lto_p
)
14220 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
14221 debug_tree (TYPE_LANG_SLOT_1 (t
));
14222 error_found
= true;
14225 /* Check various uses of TYPE_VALUES_RAW. */
14226 if (TREE_CODE (t
) == ENUMERAL_TYPE
)
14227 for (tree l
= TYPE_VALUES (t
); l
; l
= TREE_CHAIN (l
))
14229 tree value
= TREE_VALUE (l
);
14230 tree name
= TREE_PURPOSE (l
);
	/* The C FE produces INTEGER_CSTs of INTEGER_TYPE, while the C++ FE
	   uses CONST_DECLs of ENUMERAL_TYPE.  */
14234 if (TREE_CODE (value
) != INTEGER_CST
&& TREE_CODE (value
) != CONST_DECL
)
14236 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
14237 debug_tree (value
);
14239 error_found
= true;
14241 if (TREE_CODE (TREE_TYPE (value
)) != INTEGER_TYPE
14242 && TREE_CODE (TREE_TYPE (value
)) != BOOLEAN_TYPE
14243 && !useless_type_conversion_p (const_cast <tree
> (t
), TREE_TYPE (value
)))
14245 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
14247 debug_tree (value
);
14249 error_found
= true;
14251 if (TREE_CODE (name
) != IDENTIFIER_NODE
)
14253 error ("enum value name is not %<IDENTIFIER_NODE%>");
14254 debug_tree (value
);
14256 error_found
= true;
14259 else if (TREE_CODE (t
) == ARRAY_TYPE
)
14261 if (TYPE_DOMAIN (t
) && TREE_CODE (TYPE_DOMAIN (t
)) != INTEGER_TYPE
)
14263 error ("array %<TYPE_DOMAIN%> is not integer type");
14264 debug_tree (TYPE_DOMAIN (t
));
14265 error_found
= true;
14268 else if (RECORD_OR_UNION_TYPE_P (t
))
14270 if (TYPE_FIELDS (t
) && !COMPLETE_TYPE_P (t
) && in_lto_p
)
14272 error ("%<TYPE_FIELDS%> defined in incomplete type");
14273 error_found
= true;
14275 for (tree fld
= TYPE_FIELDS (t
); fld
; fld
= TREE_CHAIN (fld
))
14277 /* TODO: verify properties of decls. */
14278 if (TREE_CODE (fld
) == FIELD_DECL
)
14280 else if (TREE_CODE (fld
) == TYPE_DECL
)
14282 else if (TREE_CODE (fld
) == CONST_DECL
)
14284 else if (VAR_P (fld
))
14286 else if (TREE_CODE (fld
) == TEMPLATE_DECL
)
14288 else if (TREE_CODE (fld
) == USING_DECL
)
14290 else if (TREE_CODE (fld
) == FUNCTION_DECL
)
14294 error ("wrong tree in %<TYPE_FIELDS%> list");
14296 error_found
= true;
14300 else if (TREE_CODE (t
) == INTEGER_TYPE
14301 || TREE_CODE (t
) == BOOLEAN_TYPE
14302 || TREE_CODE (t
) == BITINT_TYPE
14303 || TREE_CODE (t
) == OFFSET_TYPE
14304 || TREE_CODE (t
) == REFERENCE_TYPE
14305 || TREE_CODE (t
) == NULLPTR_TYPE
14306 || TREE_CODE (t
) == POINTER_TYPE
)
14308 if (TYPE_CACHED_VALUES_P (t
) != (TYPE_CACHED_VALUES (t
) != NULL
))
14310 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
14312 TYPE_CACHED_VALUES_P (t
), (void *)TYPE_CACHED_VALUES (t
));
14313 error_found
= true;
14315 else if (TYPE_CACHED_VALUES_P (t
) && TREE_CODE (TYPE_CACHED_VALUES (t
)) != TREE_VEC
)
14317 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
14318 debug_tree (TYPE_CACHED_VALUES (t
));
14319 error_found
= true;
14321 /* Verify just enough of cache to ensure that no one copied it to new type.
14322 All copying should go by copy_node that should clear it. */
14323 else if (TYPE_CACHED_VALUES_P (t
))
14326 for (i
= 0; i
< TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t
)); i
++)
14327 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)
14328 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)) != t
)
14330 error ("wrong %<TYPE_CACHED_VALUES%> entry");
14331 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
));
14332 error_found
= true;
14337 else if (FUNC_OR_METHOD_TYPE_P (t
))
14338 for (tree l
= TYPE_ARG_TYPES (t
); l
; l
= TREE_CHAIN (l
))
14340 /* C++ FE uses TREE_PURPOSE to store initial values. */
14341 if (TREE_PURPOSE (l
) && in_lto_p
)
14343 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
14345 error_found
= true;
14347 if (!TYPE_P (TREE_VALUE (l
)))
14349 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
14351 error_found
= true;
14354 else if (!is_lang_specific (t
) && TYPE_VALUES_RAW (t
))
14356 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
14357 debug_tree (TYPE_VALUES_RAW (t
));
14358 error_found
= true;
14360 if (TREE_CODE (t
) != INTEGER_TYPE
14361 && TREE_CODE (t
) != BOOLEAN_TYPE
14362 && TREE_CODE (t
) != BITINT_TYPE
14363 && TREE_CODE (t
) != OFFSET_TYPE
14364 && TREE_CODE (t
) != REFERENCE_TYPE
14365 && TREE_CODE (t
) != NULLPTR_TYPE
14366 && TREE_CODE (t
) != POINTER_TYPE
14367 && TYPE_CACHED_VALUES_P (t
))
14369 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
14370 error_found
= true;
  /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
     TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
     anyway.  */
14376 if (TREE_CODE (t
) == METHOD_TYPE
14377 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t
)) != TYPE_METHOD_BASETYPE (t
))
14379 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
14380 error_found
= true;
14385 debug_tree (const_cast <tree
> (t
));
14386 internal_error ("%qs failed", __func__
);
14391 /* Return 1 if ARG interpreted as signed in its precision is known to be
14392 always positive or 2 if ARG is known to be always negative, or 3 if
14393 ARG may be positive or negative. */
14396 get_range_pos_neg (tree arg
)
14398 if (arg
== error_mark_node
)
14401 int prec
= TYPE_PRECISION (TREE_TYPE (arg
));
14403 if (TREE_CODE (arg
) == INTEGER_CST
)
14405 wide_int w
= wi::sext (wi::to_wide (arg
), prec
);
14411 while (CONVERT_EXPR_P (arg
)
14412 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg
, 0)))
14413 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg
, 0))) <= prec
)
14415 arg
= TREE_OPERAND (arg
, 0);
14416 /* Narrower value zero extended into wider type
14417 will always result in positive values. */
14418 if (TYPE_UNSIGNED (TREE_TYPE (arg
))
14419 && TYPE_PRECISION (TREE_TYPE (arg
)) < prec
)
14421 prec
= TYPE_PRECISION (TREE_TYPE (arg
));
14426 if (TREE_CODE (arg
) != SSA_NAME
)
14429 while (!get_global_range_query ()->range_of_expr (r
, arg
)
14430 || r
.undefined_p () || r
.varying_p ())
14432 gimple
*g
= SSA_NAME_DEF_STMT (arg
);
14433 if (is_gimple_assign (g
)
14434 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g
)))
14436 tree t
= gimple_assign_rhs1 (g
);
14437 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
14438 && TYPE_PRECISION (TREE_TYPE (t
)) <= prec
)
14440 if (TYPE_UNSIGNED (TREE_TYPE (t
))
14441 && TYPE_PRECISION (TREE_TYPE (t
)) < prec
)
14443 prec
= TYPE_PRECISION (TREE_TYPE (t
));
14452 if (TYPE_UNSIGNED (TREE_TYPE (arg
)))
14454 /* For unsigned values, the "positive" range comes
14455 below the "negative" range. */
14456 if (!wi::neg_p (wi::sext (r
.upper_bound (), prec
), SIGNED
))
14458 if (wi::neg_p (wi::sext (r
.lower_bound (), prec
), SIGNED
))
14463 if (!wi::neg_p (wi::sext (r
.lower_bound (), prec
), SIGNED
))
14465 if (wi::neg_p (wi::sext (r
.upper_bound (), prec
), SIGNED
))
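/* For illustration (assumed behaviour of get_range_pos_neg): an INTEGER_CST
   of -5 yields 2, a value zero-extended from a narrower unsigned type
   (e.g. "int i = (unsigned char) c;") yields 1, and an unconstrained
   signed SSA name yields 3.  */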
/* Return true if ARG is marked with the nonnull attribute in the
   current function signature.  */

bool
nonnull_arg_p (const_tree arg)
{
  tree t, attrs, fntype;
  unsigned HOST_WIDE_INT arg_num;

  gcc_assert (TREE_CODE (arg) == PARM_DECL
	      && (POINTER_TYPE_P (TREE_TYPE (arg))
		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));

  /* The static chain decl is always non null.  */
  if (arg == cfun->static_chain_decl)
    return true;

  /* THIS argument of method is always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
      && arg == DECL_ARGUMENTS (cfun->decl)
      && flag_delete_null_pointer_checks)
    return true;

  /* Values passed by reference are always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
      && flag_delete_null_pointer_checks)
    return true;

  fntype = TREE_TYPE (cfun->decl);
  for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);

      /* If "nonnull" wasn't specified, we know nothing about the argument.  */
      if (attrs == NULL_TREE)
	return false;

      /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
      if (TREE_VALUE (attrs) == NULL_TREE)
	return true;

      /* Get the position number for ARG in the function signature.  */
      for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
	   t;
	   t = DECL_CHAIN (t), arg_num++)
	{
	  if (t == arg)
	    break;
	}

      gcc_assert (t == arg);

      /* Now see if ARG_NUM is mentioned in the nonnull list.  */
      for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	{
	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
	    return true;
	}
    }

  return false;
}
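/* For illustration: within a function declared as

     __attribute__ ((nonnull (1, 3)))
     void f (void *a, void *b, void *c) { ... }

   nonnull_arg_p is true for the PARM_DECLs of A and C and false for B;
   a bare "nonnull" with no arguments makes it true for every pointer
   parameter.  */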
/* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
   information.  */

location_t
set_block (location_t loc, tree block)
{
  location_t pure_loc = get_pure_location (loc);
  source_range src_range = get_range_from_loc (line_table, loc);
  unsigned discriminator = get_discriminator_from_loc (line_table, loc);
  return line_table->get_or_create_combined_loc (pure_loc, src_range, block,
						 discriminator);
}

location_t
set_source_range (tree expr, location_t start, location_t finish)
{
  source_range src_range;
  src_range.m_start = start;
  src_range.m_finish = finish;
  return set_source_range (expr, src_range);
}

location_t
set_source_range (tree expr, source_range src_range)
{
  if (!EXPR_P (expr))
    return UNKNOWN_LOCATION;

  location_t expr_location = EXPR_LOCATION (expr);
  location_t pure_loc = get_pure_location (expr_location);
  unsigned discriminator = get_discriminator_from_loc (expr_location);
  location_t adhoc = line_table->get_or_create_combined_loc (pure_loc,
							     src_range,
							     nullptr,
							     discriminator);
  SET_EXPR_LOCATION (expr, adhoc);
  return adhoc;
}
/* Return EXPR, potentially wrapped with a node expression LOC,
   if !CAN_HAVE_LOCATION_P (expr).

   NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
   VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.

   Wrapper nodes can be identified using location_wrapper_p.  */

tree
maybe_wrap_with_location (tree expr, location_t loc)
{
  if (loc == UNKNOWN_LOCATION)
    return expr;
  if (CAN_HAVE_LOCATION_P (expr))
    return expr;
  /* We should only be adding wrappers for constants and for decls,
     or for some exceptional tree nodes (e.g. BASELINK in the C++ FE).  */
  gcc_assert (CONSTANT_CLASS_P (expr)
	      || DECL_P (expr)
	      || EXCEPTIONAL_CLASS_P (expr));

  /* For now, don't add wrappers to exceptional tree nodes, to minimize
     any impact of the wrapper nodes.  */
  if (EXCEPTIONAL_CLASS_P (expr) || error_operand_p (expr))
    return expr;

  /* Compiler-generated temporary variables don't need a wrapper.  */
  if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
    return expr;

  /* If any auto_suppress_location_wrappers are active, don't create
     wrappers.  */
  if (suppress_location_wrappers > 0)
    return expr;

  tree_code code
    = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
	|| (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
       ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
  tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
  /* Mark this node as being a wrapper.  */
  EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
  return wrapper;
}

int suppress_location_wrappers;
/* Return the name of combined function FN, for debugging purposes.  */

const char *
combined_fn_name (combined_fn fn)
{
  if (builtin_fn_p (fn))
    {
      tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
      return fndecl ? IDENTIFIER_POINTER (DECL_NAME (fndecl)) : "<invalid-fn>";
    }
  else
    return internal_fn_name (as_internal_fn (fn));
}
/* Return a bitmap with a bit set corresponding to each argument in
   a function call type FNTYPE declared with attribute nonnull,
   or null if none of the function's arguments are nonnull.  The caller
   must free the bitmap.  */

bitmap
get_nonnull_args (const_tree fntype)
{
  if (fntype == NULL_TREE)
    return NULL;

  bitmap argmap = NULL;
  if (TREE_CODE (fntype) == METHOD_TYPE)
    {
      /* The this pointer in C++ non-static member functions is
	 implicitly nonnull whether or not it's declared as such.  */
      argmap = BITMAP_ALLOC (NULL);
      bitmap_set_bit (argmap, 0);
    }

  tree attrs = TYPE_ATTRIBUTES (fntype);

  /* A function declaration can specify multiple attribute nonnull,
     each with zero or more arguments.  The loop below creates a bitmap
     representing a union of all the arguments.  An empty (but non-null)
     bitmap means that all arguments have been declared nonnull.  */
  for ( ; attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);
      if (!attrs)
	break;

      if (!argmap)
	argmap = BITMAP_ALLOC (NULL);

      if (!TREE_VALUE (attrs))
	{
	  /* Clear the bitmap in case a previous attribute nonnull
	     set it and this one overrides it for all arguments.  */
	  bitmap_clear (argmap);
	  return argmap;
	}

      /* Iterate over the indices of the format arguments declared nonnull
	 and set a bit for each.  */
      for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
	{
	  unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
	  bitmap_set_bit (argmap, val);
	}
    }

  return argmap;
}
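/* For illustration (hypothetical caller):

     bitmap nonnull = get_nonnull_args (TREE_TYPE (fndecl));
     if (nonnull
	 && (bitmap_empty_p (nonnull) || bitmap_bit_p (nonnull, argno)))
       warn_about_null_argument ();   // argno is declared nonnull
     if (nonnull)
       BITMAP_FREE (nonnull);

   where warn_about_null_argument stands in for whatever the caller does
   with the information.  */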
/* Returns true if TYPE is a type where it and all of its subobjects
   (recursively) are of structure, union, or array type.  */

bool
is_empty_type (const_tree type)
{
  if (RECORD_OR_UNION_TYPE_P (type))
    {
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL
	    && !DECL_PADDING_P (field)
	    && !is_empty_type (TREE_TYPE (field)))
	  return false;
      return true;
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    return (integer_minus_onep (array_type_nelts (type))
	    || TYPE_DOMAIN (type) == NULL_TREE
	    || is_empty_type (TREE_TYPE (type)));
  return false;
}

/* Implement TARGET_EMPTY_RECORD_P.  Return true if TYPE is an empty type
   that shouldn't be passed via stack.  */

bool
default_is_empty_record (const_tree type)
{
  if (!abi_version_at_least (12))
    return false;

  if (type == error_mark_node)
    return false;

  if (TREE_ADDRESSABLE (type))
    return false;

  return is_empty_type (TYPE_MAIN_VARIANT (type));
}
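/* For illustration (assumed classification): "struct E {};" and
   "struct N { struct E e[2]; };" are empty for ABI purposes (no bits are
   passed), while "struct P { char c; };" is not, and a TREE_ADDRESSABLE
   type (e.g. a C++ class with a non-trivial copy constructor) is never
   treated as an empty record.  */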
/* Determine whether TYPE is a structure with a flexible array member,
   or a union containing such a structure (possibly recursively).  */

bool
flexible_array_type_p (const_tree type)
{
  tree x, last;
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      last = NULL_TREE;
      for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
	if (TREE_CODE (x) == FIELD_DECL)
	  last = x;
      if (last == NULL_TREE)
	return false;
      if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
	  && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
	  && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
	  && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
	return true;
      return false;
    case UNION_TYPE:
      for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
	{
	  if (TREE_CODE (x) == FIELD_DECL
	      && flexible_array_type_p (TREE_TYPE (x)))
	    return true;
	}
      return false;
    default:
      return false;
    }
}
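/* For illustration (assumed behaviour): flexible_array_type_p is true for

     struct S { int n; char a[]; };
     union U { struct S s; int i; };

   but false for "struct Z { int n; char a[0]; };", because a zero-length
   array has a TYPE_SIZE of zero rather than a NULL TYPE_SIZE.  */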
/* Like int_size_in_bytes, but handle empty records specially.  */

HOST_WIDE_INT
arg_int_size_in_bytes (const_tree type)
{
  return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
}

/* Like size_in_bytes, but handle empty records specially.  */

tree
arg_size_in_bytes (const_tree type)
{
  return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
}
14787 /* Return true if an expression with CODE has to have the same result type as
14788 its first operand. */
14791 expr_type_first_operand_type_p (tree_code code
)
14804 case TRUNC_DIV_EXPR
:
14805 case CEIL_DIV_EXPR
:
14806 case FLOOR_DIV_EXPR
:
14807 case ROUND_DIV_EXPR
:
14808 case TRUNC_MOD_EXPR
:
14809 case CEIL_MOD_EXPR
:
14810 case FLOOR_MOD_EXPR
:
14811 case ROUND_MOD_EXPR
:
14813 case EXACT_DIV_EXPR
:
/* Return a typenode for the "standard" C type with a given name.  */
tree
get_typenode_from_name (const char *name)
{
  if (name == NULL || *name == '\0')
    return NULL_TREE;

  if (strcmp (name, "char") == 0)
    return char_type_node;
  if (strcmp (name, "unsigned char") == 0)
    return unsigned_char_type_node;
  if (strcmp (name, "signed char") == 0)
    return signed_char_type_node;

  if (strcmp (name, "short int") == 0)
    return short_integer_type_node;
  if (strcmp (name, "short unsigned int") == 0)
    return short_unsigned_type_node;

  if (strcmp (name, "int") == 0)
    return integer_type_node;
  if (strcmp (name, "unsigned int") == 0)
    return unsigned_type_node;

  if (strcmp (name, "long int") == 0)
    return long_integer_type_node;
  if (strcmp (name, "long unsigned int") == 0)
    return long_unsigned_type_node;

  if (strcmp (name, "long long int") == 0)
    return long_long_integer_type_node;
  if (strcmp (name, "long long unsigned int") == 0)
    return long_long_unsigned_type_node;

  gcc_unreachable ();
}
/* List of pointer types used to declare builtins before we have seen their
   real declaration.

   Keep the size up to date in tree.h !  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};

/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
/* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
   parameter default to false and that weeds out error_mark_node.  */

bool
verify_type_context (location_t loc, type_context_kind context,
		     const_tree type, bool silent_p)
{
  if (type == error_mark_node)
    return true;

  gcc_assert (TYPE_P (type));
  return (!targetm.verify_type_context
	  || targetm.verify_type_context (loc, context, type, silent_p));
}

/* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
   delete operators.  Return false if they may or may not name such
   a pair and, when nonnull, set *PCERTAIN to true if they certainly
   do not.  */

bool
valid_new_delete_pair_p (tree new_asm, tree delete_asm,
			 bool *pcertain /* = NULL */)
{
  bool certain;
  if (!pcertain)
    pcertain = &certain;

  const char *new_name = IDENTIFIER_POINTER (new_asm);
  const char *delete_name = IDENTIFIER_POINTER (delete_asm);
  unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
  unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);

  /* The following failures are due to invalid names so they're not
     considered certain mismatches.  */
  *pcertain = false;

  if (new_len < 5 || delete_len < 6)
    return false;

  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;

  if (new_len < 4 || delete_len < 5)
    return false;

  /* The following failures are due to names of user-defined operators
     so they're also not considered certain mismatches.  */

  /* *_len is now just the length after initial underscores.  */
  if (new_name[0] != 'Z' || new_name[1] != 'n')
    return false;
  if (delete_name[0] != 'Z' || delete_name[1] != 'd')
    return false;

  /* The following failures are certain mismatches.  */
  *pcertain = true;

  /* _Znw must match _Zdl, _Zna must match _Zda.  */
  if ((new_name[2] != 'w' || delete_name[2] != 'l')
      && (new_name[2] != 'a' || delete_name[2] != 'a'))
    return false;
  /* 'j', 'm' and 'y' correspond to size_t.  */
  if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
    return false;
  if (delete_name[3] != 'P' || delete_name[4] != 'v')
    return false;

  if (new_len == 4
      || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
    {
      /* _ZnXY or _ZnXYRKSt9nothrow_t matches
	 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t.  */
      if (delete_len == 5)
	return true;
      if (delete_len == 6 && delete_name[5] == new_name[3])
	return true;
      if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
	return true;
    }
  else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
	   || (new_len == 33
	       && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
    {
      /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
	 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
	 _ZdXPvSt11align_val_tRKSt9nothrow_t.  */
      if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
	return true;
      if (delete_len == 21
	  && delete_name[5] == new_name[3]
	  && !memcmp (delete_name + 6, "St11align_val_t", 15))
	return true;
      if (delete_len == 34
	  && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
	return true;
    }

  /* The negative result is conservative.  */
  *pcertain = false;
  return false;
}
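
/* For example, under the Itanium C++ ABI mangling used by GCC:
     _Znwm (operator new (size_t)) pairs with _ZdlPv (operator delete (void *)),
     _Znam (operator new[] (size_t)) pairs with _ZdaPv (operator delete[] (void *)),
     _ZnwmSt11align_val_t pairs with _ZdlPvSt11align_val_t,
   while _Znwm together with _ZdaPv is rejected with *PCERTAIN set to
   true, since scalar new certainly cannot pair with array delete.  */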

/* Return the zero-based number corresponding to the argument being
   deallocated if FNDECL is a deallocation function or an out-of-bounds
   value if it isn't.  */

unsigned
fndecl_dealloc_argno (tree fndecl)
{
  /* A call to operator delete isn't recognized as one to a built-in.  */
  if (DECL_IS_OPERATOR_DELETE_P (fndecl))
    {
      if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
	return 0;

      /* Avoid placement delete that's not been inlined.  */
      tree fname = DECL_ASSEMBLER_NAME (fndecl);
      if (id_equal (fname, "_ZdlPvS_")	      // ordinary form
	  || id_equal (fname, "_ZdaPvS_"))    // array form
	return UINT_MAX;
      return 0;
    }

  /* TODO: Handle user-defined functions with attribute malloc?  Handle
     known non-built-ins like fopen?  */
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_FREE:
	case BUILT_IN_REALLOC:
	case BUILT_IN_GOMP_FREE:
	case BUILT_IN_GOMP_REALLOC:
	  return 0;
	default:
	  break;
	}
      return UINT_MAX;
    }

  tree attrs = DECL_ATTRIBUTES (fndecl);
  if (!attrs)
    return UINT_MAX;

  for (tree atfree = attrs;
       (atfree = lookup_attribute ("*dealloc", atfree));
       atfree = TREE_CHAIN (atfree))
    {
      /* The attribute value names the associated allocation function.  */
      tree alloc = TREE_VALUE (atfree);
      if (!alloc)
	continue;

      /* The chained operand, if any, is the one-based argument number.  */
      tree pos = TREE_CHAIN (alloc);
      if (!pos)
	return 0;

      pos = TREE_VALUE (pos);
      return TREE_INT_CST_LOW (pos) - 1;
    }

  return UINT_MAX;
}
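
/* For example, a call to the built-in free or realloc deallocates its
   first argument, so the result is 0.  For a user-declared pair such as
   (hypothetical declarations)

     void my_free (void *);
     __attribute__ ((malloc (my_free, 1))) void *my_alloc (size_t);

   the malloc attribute records an internal "*dealloc" attribute on
   my_free, and a call to my_free is likewise mapped to argument 0.  */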

/* If EXPR refers to a character array or pointer declared attribute
   nonstring, return a decl for that array or pointer and set *REF
   to the referenced enclosing object or pointer.  Otherwise return
   null.  */

tree
get_attr_nonstring_decl (tree expr, tree *ref)
{
  tree decl = expr;
  tree var = NULL_TREE;
  if (TREE_CODE (decl) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (decl);

      if (is_gimple_assign (def))
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == ADDR_EXPR
	      || code == COMPONENT_REF
	      || code == VAR_DECL)
	    decl = gimple_assign_rhs1 (def);
	}
      else
	var = SSA_NAME_VAR (decl);
    }

  if (TREE_CODE (decl) == ADDR_EXPR)
    decl = TREE_OPERAND (decl, 0);

  /* To simplify calling code, store the referenced DECL regardless of
     the attribute determined below, but avoid storing the SSA_NAME_VAR
     obtained above (it's not useful for dataflow purposes).  */
  if (ref)
    *ref = decl;

  /* Use the SSA_NAME_VAR that was determined above to see if it's
     declared nonstring.  Otherwise drill down into the referenced
     DECL.  */
  if (var)
    decl = var;
  else if (TREE_CODE (decl) == ARRAY_REF)
    decl = TREE_OPERAND (decl, 0);
  else if (TREE_CODE (decl) == COMPONENT_REF)
    decl = TREE_OPERAND (decl, 1);
  else if (TREE_CODE (decl) == MEM_REF)
    return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);

  if (DECL_P (decl)
      && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
    return decl;

  return NULL_TREE;
}
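
/* For example, given the hypothetical declarations

     char fragment[4] __attribute__ ((nonstring));
     strncpy (dst, fragment, sizeof fragment);

   passing the tree for the second strncpy argument here returns the
   VAR_DECL for fragment and stores the referenced object in *REF, which
   lets callers such as the -Wstringop-truncation code suppress warnings
   about arrays that are intentionally not NUL-terminated.  */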

/* Return length of attribute names string,
   if arglist chain > 1, -1 otherwise.  */

int
get_target_clone_attr_len (tree arglist)
{
  tree arg;
  int str_len_sum = 0;
  int argnum = 0;

  for (arg = arglist; arg; arg = TREE_CHAIN (arg))
    {
      const char *str = TREE_STRING_POINTER (TREE_VALUE (arg));
      size_t len = strlen (str);
      str_len_sum += len + 1;
      for (const char *p = strchr (str, ','); p; p = strchr (p + 1, ','))
	argnum++;
      argnum++;
    }
  if (argnum == 1)
    return -1;
  return str_len_sum;
}
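
/* For example, for the hypothetical declaration

     __attribute__ ((target_clones ("avx2", "arch=atom", "default")))
     int foo (void);

   there are three clone names, so the function returns
   strlen ("avx2") + strlen ("arch=atom") + strlen ("default") + 3 = 23;
   a target_clones attribute naming only a single version returns -1.  */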

void
tree_cc_finalize (void)
{
  clear_nonstandard_integer_type_cache ();
  vec_free (bitint_type_cache);
}

#if CHECKING_P

namespace selftest {

/* Selftests for tree.  */

/* Verify that integer constants are sane.  */

static void
test_integer_constants ()
{
  ASSERT_TRUE (integer_type_node != NULL);
  ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);

  tree type = integer_type_node;

  tree zero = build_zero_cst (type);
  ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
  ASSERT_EQ (type, TREE_TYPE (zero));

  tree one = build_int_cst (type, 1);
  ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
  ASSERT_EQ (type, TREE_TYPE (one));
}

/* Verify identifiers.  */

static void
test_identifiers ()
{
  tree identifier = get_identifier ("foo");
  ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
  ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
}

/* Verify LABEL_DECL.  */

static void
test_labels ()
{
  tree identifier = get_identifier ("err");
  tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
				identifier, void_type_node);
  ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
  ASSERT_FALSE (FORCED_LABEL (label_decl));
}
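
/* Identifiers are interned in the stringpool, so an additional property
   that could be asserted here (a sketch, not part of the original
   tests) is pointer equality between repeated lookups:

     ASSERT_EQ (get_identifier ("foo"), get_identifier ("foo"));
*/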

/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are given by VALS.  */

static tree
build_vector (tree type, const vec<tree> &vals MEM_STAT_DECL)
{
  gcc_assert (known_eq (vals.length (), TYPE_VECTOR_SUBPARTS (type)));
  tree_vector_builder builder (type, vals.length (), 1);
  builder.splice (vals);
  return builder.build ();
}

/* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED.  */

static void
check_vector_cst (const vec<tree> &expected, tree actual)
{
  ASSERT_KNOWN_EQ (expected.length (),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual)));
  for (unsigned int i = 0; i < expected.length (); ++i)
    ASSERT_EQ (wi::to_wide (expected[i]),
	       wi::to_wide (vector_cst_elt (actual, i)));
}

/* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
   and that its elements match EXPECTED.  */

static void
check_vector_cst_duplicate (const vec<tree> &expected, tree actual,
			    unsigned int npatterns)
{
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}

/* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
   and NPATTERNS background elements, and that its elements match
   EXPECTED.  */

static void
check_vector_cst_fill (const vec<tree> &expected, tree actual,
		       unsigned int npatterns)
{
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (2 * npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}

/* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
   and that its elements match EXPECTED.  */

static void
check_vector_cst_stepped (const vec<tree> &expected, tree actual,
			  unsigned int npatterns)
{
  ASSERT_EQ (npatterns, VECTOR_CST_NPATTERNS (actual));
  ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual));
  ASSERT_EQ (3 * npatterns, vector_cst_encoded_nelts (actual));
  ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual));
  ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual));
  check_vector_cst (expected, actual);
}

/* Test the creation of VECTOR_CSTs.  */

static void
test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO)
{
  auto_vec<tree, 8> elements (8);
  elements.quick_grow (8);
  tree element_type = build_nonstandard_integer_type (16, true);
  tree vector_type = build_vector_type (element_type, 8);

  /* Test a simple linear series with a base of 0 and a step of 1:
     { 0, 1, 2, 3, 4, 5, 6, 7 }.  */
  for (unsigned int i = 0; i < 8; ++i)
    elements[i] = build_int_cst (element_type, i);
  tree vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try the same with the first element replaced by 100:
     { 100, 1, 2, 3, 4, 5, 6, 7 }.  */
  elements[0] = build_int_cst (element_type, 100);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a series that wraps around.
     { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, (65530 + i) & 0xffff);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try a downward series:
     { 100, 79, 78, 77, 76, 75, 74, 73 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = build_int_cst (element_type, 80 - i);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 1);

  /* Try two interleaved series with different bases and steps:
     { 100, 53, 66, 206, 62, 212, 58, 218 }.  */
  elements[1] = build_int_cst (element_type, 53);
  for (unsigned int i = 2; i < 8; i += 2)
    {
      elements[i] = build_int_cst (element_type, 70 - i * 2);
      elements[i + 1] = build_int_cst (element_type, 200 + i * 3);
    }
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a duplicated value:
     { 100, 100, 100, 100, 100, 100, 100, 100 }.  */
  for (unsigned int i = 1; i < 8; ++i)
    elements[i] = elements[0];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 1);

  /* Try an interleaved duplicated value:
     { 100, 55, 100, 55, 100, 55, 100, 55 }.  */
  elements[1] = build_int_cst (element_type, 55);
  for (unsigned int i = 2; i < 8; ++i)
    elements[i] = elements[i - 2];
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_duplicate (elements, vector, 2);

  /* Try a duplicated value with 2 exceptions
     { 41, 97, 100, 55, 100, 55, 100, 55 }.  */
  elements[0] = build_int_cst (element_type, 41);
  elements[1] = build_int_cst (element_type, 97);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 2);

  /* Try with and without a step
     { 41, 97, 100, 21, 100, 35, 100, 49 }.  */
  for (unsigned int i = 3; i < 8; i += 2)
    elements[i] = build_int_cst (element_type, i * 7);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_stepped (elements, vector, 2);

  /* Try a fully-general constant:
     { 41, 97, 100, 21, 100, 9990, 100, 49 }.  */
  elements[5] = build_int_cst (element_type, 9990);
  vector = build_vector (vector_type, elements PASS_MEM_STAT);
  check_vector_cst_fill (elements, vector, 4);
}

/* Verify that STRIP_NOPS (NODE) is EXPECTED.
   Helper function for test_location_wrappers, to deal with STRIP_NOPS
   modifying its argument in-place.  */

static void
check_strip_nops (tree node, tree expected)
{
  STRIP_NOPS (node);
  ASSERT_EQ (expected, node);
}

/* Verify location wrappers.  */

static void
test_location_wrappers ()
{
  location_t loc = BUILTINS_LOCATION;

  ASSERT_EQ (NULL_TREE, maybe_wrap_with_location (NULL_TREE, loc));

  /* Wrapping a constant.  */
  tree int_cst = build_int_cst (integer_type_node, 42);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst));
  ASSERT_FALSE (location_wrapper_p (int_cst));

  tree wrapped_int_cst = maybe_wrap_with_location (int_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_cst));
  ASSERT_EQ (int_cst, tree_strip_any_location_wrapper (wrapped_int_cst));

  /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION.  */
  ASSERT_EQ (int_cst, maybe_wrap_with_location (int_cst, UNKNOWN_LOCATION));

  /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P.  */
  tree cast = build1 (NOP_EXPR, char_type_node, int_cst);
  ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast));
  ASSERT_EQ (cast, maybe_wrap_with_location (cast, loc));

  /* Wrapping a STRING_CST.  */
  tree string_cst = build_string (4, "foo");
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst));
  ASSERT_FALSE (location_wrapper_p (string_cst));

  tree wrapped_string_cst = maybe_wrap_with_location (string_cst, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_string_cst));
  ASSERT_EQ (VIEW_CONVERT_EXPR, TREE_CODE (wrapped_string_cst));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_string_cst));
  ASSERT_EQ (string_cst,
	     tree_strip_any_location_wrapper (wrapped_string_cst));

  /* Wrapping a variable.  */
  tree int_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			     get_identifier ("some_int_var"),
			     integer_type_node);
  ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var));
  ASSERT_FALSE (location_wrapper_p (int_var));

  tree wrapped_int_var = maybe_wrap_with_location (int_var, loc);
  ASSERT_TRUE (location_wrapper_p (wrapped_int_var));
  ASSERT_EQ (loc, EXPR_LOCATION (wrapped_int_var));
  ASSERT_EQ (int_var, tree_strip_any_location_wrapper (wrapped_int_var));

  /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
     wrapper.  */
  tree r_cast = build1 (NON_LVALUE_EXPR, integer_type_node, int_var);
  ASSERT_FALSE (location_wrapper_p (r_cast));
  ASSERT_EQ (r_cast, tree_strip_any_location_wrapper (r_cast));

  /* Verify that STRIP_NOPS removes wrappers.  */
  check_strip_nops (wrapped_int_cst, int_cst);
  check_strip_nops (wrapped_string_cst, string_cst);
  check_strip_nops (wrapped_int_var, int_var);
}
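
/* A location wrapper is a NON_LVALUE_EXPR (for constants) or
   VIEW_CONVERT_EXPR (for decls and STRING_CSTs) whose only purpose is
   to carry a source location for a node that cannot hold one itself.
   E.g. for a hypothetical call

     f (42);

   the INTEGER_CST for 42 is a shared node; wrapping the argument with
   maybe_wrap_with_location lets diagnostics point at this particular
   use, while tree_strip_any_location_wrapper recovers the underlying
   constant.  */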

/* Test various tree predicates.  Verify that location wrappers don't
   affect the results.  */

static void
test_predicates ()
{
  /* Build various constants and wrappers around them.  */

  location_t loc = BUILTINS_LOCATION;

  tree i_0 = build_int_cst (integer_type_node, 0);
  tree wr_i_0 = maybe_wrap_with_location (i_0, loc);

  tree i_1 = build_int_cst (integer_type_node, 1);
  tree wr_i_1 = maybe_wrap_with_location (i_1, loc);

  tree i_m1 = build_int_cst (integer_type_node, -1);
  tree wr_i_m1 = maybe_wrap_with_location (i_m1, loc);

  tree f_0 = build_real_from_int_cst (float_type_node, i_0);
  tree wr_f_0 = maybe_wrap_with_location (f_0, loc);
  tree f_1 = build_real_from_int_cst (float_type_node, i_1);
  tree wr_f_1 = maybe_wrap_with_location (f_1, loc);
  tree f_m1 = build_real_from_int_cst (float_type_node, i_m1);
  tree wr_f_m1 = maybe_wrap_with_location (f_m1, loc);

  tree c_i_0 = build_complex (NULL_TREE, i_0, i_0);
  tree c_i_1 = build_complex (NULL_TREE, i_1, i_0);
  tree c_i_m1 = build_complex (NULL_TREE, i_m1, i_0);

  tree c_f_0 = build_complex (NULL_TREE, f_0, f_0);
  tree c_f_1 = build_complex (NULL_TREE, f_1, f_0);
  tree c_f_m1 = build_complex (NULL_TREE, f_m1, f_0);

  /* TODO: vector constants.  */

  /* Test integer_onep.  */
  ASSERT_FALSE (integer_onep (i_0));
  ASSERT_FALSE (integer_onep (wr_i_0));
  ASSERT_TRUE (integer_onep (i_1));
  ASSERT_TRUE (integer_onep (wr_i_1));
  ASSERT_FALSE (integer_onep (i_m1));
  ASSERT_FALSE (integer_onep (wr_i_m1));
  ASSERT_FALSE (integer_onep (f_0));
  ASSERT_FALSE (integer_onep (wr_f_0));
  ASSERT_FALSE (integer_onep (f_1));
  ASSERT_FALSE (integer_onep (wr_f_1));
  ASSERT_FALSE (integer_onep (f_m1));
  ASSERT_FALSE (integer_onep (wr_f_m1));
  ASSERT_FALSE (integer_onep (c_i_0));
  ASSERT_TRUE (integer_onep (c_i_1));
  ASSERT_FALSE (integer_onep (c_i_m1));
  ASSERT_FALSE (integer_onep (c_f_0));
  ASSERT_FALSE (integer_onep (c_f_1));
  ASSERT_FALSE (integer_onep (c_f_m1));

  /* Test integer_zerop.  */
  ASSERT_TRUE (integer_zerop (i_0));
  ASSERT_TRUE (integer_zerop (wr_i_0));
  ASSERT_FALSE (integer_zerop (i_1));
  ASSERT_FALSE (integer_zerop (wr_i_1));
  ASSERT_FALSE (integer_zerop (i_m1));
  ASSERT_FALSE (integer_zerop (wr_i_m1));
  ASSERT_FALSE (integer_zerop (f_0));
  ASSERT_FALSE (integer_zerop (wr_f_0));
  ASSERT_FALSE (integer_zerop (f_1));
  ASSERT_FALSE (integer_zerop (wr_f_1));
  ASSERT_FALSE (integer_zerop (f_m1));
  ASSERT_FALSE (integer_zerop (wr_f_m1));
  ASSERT_TRUE (integer_zerop (c_i_0));
  ASSERT_FALSE (integer_zerop (c_i_1));
  ASSERT_FALSE (integer_zerop (c_i_m1));
  ASSERT_FALSE (integer_zerop (c_f_0));
  ASSERT_FALSE (integer_zerop (c_f_1));
  ASSERT_FALSE (integer_zerop (c_f_m1));

  /* Test integer_all_onesp.  */
  ASSERT_FALSE (integer_all_onesp (i_0));
  ASSERT_FALSE (integer_all_onesp (wr_i_0));
  ASSERT_FALSE (integer_all_onesp (i_1));
  ASSERT_FALSE (integer_all_onesp (wr_i_1));
  ASSERT_TRUE (integer_all_onesp (i_m1));
  ASSERT_TRUE (integer_all_onesp (wr_i_m1));
  ASSERT_FALSE (integer_all_onesp (f_0));
  ASSERT_FALSE (integer_all_onesp (wr_f_0));
  ASSERT_FALSE (integer_all_onesp (f_1));
  ASSERT_FALSE (integer_all_onesp (wr_f_1));
  ASSERT_FALSE (integer_all_onesp (f_m1));
  ASSERT_FALSE (integer_all_onesp (wr_f_m1));
  ASSERT_FALSE (integer_all_onesp (c_i_0));
  ASSERT_FALSE (integer_all_onesp (c_i_1));
  ASSERT_FALSE (integer_all_onesp (c_i_m1));
  ASSERT_FALSE (integer_all_onesp (c_f_0));
  ASSERT_FALSE (integer_all_onesp (c_f_1));
  ASSERT_FALSE (integer_all_onesp (c_f_m1));

  /* Test integer_minus_onep.  */
  ASSERT_FALSE (integer_minus_onep (i_0));
  ASSERT_FALSE (integer_minus_onep (wr_i_0));
  ASSERT_FALSE (integer_minus_onep (i_1));
  ASSERT_FALSE (integer_minus_onep (wr_i_1));
  ASSERT_TRUE (integer_minus_onep (i_m1));
  ASSERT_TRUE (integer_minus_onep (wr_i_m1));
  ASSERT_FALSE (integer_minus_onep (f_0));
  ASSERT_FALSE (integer_minus_onep (wr_f_0));
  ASSERT_FALSE (integer_minus_onep (f_1));
  ASSERT_FALSE (integer_minus_onep (wr_f_1));
  ASSERT_FALSE (integer_minus_onep (f_m1));
  ASSERT_FALSE (integer_minus_onep (wr_f_m1));
  ASSERT_FALSE (integer_minus_onep (c_i_0));
  ASSERT_FALSE (integer_minus_onep (c_i_1));
  ASSERT_TRUE (integer_minus_onep (c_i_m1));
  ASSERT_FALSE (integer_minus_onep (c_f_0));
  ASSERT_FALSE (integer_minus_onep (c_f_1));
  ASSERT_FALSE (integer_minus_onep (c_f_m1));

  /* Test integer_each_onep.  */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));

  /* Test integer_truep.  */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));

  /* Test integer_nonzerop.  */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));

  /* Test real_zerop.  */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));

  /* Test real_onep.  */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));

  /* Test real_minus_onep.  */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));

  /* Test zerop.  */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));

  /* Test tree_expr_nonnegative_p.  */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));

  /* Test tree_expr_nonzero_p.  */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));

  /* Test integer_valued_real_p.  */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));

  /* Test integer_pow2p.  */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));

  /* Test uniform_integer_cst_p.  */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}

/* Check that string escaping works correctly.  */

static void
test_escaped_strings (void)
{
  int saved_cutoff;
  escaped_string msg;

  msg.escape (NULL);
  /* ASSERT_STREQ does not accept NULL as a valid test
     result, so we have to use ASSERT_EQ instead.  */
  ASSERT_EQ (NULL, (const char *) msg);

  msg.escape ("");
  ASSERT_STREQ ("", (const char *) msg);

  msg.escape ("foobar");
  ASSERT_STREQ ("foobar", (const char *) msg);

  /* Ensure that we have -fmessage-length set to 0.  */
  saved_cutoff = pp_line_cutoff (global_dc->printer);
  pp_line_cutoff (global_dc->printer) = 0;

  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);

  /* Now repeat the tests with -fmessage-length set to 5.  */
  pp_line_cutoff (global_dc->printer) = 5;

  /* Note that the newline is not translated into an escape.  */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);

  /* Restore the original message length setting.  */
  pp_line_cutoff (global_dc->printer) = saved_cutoff;
}

/* Run all of the selftests within this file.  */

void
tree_cc_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_labels ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}

} // namespace selftest

#endif /* CHECKING_P */

#include "gt-tree.h"