/* Language-independent node constructors for parse phase of GNU compiler.
   Copyright (C) 1987-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains the low level primitives for operating on tree nodes,
   including allocation, list operations, interning of identifiers,
   construction of data type nodes and statement nodes,
   and construction of type conversion nodes.  It also contains
   tables indexed by tree code that describe how to take apart
   nodes of that code.

   It is intended to be language-independent but can occasionally
   call language-dependent routines.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "toplev.h"  /* get_random_seed */
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "langhooks-def.h"
#include "tree-diagnostic.h"
#include "print-tree.h"
#include "ipa-utils.h"
#include "stringpool.h"
#include "tree-vector-builder.h"
#include "gimple-fold.h"
#include "escaped_string.h"
#include "gimple-range.h"
/* Tree code classes.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
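/* For illustration: with DEFTREECODE defined as above, a tree.def entry
   such as

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   expands to the single initializer `tcc_binary,', so the class of a code
   can be looked up as tree_code_type[PLUS_EXPR].  The same all-tree.def
   include is reused below with different definitions of DEFTREECODE to
   build the operand-count and name tables.  */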
/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
/* Names of tree components.
   Used for printing out the tree and error messages.  */

#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries.  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};
/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);
/* Statistics-gathering stuff.  */

static uint64_t tree_code_counts[MAX_TREE_CODES];
uint64_t tree_node_counts[(int) all_kinds];
uint64_t tree_node_sizes[(int) all_kinds];
/* Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;

/* Unique id for next type created.  */
static GTY(()) unsigned next_type_uid = 1;

/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;
/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000
struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};
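/* Like the other *_cache_hasher classes in this file, type_cache_hasher
   derives from ggc_cache_ptr_hash, so the garbage collector treats the
   table as a cache: keep_cache_entry lets an entry be dropped once the
   type it describes is no longer marked, instead of keeping it alive.  */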
/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
/* Class and variable for making sure that there is a single POLY_INT_CST
   for a given value.  */
struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  typedef std::pair<tree, const poly_wide_int *> compare_type;
  static hashval_t hash (tree t);
  static bool equal (tree x, const compare_type &y);
};

static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatedly.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
/* General tree->tree mapping structure for use in hash tables.  */

static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};

static GTY ((cache))
     hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);
tree global_trees[TI_MAX];
tree integer_types[itk_none];

bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees[NUM_INT_N_ENTS];

bool tree_contains_struct[MAX_TREE_CODES][64];
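/* tree_contains_struct[C][TS] records whether nodes of tree code C embed
   the tree structure TS; initialize_tree_contains_struct below fills it in.
   For example, tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS] is set,
   which is what the CODE_CONTAINS_STRUCT checks used throughout the
   compiler rely on.  */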
/* Number of operands for each OMP clause.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  5, /* OMP_CLAUSE_TASK_REDUCTION  */
  5, /* OMP_CLAUSE_IN_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  1, /* OMP_CLAUSE_AFFINITY  */
  2, /* OMP_CLAUSE_ALIGNED  */
  3, /* OMP_CLAUSE_ALLOCATE  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_NONTEMPORAL  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  1, /* OMP_CLAUSE_DETACH  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_USE_DEVICE_ADDR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  1, /* OMP_CLAUSE_INCLUSIVE  */
  1, /* OMP_CLAUSE_EXCLUSIVE  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  2, /* OMP_CLAUSE__CACHE_  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE__REDUCTEMP_  */
  1, /* OMP_CLAUSE__CONDTEMP_  */
  1, /* OMP_CLAUSE__SCANTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_DEVICE_TYPE  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  0, /* OMP_CLAUSE_ORDER  */
  0, /* OMP_CLAUSE_BIND  */
  1, /* OMP_CLAUSE_FILTER  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  0, /* OMP_CLAUSE__SIMT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  3, /* OMP_CLAUSE_TILE  */
  0, /* OMP_CLAUSE_IF_PRESENT  */
  0, /* OMP_CLAUSE_FINALIZE  */
  0, /* OMP_CLAUSE_NOHOST  */
};
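/* For example, omp_clause_num_ops[OMP_CLAUSE_MAP] is 2, so an OMP `map'
   clause carries two operands, while OMP_CLAUSE_NOWAIT carries none.
   tree_size uses this table to compute the size of an OMP_CLAUSE node.  */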
const char * const omp_clause_code_name[] =
/* Return the tree node structure used by tree code CODE.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      switch (code)
	{
	case CONST_DECL:	return TS_CONST_DECL;
	case DEBUG_EXPR_DECL:	return TS_DECL_WRTL;
	case FIELD_DECL:	return TS_FIELD_DECL;
	case FUNCTION_DECL:	return TS_FUNCTION_DECL;
	case LABEL_DECL:	return TS_LABEL_DECL;
	case PARM_DECL:		return TS_PARM_DECL;
	case RESULT_DECL:	return TS_RESULT_DECL;
	case TRANSLATION_UNIT_DECL: return TS_TRANSLATION_UNIT_DECL;
	case TYPE_DECL:		return TS_TYPE_DECL;
	case VAR_DECL:		return TS_VAR_DECL;
	default:		return TS_DECL_NON_COMMON;
	}

    case tcc_type:		return TS_TYPE_NON_COMMON;

    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_statement:
    case tcc_unary:
    case tcc_vl_exp:		return TS_EXP;

    default:  /* tcc_constant and tcc_exceptional */
      break;
    }

  switch (code)
    {
      /* tcc_constant cases.  */
    case COMPLEX_CST:		return TS_COMPLEX;
    case FIXED_CST:		return TS_FIXED_CST;
    case INTEGER_CST:		return TS_INT_CST;
    case POLY_INT_CST:		return TS_POLY_INT_CST;
    case REAL_CST:		return TS_REAL_CST;
    case STRING_CST:		return TS_STRING;
    case VECTOR_CST:		return TS_VECTOR;
    case VOID_CST:		return TS_TYPED;

      /* tcc_exceptional cases.  */
    case BLOCK:			return TS_BLOCK;
    case CONSTRUCTOR:		return TS_CONSTRUCTOR;
    case ERROR_MARK:		return TS_COMMON;
    case IDENTIFIER_NODE:	return TS_IDENTIFIER;
    case OMP_CLAUSE:		return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE:	return TS_OPTIMIZATION;
    case PLACEHOLDER_EXPR:	return TS_COMMON;
    case SSA_NAME:		return TS_SSA_NAME;
    case STATEMENT_LIST:	return TS_STATEMENT_LIST;
    case TARGET_OPTION_NODE:	return TS_TARGET_OPTION;
    case TREE_BINFO:		return TS_BINFO;
    case TREE_LIST:		return TS_LIST;
    case TREE_VEC:		return TS_VEC;

    default:
      gcc_unreachable ();
    }
}
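/* For example, tree_node_structure_for_code (VAR_DECL) returns
   TS_VAR_DECL, and for a tcc_binary code such as PLUS_EXPR it returns
   TS_EXP; initialize_tree_contains_struct below uses this mapping to mark
   every structure a given code contains.  */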
/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes defined in tree.def.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  */
      switch (ts_code)
	{
	case TS_TYPED:
	case TS_BLOCK:
	case TS_OPTIMIZATION:
	case TS_TARGET_OPTION:
	  MARK_TS_BASE (code);
	  break;

	case TS_COMMON:
	case TS_INT_CST:
	case TS_POLY_INT_CST:
	case TS_REAL_CST:
	case TS_FIXED_CST:
	case TS_VECTOR:
	case TS_STRING:
	case TS_COMPLEX:
	case TS_SSA_NAME:
	case TS_CONSTRUCTOR:
	case TS_EXP:
	case TS_STATEMENT_LIST:
	  MARK_TS_TYPED (code);
	  break;

	case TS_IDENTIFIER:
	case TS_DECL_MINIMAL:
	case TS_TYPE_COMMON:
	case TS_LIST:
	case TS_VEC:
	case TS_BINFO:
	case TS_OMP_CLAUSE:
	  MARK_TS_COMMON (code);
	  break;

	case TS_TYPE_WITH_LANG_SPECIFIC:
	  MARK_TS_TYPE_COMMON (code);
	  break;

	case TS_TYPE_NON_COMMON:
	  MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
	  break;

	case TS_DECL_COMMON:
	  MARK_TS_DECL_MINIMAL (code);
	  break;

	case TS_DECL_WRTL:
	case TS_CONST_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_DECL_NON_COMMON:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_DECL_WITH_VIS:
	case TS_PARM_DECL:
	case TS_LABEL_DECL:
	case TS_RESULT_DECL:
	  MARK_TS_DECL_WRTL (code);
	  break;

	case TS_FIELD_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	case TS_VAR_DECL:
	  MARK_TS_DECL_WITH_VIS (code);
	  break;

	case TS_TYPE_DECL:
	case TS_FUNCTION_DECL:
	  MARK_TS_DECL_NON_COMMON (code);
	  break;

	case TS_TRANSLATION_UNIT_DECL:
	  MARK_TS_DECL_COMMON (code);
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
/* Init tree.c.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);

  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
/* The name of the object as the assembler will see it (but before any
   translations made by ASM_OUTPUT_LABELREF).  Often this is the same
   as DECL_NAME.  It is an IDENTIFIER_NODE.  */

tree
decl_assembler_name (tree decl)
{
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    lang_hooks.set_decl_assembler_name (decl);
  return DECL_ASSEMBLER_NAME_RAW (decl);
}
/* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
   (either of which may be NULL).  Inform the FE, if this changes the
   name.  */

void
overwrite_decl_assembler_name (tree decl, tree name)
{
  if (DECL_ASSEMBLER_NAME_RAW (decl) != name)
    lang_hooks.overwrite_decl_assembler_name (decl, name);
}
/* Return true if DECL may need an assembler name to be set.  */

static inline bool
need_assembler_name_p (tree decl)
{
  /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
     Rule merging.  This makes type_odr_p return true on those types during
     LTO, and by comparing the mangled name we can tell which types are
     intended to be equivalent across compilation units.

     We do not store names of type_in_anonymous_namespace_p.

     Record, union and enumeration types have linkage that allows us
     to check type_in_anonymous_namespace_p.  We do not mangle compound types
     that can always be compared structurally.

     Similarly for builtin types, we compare properties of their main variant.
     A special case are integer types where mangling does make a difference
     between char/signed char/unsigned char etc.  Storing names for these
     makes e.g. -fno-signed-char/-fsigned-char mismatches be handled well.
     See cp/mangle.c:write_builtin_type for details.  */

  if (TREE_CODE (decl) == TYPE_DECL)
    {
      if (DECL_NAME (decl)
	  && decl == TYPE_NAME (TREE_TYPE (decl))
	  && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
	  && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
	  && ((TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
	       && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE)
	      || TYPE_CXX_ODR_P (TREE_TYPE (decl)))
	  && (type_with_linkage_p (TREE_TYPE (decl))
	      || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
	  && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
	return !DECL_ASSEMBLER_NAME_SET_P (decl);
      return false;
    }

  /* Only FUNCTION_DECLs and VAR_DECLs are considered.  */
  if (!VAR_OR_FUNCTION_DECL_P (decl))
    return false;

  /* If DECL already has its assembler name set, it does not need a
     new one.  */
  if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
      || DECL_ASSEMBLER_NAME_SET_P (decl))
    return false;

  /* Abstract decls do not need an assembler name.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* For VAR_DECLs, only static, public and external symbols need an
     assembler name.  */
  if (VAR_P (decl)
      && !TREE_STATIC (decl)
      && !TREE_PUBLIC (decl)
      && !DECL_EXTERNAL (decl))
    return false;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Do not set assembler name on builtins.  Allow RTL expansion to
	 decide whether to expand inline or via a regular call.  */
      if (fndecl_built_in_p (decl)
	  && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
	return false;

      /* Functions represented in the callgraph need an assembler name.  */
      if (cgraph_node::get (decl) != NULL)
	return true;

      /* Unused and not public functions don't need an assembler name.  */
      if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
	return false;
    }

  return true;
}
/* If T needs an assembler name, have one created for it.  */

void
assign_assembler_name_if_needed (tree t)
{
  if (need_assembler_name_p (t))
    {
      /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
	 diagnostics that use input_location to show locus
	 information.  The problem here is that, at this point,
	 input_location is generally anchored to the end of the file
	 (since the parser is long gone), so we don't have a good
	 position to pin it to.

	 To alleviate this problem, this uses the location of T's
	 declaration.  Examples of this are
	 testsuite/g++.dg/template/cond2.C and
	 testsuite/g++.dg/template/pr35240.C.  */
      location_t saved_location = input_location;
      input_location = DECL_SOURCE_LOCATION (t);

      decl_assembler_name (t);

      input_location = saved_location;
    }
}
/* When the target supports COMDAT groups, this indicates which group the
   DECL is associated with.  This can be either an IDENTIFIER_NODE or a
   decl, in which case its DECL_ASSEMBLER_NAME identifies the group.  */

tree
decl_comdat_group (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_comdat_group ();
}
/* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE.  */

tree
decl_comdat_group_id (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_comdat_group_id ();
}
/* When the target supports named sections, return the section name of NODE,
   or NULL if it is in no section.  */

const char *
decl_section_name (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_section ();
}
/* Set section name of NODE to VALUE (that is expected to be an
   identifier).  */

void
set_decl_section_name (tree node, const char *value)
{
  struct symtab_node *snode;

  if (value == NULL)
    {
      snode = symtab_node::get (node);
      if (!snode)
	return;
    }
  else if (VAR_P (node))
    snode = varpool_node::get_create (node);
  else
    snode = cgraph_node::get_create (node);
  snode->set_section (value);
}
/* Set section name of NODE to match the section name of OTHER.

   set_decl_section_name (decl, other) is equivalent to
   set_decl_section_name (decl, DECL_SECTION_NAME (other)), but possibly more
   efficient.  */

void
set_decl_section_name (tree decl, const_tree other)
{
  struct symtab_node *other_node = symtab_node::get (other);
  if (other_node)
    {
      struct symtab_node *decl_node;
      if (VAR_P (decl))
	decl_node = varpool_node::get_create (decl);
      else
	decl_node = cgraph_node::get_create (decl);
      decl_node->set_section (*other_node);
    }
  else
    {
      struct symtab_node *decl_node = symtab_node::get (decl);
      if (decl_node)
	decl_node->set_section (NULL);
    }
}
/* Return TLS model of a variable NODE.  */

enum tls_model
decl_tls_model (const_tree node)
{
  struct varpool_node *snode = varpool_node::get (node);
  if (!snode)
    return TLS_MODEL_NONE;
  return snode->tls_model;
}
/* Set TLS model of variable NODE to MODEL.  */

void
set_decl_tls_model (tree node, enum tls_model model)
{
  struct varpool_node *vnode;

  if (model == TLS_MODEL_NONE)
    {
      vnode = varpool_node::get (node);
      if (!vnode)
	return;
    }
  else
    vnode = varpool_node::get_create (node);
  vnode->tls_model = model;
}
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR.  */

size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      switch (code)
	{
	case FIELD_DECL:	return sizeof (tree_field_decl);
	case PARM_DECL:		return sizeof (tree_parm_decl);
	case VAR_DECL:		return sizeof (tree_var_decl);
	case LABEL_DECL:	return sizeof (tree_label_decl);
	case RESULT_DECL:	return sizeof (tree_result_decl);
	case CONST_DECL:	return sizeof (tree_const_decl);
	case TYPE_DECL:		return sizeof (tree_type_decl);
	case FUNCTION_DECL:	return sizeof (tree_function_decl);
	case DEBUG_EXPR_DECL:	return sizeof (tree_decl_with_rtl);
	case TRANSLATION_UNIT_DECL: return sizeof (tree_translation_unit_decl);
	case NAMELIST_DECL:	return sizeof (tree_decl_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_type:  /* a type node */
      switch (code)
	{
	case OFFSET_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case INTEGER_TYPE:
	case REAL_TYPE:
	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case NULLPTR_TYPE:
	case FIXED_POINT_TYPE:
	case COMPLEX_TYPE:
	case VECTOR_TYPE:
	case ARRAY_TYPE:
	case RECORD_TYPE:
	case UNION_TYPE:
	case QUAL_UNION_TYPE:
	case VOID_TYPE:
	case FUNCTION_TYPE:
	case METHOD_TYPE:
	case LANG_TYPE:		return sizeof (tree_type_non_common);
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      return (sizeof (struct tree_exp)
	      + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
	{
	case VOID_CST:		return sizeof (tree_typed);
	case INTEGER_CST:	gcc_unreachable ();
	case POLY_INT_CST:	return sizeof (tree_poly_int_cst);
	case REAL_CST:		return sizeof (tree_real_cst);
	case FIXED_CST:		return sizeof (tree_fixed_cst);
	case COMPLEX_CST:	return sizeof (tree_complex);
	case VECTOR_CST:	gcc_unreachable ();
	case STRING_CST:	gcc_unreachable ();
	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:	return lang_hooks.identifier_size;
	case TREE_LIST:		return sizeof (tree_list);

	case ERROR_MARK:
	case PLACEHOLDER_EXPR:	return sizeof (tree_common);

	case TREE_VEC:		gcc_unreachable ();
	case OMP_CLAUSE:	gcc_unreachable ();

	case SSA_NAME:		return sizeof (tree_ssa_name);

	case STATEMENT_LIST:	return sizeof (tree_statement_list);
	case BLOCK:		return sizeof (struct tree_block);
	case CONSTRUCTOR:	return sizeof (tree_constructor);
	case OPTIMIZATION_NODE: return sizeof (tree_optimization_option);
	case TARGET_OPTION_NODE: return sizeof (tree_target_option);

	default:
	  gcc_checking_assert (code >= NUM_TREE_CODES);
	  return lang_hooks.tree_size (code);
	}

    default:
      gcc_unreachable ();
    }
}
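/* Worked example: for a tcc_binary code such as PLUS_EXPR,
   TREE_CODE_LENGTH is 2, so tree_code_size returns
   sizeof (struct tree_exp) + sizeof (tree) -- the fixed part already holds
   one operand slot.  Variable-sized codes (TREE_VEC, INTEGER_CST,
   STRING_CST, ...) are rejected here and handled by tree_size instead.  */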
/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes.  */

size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      return (sizeof (struct tree_int_cst)
	      + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      return (offsetof (struct tree_binfo, base_binfos)
	      + vec<tree, va_gc>
		  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
	      + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      return (sizeof (struct tree_vector)
	      + (vector_cst_encoded_nelts (node) - 1) * sizeof (tree));

    case STRING_CST:
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
	      + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
		* sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
	return (sizeof (struct tree_exp)
		+ (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
	return tree_code_size (code);
    }
}
/* Return tree node kind based on tree CODE.  */

static tree_node_kind
get_stats_node_kind (enum tree_code code)
{
  enum tree_code_class type = TREE_CODE_CLASS (code);

  switch (type)
    {
    case tcc_declaration:  /* A decl node */
      return d_kind;
    case tcc_type:  /* a type node */
      return t_kind;
    case tcc_statement:  /* an expression with side effects */
      return s_kind;
    case tcc_reference:  /* a reference */
      return r_kind;
    case tcc_expression:  /* an expression */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:  /* a unary arithmetic expression */
    case tcc_binary:  /* a binary arithmetic expression */
      return e_kind;
    case tcc_constant:  /* a constant */
      return c_kind;
    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
	{
	case IDENTIFIER_NODE:
	  return id_kind;
	case TREE_VEC:
	  return vec_kind;
	case TREE_BINFO:
	  return binfo_kind;
	case SSA_NAME:
	  return ssa_name_kind;
	case BLOCK:
	  return b_kind;
	case CONSTRUCTOR:
	  return constr_kind;
	case OMP_CLAUSE:
	  return omp_clause_kind;
	default:
	  return x_kind;
	}
      break;
    case tcc_vl_exp:
      return e_kind;
    default:
      gcc_unreachable ();
    }
}
/* Record interesting allocation statistics for a tree node with CODE
   and LENGTH.  */

static void
record_node_allocation_statistics (enum tree_code code, size_t length)
{
  if (!GATHER_STATISTICS)
    return;

  tree_node_kind kind = get_stats_node_kind (code);

  tree_code_counts[(int) code]++;
  tree_node_counts[(int) kind]++;
  tree_node_sizes[(int) kind] += length;
}
/* Allocate and return a new UID from the DECL_UID namespace.  */

int
allocate_decl_uid (void)
{
  return next_decl_uid++;
}
/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      if (code != DEBUG_BEGIN_STMT)
	TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
	{
	  if (code == FUNCTION_DECL)
	    {
	      SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
	      SET_DECL_MODE (t, FUNCTION_MODE);
	    }
	  else
	    SET_DECL_ALIGN (t, 1);
	}
      DECL_SOURCE_LOCATION (t) = input_location;
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  SET_DECL_PT_UID (t, -1);
	}
      if (TREE_CODE (t) == LABEL_DECL)
	LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      SET_TYPE_ALIGN (t, BITS_PER_UNIT);
      TYPE_USER_ALIGN (t) = 0;
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  TREE_SIDE_EFFECTS (t) = 1;
	  break;

	default:
	  break;
	}
      break;

    case tcc_exceptional:
      switch (code)
	{
	case TARGET_OPTION_NODE:
	  TREE_TARGET_OPTION(t)
	    = ggc_cleared_alloc<struct cl_target_option> ();
	  break;

	case OPTIMIZATION_NODE:
	  TREE_OPTIMIZATION (t)
	    = ggc_cleared_alloc<struct cl_optimization> ();
	  break;

	default:
	  break;
	}
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
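/* For example, init_ttree above calls make_node (OPTIMIZATION_NODE): the
   node comes back zeroed except for its code and the cl_optimization block
   allocated in the tcc_exceptional case above.  Decl and type nodes
   additionally receive fresh UIDs, alignment and a source location here.  */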
/* Free tree node.  */

void
free_node (tree node)
{
  enum tree_code code = TREE_CODE (node);
  if (GATHER_STATISTICS)
    {
      enum tree_node_kind kind = get_stats_node_kind (code);

      gcc_checking_assert (tree_code_counts[(int) TREE_CODE (node)] != 0);
      gcc_checking_assert (tree_node_counts[(int) kind] != 0);
      gcc_checking_assert (tree_node_sizes[(int) kind] >= tree_size (node));

      tree_code_counts[(int) TREE_CODE (node)]--;
      tree_node_counts[(int) kind]--;
      tree_node_sizes[(int) kind] -= tree_size (node);
    }
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    vec_free (CONSTRUCTOR_ELTS (node));
  else if (code == BLOCK)
    vec_free (BLOCK_NONLOCALIZED_VARS (node));
  else if (code == TREE_BINFO)
    vec_free (BINFO_BASE_ACCESSES (node));
  else if (code == OPTIMIZATION_NODE)
    cl_optimization_option_free (TREE_OPTIMIZATION (node));
  else if (code == TARGET_OPTION_NODE)
    cl_target_option_free (TREE_TARGET_OPTION (node));
  ggc_free (node);
}
/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      if (code == DEBUG_EXPR_DECL)
	DECL_UID (t) = --next_debug_decl_uid;
      else
	{
	  DECL_UID (t) = allocate_decl_uid ();
	  if (DECL_PT_UID_SET_P (node))
	    SET_DECL_PT_UID (t, DECL_PT_UID (node));
	}
      if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
	  && DECL_HAS_VALUE_EXPR_P (node))
	{
	  SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
	  DECL_HAS_VALUE_EXPR_P (t) = 1;
	}
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (VAR_P (node))
	{
	  DECL_HAS_DEBUG_EXPR_P (t) = 0;
	  t->decl_with_vis.symtab_node = NULL;
	}
      if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
	{
	  SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
	  DECL_HAS_INIT_PRIORITY_P (t) = 1;
	}
      if (TREE_CODE (node) == FUNCTION_DECL)
	{
	  DECL_STRUCT_FUNCTION (t) = NULL;
	  t->decl_with_vis.symtab_node = NULL;
	}
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
	 the copy is different from the original type.
	 The two statements usually duplicate each other
	 (because they clear fields of the same union),
	 but the optimizer should catch that.  */
      TYPE_SYMTAB_ADDRESS (t) = 0;
      TYPE_SYMTAB_DIE (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
	{
	  TYPE_CACHED_VALUES_P (t) = 0;
	  TYPE_CACHED_VALUES (t) = NULL_TREE;
	}
    }
  else if (code == TARGET_OPTION_NODE)
    {
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
	      sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
	      sizeof (struct cl_optimization));
    }

  return t;
}
/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  */

tree
copy_list (tree list)
{
  tree head;
  tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  next = TREE_CHAIN (list);
  while (next)
    {
      TREE_CHAIN (prev) = copy_node (next);
      prev = TREE_CHAIN (prev);
      next = TREE_CHAIN (next);
    }
  return head;
}
/* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
   INTEGER_CST with value CST and type TYPE.  */

static unsigned int
get_int_cst_ext_nunits (tree type, const wide_int &cst)
{
  gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
  /* We need extra HWIs if CST is an unsigned integer with its
     upper bit set.  */
  if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
    return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
  return cst.get_len ();
}
/* Return a new INTEGER_CST with value CST and type TYPE.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
	= zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
	TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
	   && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      len--;
      TREE_INT_CST_ELT (nt, len)
	= zext_hwi (cst.elt (len),
		    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
/* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE.  */

static tree
build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS]
			CXX_MEM_STAT_INFO)
{
  size_t length = sizeof (struct tree_poly_int_cst);
  record_node_allocation_statistics (POLY_INT_CST, length);

  tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, POLY_INT_CST);
  TREE_CONSTANT (t) = 1;
  TREE_TYPE (t) = type;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    POLY_INT_CST_COEFF (t, i) = coeffs[i];
  return t;
}
/* Create a constant tree that contains CST sign-extended to TYPE.  */

tree
build_int_cst (tree type, poly_int64 cst)
{
  /* Support legacy code.  */
  if (!type)
    type = integer_type_node;

  return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
}
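/* For example, build_int_cst (integer_type_node, 42) yields the shared
   INTEGER_CST node for 42; NULL_TREE may be passed for the type by legacy
   callers and is treated as integer_type_node.  */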
/* Create a constant tree that contains CST zero-extended to TYPE.  */

tree
build_int_cstu (tree type, poly_uint64 cst)
{
  return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
}
/* Create a constant tree that contains CST sign-extended to TYPE.  */

tree
build_int_cst_type (tree type, poly_int64 cst)
{
  gcc_assert (type);
  return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
}
/* Constructs tree in type TYPE with value given by CST.  Signedness
   of CST is assumed to be the same as the signedness of TYPE.  */

tree
double_int_to_tree (tree type, double_int cst)
{
  return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
}
/* We force the wide_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended wide_int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type (tree type, const poly_wide_int_ref &cst,
		int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign == SIGNED))
	{
	  poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
						   sign);
	  tree t;
	  if (tmp.is_constant ())
	    t = build_new_int_cst (type, tmp.coeffs[0]);
	  else
	    {
	      tree coeffs[NUM_POLY_INT_COEFFS];
	      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
		{
		  coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
		  TREE_OVERFLOW (coeffs[i]) = 1;
		}
	      t = build_new_poly_int_cst (type, coeffs);
	    }
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

hashval_t
int_cst_hasher::hash (tree x)
{
  const_tree const t = x;
  hashval_t code = TYPE_UID (TREE_TYPE (t));
  int i;

  for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
    code = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), code);

  return code;
}
/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, which is also an INTEGER_CST node.  */

bool
int_cst_hasher::equal (tree x, tree y)
{
  const_tree const xt = x;
  const_tree const yt = y;

  if (TREE_TYPE (xt) != TREE_TYPE (yt)
      || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
      || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
    return false;

  for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
    if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
      return false;

  return true;
}
/* Cache wide_int CST into the TYPE_CACHED_VALUES cache for TYPE.
   SLOT is the slot entry to store it in, and MAX_SLOTS is the maximum
   number of slots that can be cached for the type.  */

static inline tree
cache_wide_int_in_type_cache (tree type, const wide_int &cst,
			      int slot, int max_slots)
{
  gcc_checking_assert (slot >= 0);
  /* Initialize cache.  */
  if (!TYPE_CACHED_VALUES_P (type))
    {
      TYPE_CACHED_VALUES_P (type) = 1;
      TYPE_CACHED_VALUES (type) = make_tree_vec (max_slots);
    }
  tree t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot);
  if (!t)
    {
      /* Create a new shared int.  */
      t = build_new_int_cst (type, cst);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), slot) = t;
    }
  return t;
}
/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

static tree
wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;
  int limit = 0;

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  enum tree_code code = TREE_CODE (type);
  if (code == POINTER_TYPE || code == REFERENCE_TYPE)
    {
      /* Cache NULL pointer and zero bounds.  */
      if (cst == 0)
	ix = 0;
      /* Cache upper bounds of pointers.  */
      else if (cst == wi::max_value (prec, sgn))
	ix = 1;
      /* Cache 1 which is used for a non-zero range.  */
      else if (cst == 1)
	ix = 2;

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, 3);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && cst == wi::to_wide (t));
	  return t;
	}
    }
  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      switch (code)
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	  /* Ignore pointers, as they were already handled above.  */
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (IN_RANGE (hwi, 0, 1))
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = param_integer_share_limit;
	      if (IN_RANGE (hwi, 0, param_integer_share_limit - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N).  */
	      limit = param_integer_share_limit + 1;
	      if (IN_RANGE (hwi, -1, param_integer_share_limit - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  t = cache_wide_int_in_type_cache (type, cst, ix, limit);
	  /* Make sure no one is clobbering the shared constant.  */
	  gcc_checking_assert (TREE_TYPE (t) == type
			       && TREE_INT_CST_NUNITS (t) == 1
			       && TREE_INT_CST_OFFSET_NUNITS (t) == 1
			       && TREE_INT_CST_EXT_NUNITS (t) == 1
			       && TREE_INT_CST_ELT (t, 0) == hwi);
	  return t;
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
      else
	ggc_free (nt);
    }

  return t;
}
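/* Cache slot layout used above, for a signed integral type: slot 0 holds
   -1 and slot N+1 holds N (ix = hwi + 1), while for an unsigned type slot
   N holds N; pointer types only cache 0, their maximum value and 1 in a
   three-slot vector.  cache_integer_cst below must index the same way.  */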
hashval_t
poly_int_cst_hasher::hash (tree t)
{
  inchash::hash hstate;

  hstate.add_int (TYPE_UID (TREE_TYPE (t)));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));

  return hstate.end ();
}
bool
poly_int_cst_hasher::equal (tree x, const compare_type &y)
{
  if (TREE_TYPE (x) != y.first)
    return false;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
      return false;
  return true;
}
/* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
   The elements must also have type TYPE.  */

tree
build_poly_int_cst (tree type, const poly_wide_int_ref &values)
{
  unsigned int prec = TYPE_PRECISION (type);
  gcc_assert (prec <= values.coeffs[0].get_precision ());
  poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);

  inchash::hash h;
  h.add_int (TYPE_UID (type));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (c.coeffs[i]);
  poly_int_cst_hasher::compare_type comp (type, &c);
  tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
							      INSERT);
  if (*slot == NULL_TREE)
    {
      tree coeffs[NUM_POLY_INT_COEFFS];
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
      *slot = build_new_poly_int_cst (type, coeffs);
    }
  return *slot;
}
/* Create a constant tree with value VALUE in type TYPE.  */

tree
wide_int_to_tree (tree type, const poly_wide_int_ref &value)
{
  if (value.is_constant ())
    return wide_int_to_tree_1 (type, value.coeffs[0]);
  return build_poly_int_cst (type, value);
}
/* Insert INTEGER_CST T into a cache of integer constants.  And return
   the cached constant (which may or may not be T).  If MIGHT_DUPLICATE
   is false, and T falls into the type's 'smaller values' range, there
   cannot be an existing entry.  Otherwise, if MIGHT_DUPLICATE is true,
   or the value is large, should an existing entry exist, it is
   returned (rather than inserting T).  */

tree
cache_integer_cst (tree t, bool might_duplicate ATTRIBUTE_UNUSED)
{
  tree type = TREE_TYPE (t);
  int ix = -1;
  int limit = 0;
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  /* The caching indices here must match those in
     wide_int_to_type_1.  */
  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_checking_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	if (integer_zerop (t))
	  ix = 0;
	else if (integer_onep (t))
	  ix = 2;

	if (ix >= 0)
	  limit = 3;
      }
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (wi::to_wide (t), 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache [0, N).  */
	  limit = param_integer_share_limit;

	  /* This is a little hokey, but if the prec is smaller than
	     what is necessary to hold param_integer_share_limit, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t)
		  < (unsigned HOST_WIDE_INT) param_integer_share_limit)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache [-1, N).  */
	  limit = param_integer_share_limit + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (wi::to_wide (t)))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < param_integer_share_limit)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (wi::to_wide (t), param_integer_share_limit))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      /* The slot used by TYPE_CACHED_VALUES is used for the enum
	 members.  */
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      if (tree r = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix))
	{
	  gcc_checking_assert (might_duplicate);
	  t = r;
	}
      else
	TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      if (tree r = *slot)
	{
	  /* If there is already an entry for the number verify it's the
	     same value.  */
	  gcc_checking_assert (wi::to_wide (tree (r)) == wi::to_wide (t));
	  /* And return the cached value.  */
	  t = r;
	}
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }

  return t;
}
/* Builds an integer constant in TYPE such that the lowest BITS bits are ones
   and the rest are zeros.  */

tree
build_low_bits_mask (tree type, unsigned bits)
{
  gcc_assert (bits <= TYPE_PRECISION (type));

  return wide_int_to_tree (type, wi::mask (bits, false,
					   TYPE_PRECISION (type)));
}
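/* For example, build_low_bits_mask (type, 4) yields the constant 0xf in
   TYPE, since wi::mask (4, false, prec) sets exactly the low four bits.  */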
/* Checks that X is an integer constant that can be expressed in (unsigned)
   HOST_WIDE_INT without loss of precision.  */

bool
cst_and_fits_in_hwi (const_tree x)
{
  return (TREE_CODE (x) == INTEGER_CST
	  && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
}
/* Build a newly constructed VECTOR_CST with the given values of
   (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN.  */

static tree
make_vector (unsigned log2_npatterns,
	     unsigned int nelts_per_pattern MEM_STAT_DECL)
{
  tree t;
  gcc_assert (IN_RANGE (nelts_per_pattern, 1, 3));

  unsigned npatterns = 1 << log2_npatterns;
  unsigned encoded_nelts = npatterns * nelts_per_pattern;
  unsigned length = (sizeof (struct tree_vector)
		     + (encoded_nelts - 1) * sizeof (tree));

  record_node_allocation_statistics (VECTOR_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, VECTOR_CST);
  TREE_CONSTANT (t) = 1;
  VECTOR_CST_LOG2_NPATTERNS (t) = log2_npatterns;
  VECTOR_CST_NELTS_PER_PATTERN (t) = nelts_per_pattern;

  return t;
}
/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  */

tree
build_vector_from_ctor (tree type, const vec<constructor_elt, va_gc> *v)
{
  if (vec_safe_length (v) == 0)
    return build_zero_cst (type);

  unsigned HOST_WIDE_INT idx, nelts;
  tree value;

  /* We can't construct a VECTOR_CST for a variable number of elements.  */
  nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
  tree_vector_builder vec (type, nelts, 1);
  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    {
      if (TREE_CODE (value) == VECTOR_CST)
	{
	  /* If NELTS is constant then this must be too.  */
	  unsigned int sub_nelts = VECTOR_CST_NELTS (value).to_constant ();
	  for (unsigned i = 0; i < sub_nelts; ++i)
	    vec.quick_push (VECTOR_CST_ELT (value, i));
	}
      else
	vec.quick_push (value);
    }
  while (vec.length () < nelts)
    vec.quick_push (build_zero_cst (TREE_TYPE (type)));

  return vec.build ();
}

/* Build a vector of type VECTYPE where all the elements are SCs.  */

tree
build_vector_from_val (tree vectype, tree sc)
{
  unsigned HOST_WIDE_INT i, nunits;

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
					   TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      tree_vector_builder v (vectype, 1, 1);
      v.quick_push (sc);
      return v.build ();
    }
  else if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits))
    return fold_build1 (VEC_DUPLICATE_EXPR, vectype, sc);
  else
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}
/* If TYPE is not a vector type, just return SC, otherwise return
   build_vector_from_val (TYPE, SC).  */

tree
build_uniform_cst (tree type, tree sc)
{
  if (!VECTOR_TYPE_P (type))
    return sc;

  return build_vector_from_val (type, sc);
}
/* Build a vector series of type TYPE in which element I has the value
   BASE + I * STEP.  The result is a constant if BASE and STEP are constant
   and a VEC_SERIES_EXPR otherwise.  */

tree
build_vec_series (tree type, tree base, tree step)
{
  if (integer_zerop (step))
    return build_vector_from_val (type, base);
  if (TREE_CODE (base) == INTEGER_CST && TREE_CODE (step) == INTEGER_CST)
    {
      tree_vector_builder builder (type, 1, 3);
      tree elt1 = wide_int_to_tree (TREE_TYPE (base),
				    wi::to_wide (base) + wi::to_wide (step));
      tree elt2 = wide_int_to_tree (TREE_TYPE (base),
				    wi::to_wide (elt1) + wi::to_wide (step));
      builder.quick_push (base);
      builder.quick_push (elt1);
      builder.quick_push (elt2);
      return builder.build ();
    }
  return build2 (VEC_SERIES_EXPR, type, base, step);
}
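/* The constant case above uses a tree_vector_builder with one pattern of
   three elements: pushing BASE, BASE + STEP and BASE + 2 * STEP is enough
   for the VECTOR_CST encoding to extrapolate the rest of the series.  */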
/* Return a vector with the same number of units and number of bits
   as VEC_TYPE, but in which the elements are a linear series of unsigned
   integers { BASE, BASE + STEP, BASE + STEP * 2, ... }.  */

tree
build_index_vector (tree vec_type, poly_uint64 base, poly_uint64 step)
{
  tree index_vec_type = vec_type;
  tree index_elt_type = TREE_TYPE (vec_type);
  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vec_type);
  if (!INTEGRAL_TYPE_P (index_elt_type) || !TYPE_UNSIGNED (index_elt_type))
    {
      index_elt_type = build_nonstandard_integer_type
	(GET_MODE_BITSIZE (SCALAR_TYPE_MODE (index_elt_type)), true);
      index_vec_type = build_vector_type (index_elt_type, nunits);
    }

  tree_vector_builder v (index_vec_type, 1, 3);
  for (unsigned int i = 0; i < 3; ++i)
    v.quick_push (build_int_cstu (index_elt_type, base + i * step));
  return v.build ();
}
/* Return a VECTOR_CST of type VEC_TYPE in which the first NUM_A
   elements are A and the rest are B.  */

tree
build_vector_a_then_b (tree vec_type, unsigned int num_a, tree a, tree b)
{
  gcc_assert (known_le (num_a, TYPE_VECTOR_SUBPARTS (vec_type)));
  unsigned int count = constant_lower_bound (TYPE_VECTOR_SUBPARTS (vec_type));
  /* Optimize the constant case.  */
  if ((count & 1) == 0 && TYPE_VECTOR_SUBPARTS (vec_type).is_constant ())
    count /= 2;
  tree_vector_builder builder (vec_type, count, 2);
  for (unsigned int i = 0; i < count * 2; ++i)
    builder.quick_push (i < num_a ? a : b);
  return builder.build ();
}
/* Something has messed with the elements of CONSTRUCTOR C after it was built;
   calculate TREE_CONSTANT and TREE_SIDE_EFFECTS.  */

void
recompute_constructor_flags (tree c)
{
  unsigned int i;
  tree val;
  bool constant_p = true;
  bool side_effects_p = false;
  vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);

  FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
    {
      /* Mostly ctors will have elts that don't have side-effects, so
	 the usual case is to scan all the elements.  Hence a single
	 loop for both const and side effects, rather than one loop
	 each (with early outs).  */
      if (!TREE_CONSTANT (val))
	constant_p = false;
      if (TREE_SIDE_EFFECTS (val))
	side_effects_p = true;
    }

  TREE_SIDE_EFFECTS (c) = side_effects_p;
  TREE_CONSTANT (c) = constant_p;
}
/* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
   CONSTRUCTOR C.  */

void
verify_constructor_flags (tree c)
{
  unsigned int i;
  tree val;
  bool constant_p = TREE_CONSTANT (c);
  bool side_effects_p = TREE_SIDE_EFFECTS (c);
  vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);

  FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
    {
      if (constant_p && !TREE_CONSTANT (val))
	internal_error ("non-constant element in constant CONSTRUCTOR");
      if (!side_effects_p && TREE_SIDE_EFFECTS (val))
	internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
    }
}
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in the vec pointed to by VALS.  */

tree
build_constructor (tree type, vec<constructor_elt, va_gc> *vals MEM_STAT_DECL)
{
  tree c = make_node (CONSTRUCTOR PASS_MEM_STAT);

  TREE_TYPE (c) = type;
  CONSTRUCTOR_ELTS (c) = vals;

  recompute_constructor_flags (c);

  return c;
}
/* Build a CONSTRUCTOR node made of a single initializer, with the specified
   INDEX and VALUE.  */

tree
build_constructor_single (tree type, tree index, tree value)
{
  vec<constructor_elt, va_gc> *v;
  constructor_elt elt = {index, value};

  vec_alloc (v, 1);
  v->quick_push (elt);

  return build_constructor (type, v);
}
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in a list pointed to by VALS.  */

tree
build_constructor_from_list (tree type, tree vals)
{
  tree t;
  vec<constructor_elt, va_gc> *v = NULL;

  if (vals)
    {
      vec_alloc (v, list_length (vals));
      for (t = vals; t; t = TREE_CHAIN (t))
	CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
    }

  return build_constructor (type, v);
}
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in a vector pointed to by VALS.  Note that the TREE_PURPOSE
   fields in the constructor remain null.  */

tree
build_constructor_from_vec (tree type, const vec<tree, va_gc> *vals)
{
  vec<constructor_elt, va_gc> *v = NULL;

  for (tree t : vals)
    CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);

  return build_constructor (type, v);
}
/* Return a new CONSTRUCTOR node whose type is TYPE.  NELTS is the number
   of elements, provided as index/value pairs.  */

tree
build_constructor_va (tree type, int nelts, ...)
{
  vec<constructor_elt, va_gc> *v = NULL;
  va_list p;

  va_start (p, nelts);
  vec_alloc (v, nelts);
  while (nelts--)
    {
      tree index = va_arg (p, tree);
      tree value = va_arg (p, tree);
      CONSTRUCTOR_APPEND_ELT (v, index, value);
    }
  va_end (p);
  return build_constructor (type, v);
}
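/* Illustrative sketch (not part of GCC sources): building the
   initializer { [0] = x, [1] = y } for a hypothetical array type
   ARR_TYPE with the varargs helper above; X and Y are hypothetical
   value trees.  The index/value pairs are consumed in order, exactly
   as explicit calls to CONSTRUCTOR_APPEND_ELT would add them:

     tree ctor = build_constructor_va (arr_type, 2,
				       size_int (0), x,
				       size_int (1), y);  */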
/* Return a node of type TYPE for which TREE_CLOBBER_P is true.  */

tree
build_clobber (tree type)
{
  tree clobber = build_constructor (type, NULL);
  TREE_THIS_VOLATILE (clobber) = true;
  return clobber;
}
/* Return a new FIXED_CST node whose type is TYPE and value is F.  */

tree
build_fixed (tree type, FIXED_VALUE_TYPE f)
{
  tree v;
  FIXED_VALUE_TYPE *fp;

  v = make_node (FIXED_CST);
  fp = ggc_alloc<fixed_value> ();
  memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_FIXED_CST_PTR (v) = fp;
  return v;
}
/* Return a new REAL_CST node whose type is TYPE and value is D.  */

tree
build_real (tree type, REAL_VALUE_TYPE d)
{
  tree v;
  REAL_VALUE_TYPE *dp;
  int overflow = 0;

  /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
     Consider doing it via real_convert now.  */

  v = make_node (REAL_CST);
  dp = ggc_alloc<real_value> ();
  memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));

  TREE_TYPE (v) = type;
  TREE_REAL_CST_PTR (v) = dp;
  TREE_OVERFLOW (v) = overflow;
  return v;
}
/* Like build_real, but first truncate D to the type.  */

tree
build_real_truncate (tree type, REAL_VALUE_TYPE d)
{
  return build_real (type, real_value_truncate (TYPE_MODE (type), d));
}
/* Return a new REAL_CST node whose type is TYPE
   and whose value is the integer value of the INTEGER_CST node I.  */

REAL_VALUE_TYPE
real_value_from_int_cst (const_tree type, const_tree i)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, wi::to_wide (i),
		     TYPE_SIGN (TREE_TYPE (i)));
  return d;
}
/* Given a tree representing an integer constant I, return a tree
   representing the same value as a floating-point constant of type TYPE.  */

tree
build_real_from_int_cst (tree type, const_tree i)
{
  tree v;
  int overflow = TREE_OVERFLOW (i);

  v = build_real (type, real_value_from_int_cst (type, i));

  TREE_OVERFLOW (v) |= overflow;
  return v;
}
/* Return a new REAL_CST node whose type is TYPE
   and whose value is the integer value I which has sign SGN.  */

tree
build_real_from_wide (tree type, const wide_int_ref &i, signop sgn)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  real_from_integer (&d, TYPE_MODE (type), i, sgn);
  return build_real (type, d);
}
/* Return a newly constructed STRING_CST node whose value is the LEN
   characters at STR when STR is nonnull, or all zeros otherwise.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (unsigned len, const char *str /*= NULL */)
{
  /* Do not waste bytes provided by padding of struct tree_string.  */
  unsigned size = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, size);

  tree s = (tree) ggc_internal_alloc (size);

  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  if (str)
    memcpy (s->string.str, str, len);
  else
    memset (s->string.str, 0, len);
  s->string.str[len] = '\0';

  return s;
}
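/* Illustrative sketch (not part of GCC sources): creating a STRING_CST
   for the C literal "hi".  The length passed in includes the trailing
   NUL, and the caller is responsible for giving the node a type, here
   a char array with domain [0..2]:

     const char *p = "hi";
     tree str = build_string (strlen (p) + 1, p);
     TREE_TYPE (str)
       = build_array_type (char_type_node,
			   build_index_type (size_int (2)));  */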
/* Return a newly constructed COMPLEX_CST node whose value is
   specified by the real and imaginary parts REAL and IMAG.
   Both REAL and IMAG should be constant nodes.  TYPE, if specified,
   will be the type of the COMPLEX_CST; otherwise a new type will be made.  */

tree
build_complex (tree type, tree real, tree imag)
{
  gcc_assert (CONSTANT_CLASS_P (real));
  gcc_assert (CONSTANT_CLASS_P (imag));

  tree t = make_node (COMPLEX_CST);

  TREE_REALPART (t) = real;
  TREE_IMAGPART (t) = imag;
  TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
  TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
  return t;
}
/* Build a complex (inf +- 0i), such as for the result of cproj.
   TYPE is the complex tree type of the result.  If NEG is true, the
   imaginary zero is negative.  */

tree
build_complex_inf (tree type, bool neg)
{
  REAL_VALUE_TYPE rinf, rzero = dconst0;

  real_inf (&rinf);
  rzero.sign = neg;
  return build_complex (type, build_real (TREE_TYPE (type), rinf),
			build_real (TREE_TYPE (type), rzero));
}
/* Return the constant 1 in type TYPE.  If TYPE has several elements, each
   element is set to 1.  In particular, this is 1 + i for complex types.  */

tree
build_each_one_cst (tree type)
{
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree scalar = build_one_cst (TREE_TYPE (type));
      return build_complex (type, scalar, scalar);
    }
  else
    return build_one_cst (type);
}
/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type, FCONST1 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	tree scalar = build_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      return build_complex (type,
			    build_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
/* Return an integer of type TYPE containing all 1's in as much precision as
   it contains, or a complex or vector whose subparts are such integers.  */

tree
build_all_ones_cst (tree type)
{
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree scalar = build_all_ones_cst (TREE_TYPE (type));
      return build_complex (type, scalar, scalar);
    }
  else
    return build_minus_one_cst (type);
}
/* Return a constant of arithmetic type TYPE which is the
   opposite of the multiplicative identity of the set TYPE.  */

tree
build_minus_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, -1);

    case REAL_TYPE:
      return build_real (type, dconstm1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type,
			  fixed_from_double_int (double_int_minus_one,
						 SCALAR_TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	tree scalar = build_minus_one_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      return build_complex (type,
			    build_minus_one_cst (TREE_TYPE (type)),
			    build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
/* Build 0 constant of type TYPE.  This is used by constructor folding
   and thus the constant should be represented in memory by
   zero(s).  */

tree
build_zero_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE: case NULLPTR_TYPE:
      return build_int_cst (type, 0);

    case REAL_TYPE:
      return build_real (type, dconst0);

    case FIXED_POINT_TYPE:
      return build_fixed (type, FCONST0 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	tree scalar = build_zero_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      {
	tree zero = build_zero_cst (TREE_TYPE (type));

	return build_complex (type, zero, zero);
      }

    default:
      if (!AGGREGATE_TYPE_P (type))
	return fold_convert (type, integer_zero_node);
      return build_constructor (type, NULL);
    }
}
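/* Illustrative sketch (not part of GCC sources): the *_cst builders
   above recurse through vector and complex element types, so a single
   call yields a correctly typed constant:

     tree z = build_zero_cst (complex_double_type_node);  0.0 + 0.0i
     tree o = build_one_cst (integer_type_node);	   1
     tree m = build_minus_one_cst (intSI_type_node);	   -1  */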
/* Build a BINFO with LEN language slots.  */

tree
make_tree_binfo (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}
/* Create a CASE_LABEL_EXPR tree node and return it.  */

tree
build_case_label (tree low_value, tree high_value, tree label_decl)
{
  tree t = make_node (CASE_LABEL_EXPR);

  TREE_TYPE (t) = void_type_node;
  SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));

  CASE_LOW (t) = low_value;
  CASE_HIGH (t) = high_value;
  CASE_LABEL (t) = label_decl;
  CASE_CHAIN (t) = NULL_TREE;

  return t;
}
/* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
   values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
   The latter determines the length of the HOST_WIDE_INT vector.  */

tree
make_int_cst (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}
/* Build a newly constructed TREE_VEC node of length LEN.  */

tree
make_tree_vec (int len MEM_STAT_DECL)
{
  tree t;
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  record_node_allocation_statistics (TREE_VEC, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, TREE_VEC);
  TREE_VEC_LENGTH (t) = len;

  return t;
}
/* Grow a TREE_VEC node to new length LEN.  */

tree
grow_tree_vec (tree v, int len MEM_STAT_DECL)
{
  gcc_assert (TREE_CODE (v) == TREE_VEC);

  int oldlen = TREE_VEC_LENGTH (v);
  gcc_assert (len > oldlen);

  size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
  size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  record_node_allocation_statistics (TREE_VEC, length - oldlength);

  v = (tree) ggc_realloc (v, length PASS_MEM_STAT);

  TREE_VEC_LENGTH (v) = len;

  return v;
}
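/* Illustrative sketch (not part of GCC sources): a TREE_VEC is a
   fixed-size vector of trees, and growing one reallocates the node, so
   the caller must use the returned pointer:

     tree v = make_tree_vec (2);
     TREE_VEC_ELT (v, 0) = integer_zero_node;
     TREE_VEC_ELT (v, 1) = integer_one_node;
     v = grow_tree_vec (v, 4);	  now has room for four elements  */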
/* Return 1 if EXPR is the constant zero, whether it is integral, float or
   fixed, and scalar, complex or vector.  */

bool
zerop (const_tree expr)
{
  return (integer_zerop (expr)
	  || real_zerop (expr)
	  || fixed_zerop (expr));
}
/* Return 1 if EXPR is the integer constant zero or a complex constant
   of zero, or a location wrapper for such a constant.  */

bool
integer_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return wi::to_wide (expr) == 0;
    case COMPLEX_CST:
      return (integer_zerop (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && integer_zerop (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
/* Return 1 if EXPR is the integer constant one or the corresponding
   complex constant, or a location wrapper for such a constant.  */

bool
integer_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return wi::eq_p (wi::to_widest (expr), 1);
    case COMPLEX_CST:
      return (integer_onep (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      return (VECTOR_CST_NPATTERNS (expr) == 1
	      && VECTOR_CST_DUPLICATE_P (expr)
	      && integer_onep (VECTOR_CST_ENCODED_ELT (expr, 0)));
    default:
      return false;
    }
}
/* Return 1 if EXPR is the integer constant one.  For complex and vector,
   return 1 if every piece is the integer constant one.
   Also return 1 for location wrappers for such a constant.  */

bool
integer_each_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  if (TREE_CODE (expr) == COMPLEX_CST)
    return (integer_onep (TREE_REALPART (expr))
	    && integer_onep (TREE_IMAGPART (expr)));
  else
    return integer_onep (expr);
}
/* Return 1 if EXPR is an integer containing all 1's in as much precision as
   it contains, or a complex or vector whose subparts are such integers,
   or a location wrapper for such a constant.  */

bool
integer_all_onesp (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  if (TREE_CODE (expr) == COMPLEX_CST
      && integer_all_onesp (TREE_REALPART (expr))
      && integer_all_onesp (TREE_IMAGPART (expr)))
    return true;

  else if (TREE_CODE (expr) == VECTOR_CST)
    return (VECTOR_CST_NPATTERNS (expr) == 1
	    && VECTOR_CST_DUPLICATE_P (expr)
	    && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr, 0)));

  else if (TREE_CODE (expr) != INTEGER_CST)
    return false;

  return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED)
	  == wi::to_wide (expr));
}
/* Return 1 if EXPR is the integer constant minus one, or a location wrapper
   for such a constant.  */

bool
integer_minus_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  if (TREE_CODE (expr) == COMPLEX_CST)
    return (integer_all_onesp (TREE_REALPART (expr))
	    && integer_zerop (TREE_IMAGPART (expr)));
  else
    return integer_all_onesp (expr);
}
/* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
   one bit on), or a location wrapper for such a constant.  */

bool
integer_pow2p (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  if (TREE_CODE (expr) == COMPLEX_CST
      && integer_pow2p (TREE_REALPART (expr))
      && integer_zerop (TREE_IMAGPART (expr)))
    return true;

  if (TREE_CODE (expr) != INTEGER_CST)
    return false;

  return wi::popcount (wi::to_wide (expr)) == 1;
}
/* Return 1 if EXPR is an integer constant other than zero or a
   complex constant other than zero, or a location wrapper for such a
   constant.  */

bool
integer_nonzerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  return ((TREE_CODE (expr) == INTEGER_CST
	   && wi::to_wide (expr) != 0)
	  || (TREE_CODE (expr) == COMPLEX_CST
	      && (integer_nonzerop (TREE_REALPART (expr))
		  || integer_nonzerop (TREE_IMAGPART (expr)))));
}
/* Return 1 if EXPR is the integer constant one.  For vector,
   return 1 if every piece is the integer constant minus one
   (representing the value TRUE).
   Also return 1 for location wrappers for such a constant.  */

bool
integer_truep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  if (TREE_CODE (expr) == VECTOR_CST)
    return integer_all_onesp (expr);
  return integer_onep (expr);
}
/* Return 1 if EXPR is the fixed-point constant zero, or a location wrapper
   for such a constant.  */

bool
fixed_zerop (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  return (TREE_CODE (expr) == FIXED_CST
	  && TREE_FIXED_CST (expr).data.is_zero ());
}
/* Return the power of two represented by a tree node known to be a
   power of two.  */

int
tree_log2 (const_tree expr)
{
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::exact_log2 (wi::to_wide (expr));
}

/* Similar, but return the largest integer Y such that 2 ** Y is less
   than or equal to EXPR.  */

int
tree_floor_log2 (const_tree expr)
{
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::floor_log2 (wi::to_wide (expr));
}
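/* Illustrative sketch (not part of GCC sources): for INTEGER_CSTs these
   are thin wrappers around the wide-int helpers:

     tree c8 = build_int_cst (integer_type_node, 8);
     tree_log2 (c8)	    returns 3
     tree_floor_log2 (c8)   returns 3

   For 9, tree_floor_log2 would return 3 while tree_log2 would return -1,
   since wi::exact_log2 yields -1 for values that are not powers of two.  */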
2960 /* Return number of known trailing zero bits in EXPR, or, if the value of
2961 EXPR is known to be zero, the precision of it's type. */
2964 tree_ctz (const_tree expr
)
2966 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
2967 && !POINTER_TYPE_P (TREE_TYPE (expr
)))
2970 unsigned int ret1
, ret2
, prec
= TYPE_PRECISION (TREE_TYPE (expr
));
2971 switch (TREE_CODE (expr
))
2974 ret1
= wi::ctz (wi::to_wide (expr
));
2975 return MIN (ret1
, prec
);
2977 ret1
= wi::ctz (get_nonzero_bits (expr
));
2978 return MIN (ret1
, prec
);
2985 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2988 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2989 return MIN (ret1
, ret2
);
2990 case POINTER_PLUS_EXPR
:
2991 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2992 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2993 /* Second operand is sizetype, which could be in theory
2994 wider than pointer's precision. Make sure we never
2995 return more than prec. */
2996 ret2
= MIN (ret2
, prec
);
2997 return MIN (ret1
, ret2
);
2999 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3000 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3001 return MAX (ret1
, ret2
);
3003 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3004 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
3005 return MIN (ret1
+ ret2
, prec
);
3007 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3008 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
3009 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
3011 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
3012 return MIN (ret1
+ ret2
, prec
);
3016 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
3017 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
3019 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3020 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
3025 case TRUNC_DIV_EXPR
:
3027 case FLOOR_DIV_EXPR
:
3028 case ROUND_DIV_EXPR
:
3029 case EXACT_DIV_EXPR
:
3030 if (TREE_CODE (TREE_OPERAND (expr
, 1)) == INTEGER_CST
3031 && tree_int_cst_sgn (TREE_OPERAND (expr
, 1)) == 1)
3033 int l
= tree_log2 (TREE_OPERAND (expr
, 1));
3036 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3044 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
3045 if (ret1
&& ret1
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr
, 0))))
3047 return MIN (ret1
, prec
);
3049 return tree_ctz (TREE_OPERAND (expr
, 0));
3051 ret1
= tree_ctz (TREE_OPERAND (expr
, 1));
3054 ret2
= tree_ctz (TREE_OPERAND (expr
, 2));
3055 return MIN (ret1
, ret2
);
3057 return tree_ctz (TREE_OPERAND (expr
, 1));
3059 ret1
= get_pointer_alignment (CONST_CAST_TREE (expr
));
3060 if (ret1
> BITS_PER_UNIT
)
3062 ret1
= ctz_hwi (ret1
/ BITS_PER_UNIT
);
3063 return MIN (ret1
, prec
);
3071 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
3072 decimal float constants, so don't return 1 for them.
3073 Also return 1 for location wrappers around such a constant. */
3076 real_zerop (const_tree expr
)
3078 STRIP_ANY_LOCATION_WRAPPER (expr
);
3080 switch (TREE_CODE (expr
))
3083 return real_equal (&TREE_REAL_CST (expr
), &dconst0
)
3084 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3086 return real_zerop (TREE_REALPART (expr
))
3087 && real_zerop (TREE_IMAGPART (expr
));
3090 /* Don't simply check for a duplicate because the predicate
3091 accepts both +0.0 and -0.0. */
3092 unsigned count
= vector_cst_encoded_nelts (expr
);
3093 for (unsigned int i
= 0; i
< count
; ++i
)
3094 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr
, i
)))
3103 /* Return 1 if EXPR is the real constant one in real or complex form.
3104 Trailing zeroes matter for decimal float constants, so don't return
3106 Also return 1 for location wrappers around such a constant. */
3109 real_onep (const_tree expr
)
3111 STRIP_ANY_LOCATION_WRAPPER (expr
);
3113 switch (TREE_CODE (expr
))
3116 return real_equal (&TREE_REAL_CST (expr
), &dconst1
)
3117 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3119 return real_onep (TREE_REALPART (expr
))
3120 && real_zerop (TREE_IMAGPART (expr
));
3122 return (VECTOR_CST_NPATTERNS (expr
) == 1
3123 && VECTOR_CST_DUPLICATE_P (expr
)
3124 && real_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
3130 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
3131 matter for decimal float constants, so don't return 1 for them.
3132 Also return 1 for location wrappers around such a constant. */
3135 real_minus_onep (const_tree expr
)
3137 STRIP_ANY_LOCATION_WRAPPER (expr
);
3139 switch (TREE_CODE (expr
))
3142 return real_equal (&TREE_REAL_CST (expr
), &dconstm1
)
3143 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
3145 return real_minus_onep (TREE_REALPART (expr
))
3146 && real_zerop (TREE_IMAGPART (expr
));
3148 return (VECTOR_CST_NPATTERNS (expr
) == 1
3149 && VECTOR_CST_DUPLICATE_P (expr
)
3150 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
/* Nonzero if EXP is a constant or a cast of a constant.  */

bool
really_constant_p (const_tree exp)
{
  /* This is not quite the same as STRIP_NOPS.  It does more.  */
  while (CONVERT_EXPR_P (exp)
	 || TREE_CODE (exp) == NON_LVALUE_EXPR)
    exp = TREE_OPERAND (exp, 0);
  return TREE_CONSTANT (exp);
}
/* Return true if T holds a polynomial pointer difference, storing it in
   *VALUE if so.  A true return means that T's precision is no greater
   than 64 bits, which is the largest address space we support, so *VALUE
   never loses precision.  However, the signedness of the result does
   not necessarily match the signedness of T: sometimes an unsigned type
   like sizetype is used to encode a value that is actually negative.  */

bool
ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
{
  if (!t)
    return false;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      if (!cst_and_fits_in_hwi (t))
	return false;
      *value = int_cst_value (t);
      return true;
    }
  if (POLY_INT_CST_P (t))
    {
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
	  return false;
      for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
      return true;
    }
  return false;
}
poly_int64
tree_to_poly_int64 (const_tree t)
{
  gcc_assert (tree_fits_poly_int64_p (t));
  if (POLY_INT_CST_P (t))
    return poly_int_cst_value (t).force_shwi ();
  return TREE_INT_CST_LOW (t);
}

poly_uint64
tree_to_poly_uint64 (const_tree t)
{
  gcc_assert (tree_fits_poly_uint64_p (t));
  if (POLY_INT_CST_P (t))
    return poly_int_cst_value (t).force_uhwi ();
  return TREE_INT_CST_LOW (t);
}
/* Return first list element whose TREE_VALUE is ELEM.
   Return 0 if ELEM is not in LIST.  */

tree
value_member (tree elem, tree list)
{
  while (list)
    {
      if (elem == TREE_VALUE (list))
	return list;
      list = TREE_CHAIN (list);
    }
  return NULL_TREE;
}

/* Return first list element whose TREE_PURPOSE is ELEM.
   Return 0 if ELEM is not in LIST.  */

tree
purpose_member (const_tree elem, tree list)
{
  while (list)
    {
      if (elem == TREE_PURPOSE (list))
	return list;
      list = TREE_CHAIN (list);
    }
  return NULL_TREE;
}

/* Return true if ELEM is in V.  */

bool
vec_member (const_tree elem, vec<tree, va_gc> *v)
{
  unsigned ix;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (v, ix, t)
    if (elem == t)
      return true;
  return false;
}
/* Returns element number IDX (zero-origin) of chain CHAIN, or
   NULL_TREE if there is no such element.  */

tree
chain_index (int idx, tree chain)
{
  for (; chain && idx > 0; --idx)
    chain = TREE_CHAIN (chain);
  return chain;
}

/* Return nonzero if ELEM is part of the chain CHAIN.  */

bool
chain_member (const_tree elem, const_tree chain)
{
  while (chain)
    {
      if (elem == chain)
	return true;
      chain = DECL_CHAIN (chain);
    }

  return false;
}

/* Return the length of a chain of nodes chained through TREE_CHAIN.
   We expect a null pointer to mark the end of the chain.
   This is the Lisp primitive `length'.  */

int
list_length (const_tree t)
{
  const_tree p = t;
#ifdef ENABLE_TREE_CHECKING
  const_tree q = t;
#endif
  int len = 0;

  while (p)
    {
      p = TREE_CHAIN (p);
#ifdef ENABLE_TREE_CHECKING
      if (len % 2)
	q = TREE_CHAIN (q);
      gcc_assert (p != q);
#endif
      len++;
    }

  return len;
}
/* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
   UNION_TYPE TYPE, or NULL_TREE if none.  */

tree
first_field (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  while (t && TREE_CODE (t) != FIELD_DECL)
    t = TREE_CHAIN (t);
  return t;
}

/* Returns the last FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
   UNION_TYPE TYPE, or NULL_TREE if none.  */

tree
last_field (const_tree type)
{
  tree last = NULL_TREE;

  for (tree fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
    {
      if (TREE_CODE (fld) != FIELD_DECL)
	continue;

      last = fld;
    }

  return last;
}
/* Concatenate two chains of nodes (chained through TREE_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.
   This is the Lisp primitive `nconc'.  */

tree
chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
    continue;
  TREE_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
/* Return the last node in a chain of nodes (chained through TREE_CHAIN).  */

tree
tree_last (tree chain)
{
  tree next;
  if (chain)
    while ((next = TREE_CHAIN (chain)))
      chain = next;
  return chain;
}

/* Reverse the order of elements in the chain T,
   and return the new head of the chain (old last element).  */

tree
nreverse (tree t)
{
  tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      /* We shouldn't be using this function to reverse BLOCK chains; we
	 have blocks_nreverse for that.  */
      gcc_checking_assert (TREE_CODE (decl) != BLOCK);
      next = TREE_CHAIN (decl);
      TREE_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PARM and VALUE.  */

tree
build_tree_list (tree parm, tree value MEM_STAT_DECL)
{
  tree t = make_node (TREE_LIST PASS_MEM_STAT);
  TREE_PURPOSE (t) = parm;
  TREE_VALUE (t) = value;
  return t;
}

/* Build a chain of TREE_LIST nodes from a vector.  */

tree
build_tree_list_vec (const vec<tree, va_gc> *vec MEM_STAT_DECL)
{
  tree ret = NULL_TREE;
  tree *pp = &ret;
  unsigned int i;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (vec, i, t)
    {
      *pp = build_tree_list (NULL, t PASS_MEM_STAT);
      pp = &TREE_CHAIN (*pp);
    }
  return ret;
}

/* Return a newly created TREE_LIST node whose
   purpose and value fields are PURPOSE and VALUE
   and whose TREE_CHAIN is CHAIN.  */

tree
tree_cons (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
  tree node;

  node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
  memset (node, 0, sizeof (struct tree_common));

  record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));

  TREE_SET_CODE (node, TREE_LIST);
  TREE_CHAIN (node) = chain;
  TREE_PURPOSE (node) = purpose;
  TREE_VALUE (node) = value;
  return node;
}
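/* Illustrative sketch (not part of GCC sources): build_tree_list_vec
   turns a GC vector of trees into a TREE_LIST chain with null
   TREE_PURPOSEs, preserving order; ARG0 and ARG1 are hypothetical
   trees:

     vec<tree, va_gc> *args = NULL;
     vec_safe_push (args, arg0);
     vec_safe_push (args, arg1);
     tree list = build_tree_list_vec (args);
     gcc_assert (list_length (list) == 2);  */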
/* Return the values of the elements of a CONSTRUCTOR as a vector of
   trees.  */

vec<tree, va_gc> *
ctor_to_vec (tree ctor)
{
  vec<tree, va_gc> *vec;
  vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
  unsigned int ix;
  tree val;

  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
    vec->quick_push (val);

  return vec;
}
/* Return the size nominally occupied by an object of type TYPE
   when it resides in memory.  The value is measured in units of bytes,
   and its data type is that normally used for type sizes
   (which is the first type created by make_signed_type or
   make_unsigned_type).  */

tree
size_in_bytes_loc (location_t loc, const_tree type)
{
  tree t;

  if (type == error_mark_node)
    return integer_zero_node;

  type = TYPE_MAIN_VARIANT (type);
  t = TYPE_SIZE_UNIT (type);

  if (t == 0)
    {
      lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
      return size_zero_node;
    }

  return t;
}
/* Return the size of TYPE (in bytes) as a wide integer
   or return -1 if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_size_in_bytes (const_tree type)
{
  tree t;

  if (type == error_mark_node)
    return 0;

  type = TYPE_MAIN_VARIANT (type);
  t = TYPE_SIZE_UNIT (type);

  if (t && tree_fits_uhwi_p (t))
    return TREE_INT_CST_LOW (t);
  else
    return -1;
}
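/* Illustrative sketch (not part of GCC sources): int_size_in_bytes
   folds the size down to a host integer when it is known and
   representable, and reports -1 otherwise (variable-length or
   incomplete types):

     int_size_in_bytes (char_type_node)    returns 1
     int_size_in_bytes (error_mark_node)   returns 0
     a VLA type or an undeclared struct    yields -1  */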
3517 /* Return the maximum size of TYPE (in bytes) as a wide integer
3518 or return -1 if the size can vary or is larger than an integer. */
3521 max_int_size_in_bytes (const_tree type
)
3523 HOST_WIDE_INT size
= -1;
3526 /* If this is an array type, check for a possible MAX_SIZE attached. */
3528 if (TREE_CODE (type
) == ARRAY_TYPE
)
3530 size_tree
= TYPE_ARRAY_MAX_SIZE (type
);
3532 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3533 size
= tree_to_uhwi (size_tree
);
3536 /* If we still haven't been able to get a size, see if the language
3537 can compute a maximum size. */
3541 size_tree
= lang_hooks
.types
.max_size (type
);
3543 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3544 size
= tree_to_uhwi (size_tree
);
/* Return the bit position of FIELD, in bits from the start of the record.
   This is a tree of type bitsizetype.  */

tree
bit_position (const_tree field)
{
  return bit_from_pos (DECL_FIELD_OFFSET (field),
		       DECL_FIELD_BIT_OFFSET (field));
}

/* Return the byte position of FIELD, in bytes from the start of the record.
   This is a tree of type sizetype.  */

tree
byte_position (const_tree field)
{
  return byte_from_pos (DECL_FIELD_OFFSET (field),
			DECL_FIELD_BIT_OFFSET (field));
}
/* Likewise, but return as an integer.  It must be representable in
   that way (since it could be a signed value, we don't have the
   option of returning -1 like int_size_in_byte can.  */

HOST_WIDE_INT
int_byte_position (const_tree field)
{
  return tree_to_shwi (byte_position (field));
}
3580 /* Return, as a tree node, the number of elements for TYPE (which is an
3581 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3584 array_type_nelts (const_tree type
)
3586 tree index_type
, min
, max
;
3588 /* If they did it with unspecified bounds, then we should have already
3589 given an error about it before we got here. */
3590 if (! TYPE_DOMAIN (type
))
3591 return error_mark_node
;
3593 index_type
= TYPE_DOMAIN (type
);
3594 min
= TYPE_MIN_VALUE (index_type
);
3595 max
= TYPE_MAX_VALUE (index_type
);
3597 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3600 /* zero sized arrays are represented from C FE as complete types with
3601 NULL TYPE_MAX_VALUE and zero TYPE_SIZE, while C++ FE represents
3602 them as min 0, max -1. */
3603 if (COMPLETE_TYPE_P (type
)
3604 && integer_zerop (TYPE_SIZE (type
))
3605 && integer_zerop (min
))
3606 return build_int_cst (TREE_TYPE (min
), -1);
3608 return error_mark_node
;
3611 return (integer_zerop (min
)
3613 : fold_build2 (MINUS_EXPR
, TREE_TYPE (max
), max
, min
));
3616 /* If arg is static -- a reference to an object in static storage -- then
3617 return the object. This is not the same as the C meaning of `static'.
3618 If arg isn't static, return NULL. */
3623 switch (TREE_CODE (arg
))
3626 /* Nested functions are static, even though taking their address will
3627 involve a trampoline as we unnest the nested function and create
3628 the trampoline on the tree level. */
3632 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3633 && ! DECL_THREAD_LOCAL_P (arg
)
3634 && ! DECL_DLLIMPORT_P (arg
)
3638 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3642 return TREE_STATIC (arg
) ? arg
: NULL
;
3649 /* If the thing being referenced is not a field, then it is
3650 something language specific. */
3651 gcc_assert (TREE_CODE (TREE_OPERAND (arg
, 1)) == FIELD_DECL
);
3653 /* If we are referencing a bitfield, we can't evaluate an
3654 ADDR_EXPR at compile time and so it isn't a constant. */
3655 if (DECL_BIT_FIELD (TREE_OPERAND (arg
, 1)))
3658 return staticp (TREE_OPERAND (arg
, 0));
3664 return TREE_CONSTANT (TREE_OPERAND (arg
, 0)) ? arg
: NULL
;
3667 case ARRAY_RANGE_REF
:
3668 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg
))) == INTEGER_CST
3669 && TREE_CODE (TREE_OPERAND (arg
, 1)) == INTEGER_CST
)
3670 return staticp (TREE_OPERAND (arg
, 0));
3674 case COMPOUND_LITERAL_EXPR
:
3675 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg
)) ? arg
: NULL
;
3685 /* Return whether OP is a DECL whose address is function-invariant. */
3688 decl_address_invariant_p (const_tree op
)
3690 /* The conditions below are slightly less strict than the one in
3693 switch (TREE_CODE (op
))
3702 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3703 || DECL_THREAD_LOCAL_P (op
)
3704 || DECL_CONTEXT (op
) == current_function_decl
3705 || decl_function_context (op
) == current_function_decl
)
3710 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3711 || decl_function_context (op
) == current_function_decl
)
3722 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3725 decl_address_ip_invariant_p (const_tree op
)
3727 /* The conditions below are slightly less strict than the one in
3730 switch (TREE_CODE (op
))
3738 if (((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3739 && !DECL_DLLIMPORT_P (op
))
3740 || DECL_THREAD_LOCAL_P (op
))
3745 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
)))
3757 /* Return true if T is function-invariant (internal function, does
3758 not handle arithmetic; that's handled in skip_simple_arithmetic and
3759 tree_invariant_p). */
3762 tree_invariant_p_1 (tree t
)
3766 if (TREE_CONSTANT (t
)
3767 || (TREE_READONLY (t
) && !TREE_SIDE_EFFECTS (t
)))
3770 switch (TREE_CODE (t
))
3776 op
= TREE_OPERAND (t
, 0);
3777 while (handled_component_p (op
))
3779 switch (TREE_CODE (op
))
3782 case ARRAY_RANGE_REF
:
3783 if (!tree_invariant_p (TREE_OPERAND (op
, 1))
3784 || TREE_OPERAND (op
, 2) != NULL_TREE
3785 || TREE_OPERAND (op
, 3) != NULL_TREE
)
3790 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
3796 op
= TREE_OPERAND (op
, 0);
3799 return CONSTANT_CLASS_P (op
) || decl_address_invariant_p (op
);
/* Return true if T is function-invariant.  */

bool
tree_invariant_p (tree t)
{
  tree inner = skip_simple_arithmetic (t);
  return tree_invariant_p_1 (inner);
}

/* Wrap a SAVE_EXPR around EXPR, if appropriate.
   Do this to any expression which may be used in more than one place,
   but must be evaluated only once.

   Normally, expand_expr would reevaluate the expression each time.
   Calling save_expr produces something that is evaluated and recorded
   the first time expand_expr is called on it.  Subsequent calls to
   expand_expr just reuse the recorded value.

   The call to expand_expr that generates code that actually computes
   the value is the first call *at compile time*.  Subsequent calls
   *at compile time* generate code to use the saved value.
   This produces correct result provided that *at run time* control
   always flows through the insns made by the first expand_expr
   before reaching the other places where the save_expr was evaluated.
   You, the caller of save_expr, must make sure this is so.

   Constants, and certain read-only nodes, are returned with no
   SAVE_EXPR because that is safe.  Expressions containing placeholders
   are not touched; see tree.def for an explanation of what these
   are used for.  */

tree
save_expr (tree expr)
{
  tree inner;

  /* If the tree evaluates to a constant, then we don't want to hide that
     fact (i.e. this allows further folding, and direct checks for constants).
     However, a read-only object that has side effects cannot be bypassed.
     Since it is no problem to reevaluate literals, we just return the
     literal node.  */
  inner = skip_simple_arithmetic (expr);
  if (TREE_CODE (inner) == ERROR_MARK)
    return inner;

  if (tree_invariant_p_1 (inner))
    return expr;

  /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
     it means that the size or offset of some field of an object depends on
     the value within another field.

     Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
     and some variable since it would then need to be both evaluated once and
     evaluated more than once.  Front-ends must assure this case cannot
     happen by surrounding any such subexpressions in their own SAVE_EXPR
     and forcing evaluation at the proper time.  */
  if (contains_placeholder_p (inner))
    return expr;

  expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);

  /* This expression might be placed ahead of a jump to ensure that the
     value was computed on both sides of the jump.  So make sure it isn't
     eliminated as dead.  */
  TREE_SIDE_EFFECTS (expr) = 1;
  return expr;
}
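/* Illustrative sketch (not part of GCC sources): wrapping a
   side-effecting expression so it can be reused; EXPR is a
   hypothetical tree such as a CALL_EXPR.  Invariant operands come back
   unwrapped:

     tree once = save_expr (expr);
     tree sum  = build2 (PLUS_EXPR, TREE_TYPE (once), once, once);
	  EXPR is evaluated a single time even though ONCE is used twice

     save_expr (integer_one_node) == integer_one_node  */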
3877 /* Look inside EXPR into any simple arithmetic operations. Return the
3878 outermost non-arithmetic or non-invariant node. */
3881 skip_simple_arithmetic (tree expr
)
3883 /* We don't care about whether this can be used as an lvalue in this
3885 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3886 expr
= TREE_OPERAND (expr
, 0);
3888 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3889 a constant, it will be more efficient to not make another SAVE_EXPR since
3890 it will allow better simplification and GCSE will be able to merge the
3891 computations if they actually occur. */
3894 if (UNARY_CLASS_P (expr
))
3895 expr
= TREE_OPERAND (expr
, 0);
3896 else if (BINARY_CLASS_P (expr
))
3898 if (tree_invariant_p (TREE_OPERAND (expr
, 1)))
3899 expr
= TREE_OPERAND (expr
, 0);
3900 else if (tree_invariant_p (TREE_OPERAND (expr
, 0)))
3901 expr
= TREE_OPERAND (expr
, 1);
3912 /* Look inside EXPR into simple arithmetic operations involving constants.
3913 Return the outermost non-arithmetic or non-constant node. */
3916 skip_simple_constant_arithmetic (tree expr
)
3918 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3919 expr
= TREE_OPERAND (expr
, 0);
3923 if (UNARY_CLASS_P (expr
))
3924 expr
= TREE_OPERAND (expr
, 0);
3925 else if (BINARY_CLASS_P (expr
))
3927 if (TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
3928 expr
= TREE_OPERAND (expr
, 0);
3929 else if (TREE_CONSTANT (TREE_OPERAND (expr
, 0)))
3930 expr
= TREE_OPERAND (expr
, 1);
3941 /* Return which tree structure is used by T. */
3943 enum tree_node_structure_enum
3944 tree_node_structure (const_tree t
)
3946 const enum tree_code code
= TREE_CODE (t
);
3947 return tree_node_structure_for_code (code
);
3950 /* Set various status flags when building a CALL_EXPR object T. */
3953 process_call_operands (tree t
)
3955 bool side_effects
= TREE_SIDE_EFFECTS (t
);
3956 bool read_only
= false;
3957 int i
= call_expr_flags (t
);
3959 /* Calls have side-effects, except those to const or pure functions. */
3960 if ((i
& ECF_LOOPING_CONST_OR_PURE
) || !(i
& (ECF_CONST
| ECF_PURE
)))
3961 side_effects
= true;
3962 /* Propagate TREE_READONLY of arguments for const functions. */
3966 if (!side_effects
|| read_only
)
3967 for (i
= 1; i
< TREE_OPERAND_LENGTH (t
); i
++)
3969 tree op
= TREE_OPERAND (t
, i
);
3970 if (op
&& TREE_SIDE_EFFECTS (op
))
3971 side_effects
= true;
3972 if (op
&& !TREE_READONLY (op
) && !CONSTANT_CLASS_P (op
))
3976 TREE_SIDE_EFFECTS (t
) = side_effects
;
3977 TREE_READONLY (t
) = read_only
;
3980 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3981 size or offset that depends on a field within a record. */
3984 contains_placeholder_p (const_tree exp
)
3986 enum tree_code code
;
3991 code
= TREE_CODE (exp
);
3992 if (code
== PLACEHOLDER_EXPR
)
3995 switch (TREE_CODE_CLASS (code
))
3998 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3999 position computations since they will be converted into a
4000 WITH_RECORD_EXPR involving the reference, which will assume
4001 here will be valid. */
4002 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
4004 case tcc_exceptional
:
4005 if (code
== TREE_LIST
)
4006 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp
))
4007 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp
)));
4012 case tcc_comparison
:
4013 case tcc_expression
:
4017 /* Ignoring the first operand isn't quite right, but works best. */
4018 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1));
4021 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
4022 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1))
4023 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 2)));
4026 /* The save_expr function never wraps anything containing
4027 a PLACEHOLDER_EXPR. */
4034 switch (TREE_CODE_LENGTH (code
))
4037 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
4039 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
4040 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1)));
4051 const_call_expr_arg_iterator iter
;
4052 FOR_EACH_CONST_CALL_EXPR_ARG (arg
, iter
, exp
)
4053 if (CONTAINS_PLACEHOLDER_P (arg
))
4067 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
4068 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
4072 type_contains_placeholder_1 (const_tree type
)
4074 /* If the size contains a placeholder or the parent type (component type in
4075 the case of arrays) type involves a placeholder, this type does. */
4076 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type
))
4077 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type
))
4078 || (!POINTER_TYPE_P (type
)
4080 && type_contains_placeholder_p (TREE_TYPE (type
))))
4083 /* Now do type-specific checks. Note that the last part of the check above
4084 greatly limits what we have to do below. */
4085 switch (TREE_CODE (type
))
4094 case REFERENCE_TYPE
:
4103 case FIXED_POINT_TYPE
:
4104 /* Here we just check the bounds. */
4105 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type
))
4106 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type
)));
4109 /* We have already checked the component type above, so just check
4110 the domain type. Flexible array members have a null domain. */
4111 return TYPE_DOMAIN (type
) ?
4112 type_contains_placeholder_p (TYPE_DOMAIN (type
)) : false;
4116 case QUAL_UNION_TYPE
:
4120 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4121 if (TREE_CODE (field
) == FIELD_DECL
4122 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field
))
4123 || (TREE_CODE (type
) == QUAL_UNION_TYPE
4124 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field
)))
4125 || type_contains_placeholder_p (TREE_TYPE (field
))))
4136 /* Wrapper around above function used to cache its result. */
4139 type_contains_placeholder_p (tree type
)
4143 /* If the contains_placeholder_bits field has been initialized,
4144 then we know the answer. */
4145 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) > 0)
4146 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) - 1;
4148 /* Indicate that we've seen this type node, and the answer is false.
4149 This is what we want to return if we run into recursion via fields. */
4150 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = 1;
4152 /* Compute the real value. */
4153 result
= type_contains_placeholder_1 (type
);
4155 /* Store the real value. */
4156 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = result
+ 1;
4161 /* Push tree EXP onto vector QUEUE if it is not already present. */
4164 push_without_duplicates (tree exp
, vec
<tree
> *queue
)
4169 FOR_EACH_VEC_ELT (*queue
, i
, iter
)
4170 if (simple_cst_equal (iter
, exp
) == 1)
4174 queue
->safe_push (exp
);
4177 /* Given a tree EXP, find all occurrences of references to fields
4178 in a PLACEHOLDER_EXPR and place them in vector REFS without
4179 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
4180 we assume here that EXP contains only arithmetic expressions
4181 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
4185 find_placeholder_in_expr (tree exp
, vec
<tree
> *refs
)
4187 enum tree_code code
= TREE_CODE (exp
);
4191 /* We handle TREE_LIST and COMPONENT_REF separately. */
4192 if (code
== TREE_LIST
)
4194 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), refs
);
4195 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), refs
);
4197 else if (code
== COMPONENT_REF
)
4199 for (inner
= TREE_OPERAND (exp
, 0);
4200 REFERENCE_CLASS_P (inner
);
4201 inner
= TREE_OPERAND (inner
, 0))
4204 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
4205 push_without_duplicates (exp
, refs
);
4207 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), refs
);
4210 switch (TREE_CODE_CLASS (code
))
4215 case tcc_declaration
:
4216 /* Variables allocated to static storage can stay. */
4217 if (!TREE_STATIC (exp
))
4218 push_without_duplicates (exp
, refs
);
4221 case tcc_expression
:
4222 /* This is the pattern built in ada/make_aligning_type. */
4223 if (code
== ADDR_EXPR
4224 && TREE_CODE (TREE_OPERAND (exp
, 0)) == PLACEHOLDER_EXPR
)
4226 push_without_duplicates (exp
, refs
);
4232 case tcc_exceptional
:
4235 case tcc_comparison
:
4237 for (i
= 0; i
< TREE_CODE_LENGTH (code
); i
++)
4238 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
4242 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4243 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
4251 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
4252 return a tree with all occurrences of references to F in a
4253 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
4254 CONST_DECLs. Note that we assume here that EXP contains only
4255 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
4256 occurring only in their argument list. */
4259 substitute_in_expr (tree exp
, tree f
, tree r
)
4261 enum tree_code code
= TREE_CODE (exp
);
4262 tree op0
, op1
, op2
, op3
;
4265 /* We handle TREE_LIST and COMPONENT_REF separately. */
4266 if (code
== TREE_LIST
)
4268 op0
= SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp
), f
, r
);
4269 op1
= SUBSTITUTE_IN_EXPR (TREE_VALUE (exp
), f
, r
);
4270 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
4273 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4275 else if (code
== COMPONENT_REF
)
4279 /* If this expression is getting a value from a PLACEHOLDER_EXPR
4280 and it is the right field, replace it with R. */
4281 for (inner
= TREE_OPERAND (exp
, 0);
4282 REFERENCE_CLASS_P (inner
);
4283 inner
= TREE_OPERAND (inner
, 0))
4287 op1
= TREE_OPERAND (exp
, 1);
4289 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& op1
== f
)
4292 /* If this expression hasn't been completed let, leave it alone. */
4293 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& !TREE_TYPE (inner
))
4296 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4297 if (op0
== TREE_OPERAND (exp
, 0))
4301 = fold_build3 (COMPONENT_REF
, TREE_TYPE (exp
), op0
, op1
, NULL_TREE
);
4304 switch (TREE_CODE_CLASS (code
))
4309 case tcc_declaration
:
4315 case tcc_expression
:
4321 case tcc_exceptional
:
4324 case tcc_comparison
:
4326 switch (TREE_CODE_LENGTH (code
))
4332 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4333 if (op0
== TREE_OPERAND (exp
, 0))
4336 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4340 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4341 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4343 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4346 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4350 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4351 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4352 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
4354 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4355 && op2
== TREE_OPERAND (exp
, 2))
4358 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4362 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
4363 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
4364 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
4365 op3
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 3), f
, r
);
4367 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4368 && op2
== TREE_OPERAND (exp
, 2)
4369 && op3
== TREE_OPERAND (exp
, 3))
4373 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4385 new_tree
= NULL_TREE
;
4387 /* If we are trying to replace F with a constant or with another
4388 instance of one of the arguments of the call, inline back
4389 functions which do nothing else than computing a value from
4390 the arguments they are passed. This makes it possible to
4391 fold partially or entirely the replacement expression. */
4392 if (code
== CALL_EXPR
)
4394 bool maybe_inline
= false;
4395 if (CONSTANT_CLASS_P (r
))
4396 maybe_inline
= true;
4398 for (i
= 3; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4399 if (operand_equal_p (TREE_OPERAND (exp
, i
), r
, 0))
4401 maybe_inline
= true;
4406 tree t
= maybe_inline_call_in_expr (exp
);
4408 return SUBSTITUTE_IN_EXPR (t
, f
, r
);
4412 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4414 tree op
= TREE_OPERAND (exp
, i
);
4415 tree new_op
= SUBSTITUTE_IN_EXPR (op
, f
, r
);
4419 new_tree
= copy_node (exp
);
4420 TREE_OPERAND (new_tree
, i
) = new_op
;
4426 new_tree
= fold (new_tree
);
4427 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4428 process_call_operands (new_tree
);
4439 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4441 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4442 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
4447 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
4448 for it within OBJ, a tree that is an object or a chain of references. */
4451 substitute_placeholder_in_expr (tree exp
, tree obj
)
4453 enum tree_code code
= TREE_CODE (exp
);
4454 tree op0
, op1
, op2
, op3
;
4457 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
4458 in the chain of OBJ. */
4459 if (code
== PLACEHOLDER_EXPR
)
4461 tree need_type
= TYPE_MAIN_VARIANT (TREE_TYPE (exp
));
4464 for (elt
= obj
; elt
!= 0;
4465 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
4466 || TREE_CODE (elt
) == COND_EXPR
)
4467 ? TREE_OPERAND (elt
, 1)
4468 : (REFERENCE_CLASS_P (elt
)
4469 || UNARY_CLASS_P (elt
)
4470 || BINARY_CLASS_P (elt
)
4471 || VL_EXP_CLASS_P (elt
)
4472 || EXPRESSION_CLASS_P (elt
))
4473 ? TREE_OPERAND (elt
, 0) : 0))
4474 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
4477 for (elt
= obj
; elt
!= 0;
4478 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
4479 || TREE_CODE (elt
) == COND_EXPR
)
4480 ? TREE_OPERAND (elt
, 1)
4481 : (REFERENCE_CLASS_P (elt
)
4482 || UNARY_CLASS_P (elt
)
4483 || BINARY_CLASS_P (elt
)
4484 || VL_EXP_CLASS_P (elt
)
4485 || EXPRESSION_CLASS_P (elt
))
4486 ? TREE_OPERAND (elt
, 0) : 0))
4487 if (POINTER_TYPE_P (TREE_TYPE (elt
))
4488 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
4490 return fold_build1 (INDIRECT_REF
, need_type
, elt
);
4492 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
4493 survives until RTL generation, there will be an error. */
4497 /* TREE_LIST is special because we need to look at TREE_VALUE
4498 and TREE_CHAIN, not TREE_OPERANDS. */
4499 else if (code
== TREE_LIST
)
4501 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), obj
);
4502 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), obj
);
4503 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
4506 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4509 switch (TREE_CODE_CLASS (code
))
4512 case tcc_declaration
:
4515 case tcc_exceptional
:
4518 case tcc_comparison
:
4519 case tcc_expression
:
4522 switch (TREE_CODE_LENGTH (code
))
4528 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4529 if (op0
== TREE_OPERAND (exp
, 0))
4532 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4536 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4537 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4539 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4542 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4546 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4547 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4548 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4550 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4551 && op2
== TREE_OPERAND (exp
, 2))
4554 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4558 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4559 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4560 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4561 op3
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 3), obj
);
4563 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4564 && op2
== TREE_OPERAND (exp
, 2)
4565 && op3
== TREE_OPERAND (exp
, 3))
4569 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4581 new_tree
= NULL_TREE
;
4583 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4585 tree op
= TREE_OPERAND (exp
, i
);
4586 tree new_op
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (op
, obj
);
4590 new_tree
= copy_node (exp
);
4591 TREE_OPERAND (new_tree
, i
) = new_op
;
4597 new_tree
= fold (new_tree
);
4598 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4599 process_call_operands (new_tree
);
4610 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4612 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4613 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
4619 /* Subroutine of stabilize_reference; this is called for subtrees of
4620 references. Any expression with side-effects must be put in a SAVE_EXPR
4621 to ensure that it is only evaluated once.
4623 We don't put SAVE_EXPR nodes around everything, because assigning very
4624 simple expressions to temporaries causes us to miss good opportunities
4625 for optimizations. Among other things, the opportunity to fold in the
4626 addition of a constant into an addressing mode often gets lost, e.g.
4627 "y[i+1] += x;". In general, we take the approach that we should not make
4628 an assignment unless we are forced into it - i.e., that any non-side effect
4629 operator should be allowed, and that cse should take care of coalescing
4630 multiple utterances of the same expression should that prove fruitful. */
4633 stabilize_reference_1 (tree e
)
4636 enum tree_code code
= TREE_CODE (e
);
4638 /* We cannot ignore const expressions because it might be a reference
4639 to a const array but whose index contains side-effects. But we can
4640 ignore things that are actual constant or that already have been
4641 handled by this function. */
4643 if (tree_invariant_p (e
))
4646 switch (TREE_CODE_CLASS (code
))
4648 case tcc_exceptional
:
4649 /* Always wrap STATEMENT_LIST into SAVE_EXPR, even if it doesn't
4650 have side-effects. */
4651 if (code
== STATEMENT_LIST
)
4652 return save_expr (e
);
4655 case tcc_declaration
:
4656 case tcc_comparison
:
4658 case tcc_expression
:
4661 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4662 so that it will only be evaluated once. */
4663 /* The reference (r) and comparison (<) classes could be handled as
4664 below, but it is generally faster to only evaluate them once. */
4665 if (TREE_SIDE_EFFECTS (e
))
4666 return save_expr (e
);
4670 /* Constants need no processing. In fact, we should never reach
4675 /* Division is slow and tends to be compiled with jumps,
4676 especially the division by powers of 2 that is often
4677 found inside of an array reference. So do it just once. */
4678 if (code
== TRUNC_DIV_EXPR
|| code
== TRUNC_MOD_EXPR
4679 || code
== FLOOR_DIV_EXPR
|| code
== FLOOR_MOD_EXPR
4680 || code
== CEIL_DIV_EXPR
|| code
== CEIL_MOD_EXPR
4681 || code
== ROUND_DIV_EXPR
|| code
== ROUND_MOD_EXPR
)
4682 return save_expr (e
);
4683 /* Recursively stabilize each operand. */
4684 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)),
4685 stabilize_reference_1 (TREE_OPERAND (e
, 1)));
4689 /* Recursively stabilize each operand. */
4690 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)));
4697 TREE_TYPE (result
) = TREE_TYPE (e
);
4698 TREE_READONLY (result
) = TREE_READONLY (e
);
4699 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (e
);
4700 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (e
);
4705 /* Stabilize a reference so that we can use it any number of times
4706 without causing its operands to be evaluated more than once.
4707 Returns the stabilized reference. This works by means of save_expr,
4708 so see the caveats in the comments about save_expr.
4710 Also allows conversion expressions whose operands are references.
4711 Any other kind of expression is returned unchanged. */
tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      result = build_nt (INDIRECT_REF,
                         stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      result = build_nt (COMPONENT_REF,
                         stabilize_reference (TREE_OPERAND (ref, 0)),
                         TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
                         stabilize_reference (TREE_OPERAND (ref, 0)),
                         TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
      break;

    case ARRAY_REF:
      result = build_nt (ARRAY_REF,
                         stabilize_reference (TREE_OPERAND (ref, 0)),
                         stabilize_reference_1 (TREE_OPERAND (ref, 1)),
                         TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
                         stabilize_reference (TREE_OPERAND (ref, 0)),
                         stabilize_reference_1 (TREE_OPERAND (ref, 1)),
                         TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
         it wouldn't be ignored.  This matters when dealing with
         volatiles.  */
      return stabilize_reference_1 (ref);

      /* If arg isn't a kind of lvalue we recognize, make no change.
         Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}
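
/* Usage sketch (illustrative only, not part of the original sources): a
   front end lowering a compound assignment such as "ref += val" can make
   sure REF's address computation is evaluated only once by writing,
   roughly,

     lhs  = stabilize_reference (lhs);
     tree sum  = build2 (PLUS_EXPR, TREE_TYPE (lhs), lhs, val);
     tree stmt = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, sum);

   Any side effects in the base or index of LHS end up wrapped in
   SAVE_EXPRs by stabilize_reference_1 and are evaluated a single time.  */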
/* Low-level constructors for expressions.  */

/* A helper function for build1 and constant folders.  Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR.  */
void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  gcc_assert (TREE_CODE (t) == ADDR_EXPR);

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
         array reference (probably made temporarily by the G++ front end),
         so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
           || TREE_CODE (node) == ARRAY_RANGE_REF)
          && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
        {
          UPDATE_FLAGS (TREE_OPERAND (node, 1));
          if (TREE_OPERAND (node, 2))
            UPDATE_FLAGS (TREE_OPERAND (node, 2));
          if (TREE_OPERAND (node, 3))
            UPDATE_FLAGS (TREE_OPERAND (node, 3));
        }
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
         FIELD_DECL, apparently.  The G++ front end can put something else
         there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
               && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
        {
          if (TREE_OPERAND (node, 2))
            UPDATE_FLAGS (TREE_OPERAND (node, 2));
        }
    }

  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }

  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
}
/* Build an expression of code CODE, data type TYPE, and operands as
   specified.  Expressions and reference nodes can be created this way.
   Constants, decls, types and misc nodes cannot be.

   We define 5 non-variadic functions, from 0 to 4 arguments.  This is
   enough for all extant tree codes.  */
tree
build0 (enum tree_code code, tree tt MEM_STAT_DECL)
{
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 0);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  return t;
}
tree
build1 (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  if (node && !TYPE_P (node))
    {
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    {
      if (code != DEBUG_BEGIN_STMT)
        TREE_SIDE_EFFECTS (t) = 1;
    }
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
         operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
         its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
        recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
          && node && !TYPE_P (node)
          && TREE_CONSTANT (node))
        TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
          && node && TREE_THIS_VOLATILE (node))
        TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}
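
/* Usage sketch (illustrative only, not in the original file): build1
   derives the flags of the result from its single operand, e.g.

     tree neg = build1 (NEGATE_EXPR, TREE_TYPE (op), op);

   inherits TREE_SIDE_EFFECTS and TREE_READONLY from OP, while

     tree addr = build1 (ADDR_EXPR, ptr_type, decl);

   goes through recompute_tree_invariant_for_addr_expr to decide whether
   the address is constant.  OP, DECL and PTR_TYPE are placeholders here;
   PTR_TYPE stands for a suitable pointer type, e.g. one obtained from
   build_pointer_type (TREE_TYPE (decl)).  */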
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N && !TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
tree
build2 (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects, div_by_zero;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
         we need to be able to build explicit extensions or truncations
         of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
                && TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
                && ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
              || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      div_by_zero = integer_zerop (arg1);
      break;
    default:
      div_by_zero = false;
    }

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
        {
          tree o = TREE_OPERAND (arg0, 0);
          TREE_READONLY (t) = TREE_READONLY (o);
          TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
        }
    }
  else
    {
      TREE_READONLY (t) = read_only;
      /* Don't mark X / 0 as constant.  */
      TREE_CONSTANT (t) = constant && !div_by_zero;
      TREE_THIS_VOLATILE (t)
        = (TREE_CODE_CLASS (code) == tcc_reference
           && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}
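
/* Usage sketch (illustrative only): pointer arithmetic at this level is
   spelled POINTER_PLUS_EXPR, whose second operand must have an offset type
   such as sizetype, matching the assertion above:

     tree off = build_int_cst (sizetype, 4);
     tree p1  = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);

   PTR is assumed to be some expression of pointer type; the result is only
   marked TREE_CONSTANT when both operands are constant.  */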
tree
build3 (enum tree_code code, tree tt, tree arg0, tree arg1,
        tree arg2 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
tree
build4 (enum tree_code code, tree tt, tree arg0, tree arg1,
        tree arg2, tree arg3 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
tree
build5 (enum tree_code code, tree tt, tree arg0, tree arg1,
        tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
        {
          tree o = TREE_OPERAND (arg0, 0);
          TREE_READONLY (t) = TREE_READONLY (o);
          TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
        }
    }
  else
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
         && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}
/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  poly_int64 offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
          || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      if (TREE_CODE (ptr) == MEM_REF)
        {
          offset += mem_ref_offset (ptr).force_shwi ();
          ptr = TREE_OPERAND (ptr, 0);
        }
      else
        ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
                ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
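
/* Usage sketch (illustrative only): given a pointer-valued tree PTR,

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   yields a MEM_REF of type TREE_TYPE (TREE_TYPE (ptr)) with a zero byte
   offset, the tree-level equivalent of "*ptr".  */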
/* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T.  */

poly_offset_int
mem_ref_offset (const_tree t)
{
  return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
                                SIGNED);
}
/* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
   offsetted by OFFSET units.  */

tree
build_invariant_address (tree type, tree base, poly_int64 offset)
{
  tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
                          build_fold_addr_expr (base),
                          build_int_cst (ptr_type_node, offset));
  tree addr = build1 (ADDR_EXPR, type, ref);
  recompute_tree_invariant_for_addr_expr (addr);
  return addr;
}
/* Similar except don't specify the TREE_TYPE
   and leave the TREE_SIDE_EFFECTS as 0.
   It is permissible for arguments to be null,
   or even garbage if their values do not matter.  */

tree
build_nt (enum tree_code code, ...)
{
  tree t;
  int length;
  int i;
  va_list p;

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  va_start (p, code);

  t = make_node (code);
  length = TREE_CODE_LENGTH (code);

  for (i = 0; i < length; i++)
    TREE_OPERAND (t, i) = va_arg (p, tree);

  va_end (p);
  return t;
}
/* Similar to build_nt, but for creating a CALL_EXPR object with a
   tree vec.  */

tree
build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
{
  tree ret, t;
  unsigned int ix;

  ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
  CALL_EXPR_FN (ret) = fn;
  CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
  FOR_EACH_VEC_SAFE_ELT (args, ix, t)
    CALL_EXPR_ARG (ret, ix) = t;
  return ret;
}
/* Create a DECL_... node of code CODE, name NAME (if non-null)
   and data type TYPE.
   We do NOT enter this node in any sort of symbol table.

   LOC is the location of the decl.

   layout_decl is used to set up the decl's storage layout.
   Other slots are initialized to 0 or null pointers.  */

tree
build_decl (location_t loc, enum tree_code code, tree name,
            tree type MEM_STAT_DECL)
{
  tree t;

  t = make_node (code PASS_MEM_STAT);
  DECL_SOURCE_LOCATION (t) = loc;

/*  if (type == error_mark_node)
    type = integer_type_node; */
/* That is not done, deliberately, so that having error_mark_node
   as the type can suppress useless errors in the use of this variable.  */

  DECL_NAME (t) = name;
  TREE_TYPE (t) = type;

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    layout_decl (t, 0);

  return t;
}
/* Builds and returns function declaration with NAME and TYPE.  */

tree
build_fn_decl (const char *name, tree type)
{
  tree id = get_identifier (name);
  tree decl = build_decl (input_location, FUNCTION_DECL, id, type);

  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;

  return decl;
}
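
/* Usage sketch (illustrative only): declaring an external helper that
   takes and returns an integer could look like

     tree fntype = build_function_type_list (integer_type_node,
                                             integer_type_node, NULL_TREE);
     tree decl   = build_fn_decl ("__example_helper", fntype);

   The name "__example_helper" is a placeholder, not an existing runtime
   entry point.  */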
vec<tree, va_gc> *all_translation_units;

/* Builds a new translation-unit decl with name NAME, queues it in the
   global list of translation-unit decls and returns it.  */

tree
build_translation_unit_decl (tree name)
{
  tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
                        name, NULL_TREE);
  TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
  vec_safe_push (all_translation_units, tu);
  return tu;
}
/* BLOCK nodes are used to represent the structure of binding contours
   and declarations, once those contours have been exited and their contents
   compiled.  This information is used for outputting debugging info.  */

tree
build_block (tree vars, tree subblocks, tree supercontext, tree chain)
{
  tree block = make_node (BLOCK);

  BLOCK_VARS (block) = vars;
  BLOCK_SUBBLOCKS (block) = subblocks;
  BLOCK_SUPERCONTEXT (block) = supercontext;
  BLOCK_CHAIN (block) = chain;
  return block;
}
/* Like SET_EXPR_LOCATION, but make sure the tree can have a location.

   LOC is the location to use in tree T.  */

void
protected_set_expr_location (tree t, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, loc);
  else if (t && TREE_CODE (t) == STATEMENT_LIST)
    {
      t = expr_single (t);
      if (t && CAN_HAVE_LOCATION_P (t))
        SET_EXPR_LOCATION (t, loc);
    }
}

/* Like PROTECTED_SET_EXPR_LOCATION, but only do that if T has
   UNKNOWN_LOCATION.  */

void
protected_set_expr_location_if_unset (tree t, location_t loc)
{
  t = expr_single (t);
  if (t && !EXPR_HAS_LOCATION (t))
    protected_set_expr_location (t, loc);
}
/* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
   of the various TYPE_QUAL values.  */

static void
set_type_quals (tree type, int type_quals)
{
  TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
  TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
  TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
  TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
  TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
}
/* Returns true iff CAND and BASE have equivalent language-specific
   qualifiers.  */

bool
check_lang_type (const_tree cand, const_tree base)
{
  if (lang_hooks.types.type_hash_eq == NULL)
    return true;
  /* type_hash_eq currently only applies to these types.  */
  if (TREE_CODE (cand) != FUNCTION_TYPE
      && TREE_CODE (cand) != METHOD_TYPE)
    return true;
  return lang_hooks.types.type_hash_eq (cand, base);
}
/* This function checks to see if TYPE matches the size one of the built-in
   atomic types, and returns that core atomic type.  */

static tree
find_atomic_core_type (const_tree type)
{
  tree base_atomic_type;

  /* Only handle complete types.  */
  if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
    return NULL_TREE;

  switch (tree_to_uhwi (TYPE_SIZE (type)))
    {
    case 8:
      base_atomic_type = atomicQI_type_node;
      break;

    case 16:
      base_atomic_type = atomicHI_type_node;
      break;

    case 32:
      base_atomic_type = atomicSI_type_node;
      break;

    case 64:
      base_atomic_type = atomicDI_type_node;
      break;

    case 128:
      base_atomic_type = atomicTI_type_node;
      break;

    default:
      base_atomic_type = NULL_TREE;
    }

  return base_atomic_type;
}
/* Returns true iff unqualified CAND and BASE are equivalent.  */

bool
check_base_type (const_tree cand, const_tree base)
{
  if (TYPE_NAME (cand) != TYPE_NAME (base)
      /* Apparently this is needed for Objective-C.  */
      || TYPE_CONTEXT (cand) != TYPE_CONTEXT (base)
      || !attribute_list_equal (TYPE_ATTRIBUTES (cand),
                                TYPE_ATTRIBUTES (base)))
    return false;
  /* Check alignment.  */
  if (TYPE_ALIGN (cand) == TYPE_ALIGN (base)
      && TYPE_USER_ALIGN (cand) == TYPE_USER_ALIGN (base))
    return true;
  /* Atomic types increase minimal alignment.  We must do so as well
     or we get duplicated canonical types.  See PR88686.  */
  if ((TYPE_QUALS (cand) & TYPE_QUAL_ATOMIC))
    {
      /* See if this object can map to a basic atomic type.  */
      tree atomic_type = find_atomic_core_type (cand);
      if (atomic_type && TYPE_ALIGN (atomic_type) == TYPE_ALIGN (cand))
        return true;
    }
  return false;
}
/* Returns true iff CAND is equivalent to BASE with TYPE_QUALS.  */

bool
check_qualified_type (const_tree cand, const_tree base, int type_quals)
{
  return (TYPE_QUALS (cand) == type_quals
          && check_base_type (cand, base)
          && check_lang_type (cand, base));
}

/* Returns true iff CAND is equivalent to BASE with ALIGN.  */

static bool
check_aligned_type (const_tree cand, const_tree base, unsigned int align)
{
  return (TYPE_QUALS (cand) == TYPE_QUALS (base)
          && TYPE_NAME (cand) == TYPE_NAME (base)
          /* Apparently this is needed for Objective-C.  */
          && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
          /* Check alignment.  */
          && TYPE_ALIGN (cand) == align
          /* Check this is a user-aligned type as build_aligned_type
             would create.  */
          && TYPE_USER_ALIGN (cand)
          && attribute_list_equal (TYPE_ATTRIBUTES (cand),
                                   TYPE_ATTRIBUTES (base))
          && check_lang_type (cand, base));
}
/* Return a version of the TYPE, qualified as indicated by the
   TYPE_QUALS, if one exists.  If no qualified version exists yet,
   return NULL_TREE.  */

tree
get_qualified_type (tree type, int type_quals)
{
  if (TYPE_QUALS (type) == type_quals)
    return type;

  tree mv = TYPE_MAIN_VARIANT (type);
  if (check_qualified_type (mv, type, type_quals))
    return mv;

  /* Search the chain of variants to see if there is already one there just
     like the one we need to have.  If so, use that existing one.  We must
     preserve the TYPE_NAME, since there is code that depends on this.  */
  for (tree *tp = &TYPE_NEXT_VARIANT (mv); *tp; tp = &TYPE_NEXT_VARIANT (*tp))
    if (check_qualified_type (*tp, type, type_quals))
      {
        /* Put the found variant at the head of the variant list so
           frequently searched variants get found faster.  The C++ FE
           benefits greatly from this.  */
        tree t = *tp;
        *tp = TYPE_NEXT_VARIANT (t);
        TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
        TYPE_NEXT_VARIANT (mv) = t;
        return t;
      }

  return NULL_TREE;
}
/* Like get_qualified_type, but creates the type if it does not
   exist.  This function never returns NULL_TREE.  */

tree
build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
{
  tree t;

  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
  if (!t)
    {
      t = build_variant_type_copy (type PASS_MEM_STAT);
      set_type_quals (t, type_quals);

      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
        {
          /* See if this object can map to a basic atomic type.  */
          tree atomic_type = find_atomic_core_type (type);
          if (atomic_type)
            {
              /* Ensure the alignment of this type is compatible with
                 the required alignment of the atomic type.  */
              if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
                SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
            }
        }

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
        /* Propagate structural equality. */
        SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
        /* Build the underlying canonical type, since it is different
           from TYPE. */
        {
          tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
          TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
        }
      else
        /* T is its own canonical type. */
        TYPE_CANONICAL (t) = t;
    }

  return t;
}
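
/* Usage sketch (illustrative only): the TYPE_QUAL_* values combine as a
   bitmask, so a "const volatile int" variant can be obtained with

     tree cv_int = build_qualified_type (integer_type_node,
                                         TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   Repeated calls with the same arguments return the variant already
   recorded on the main variant's chain rather than creating a duplicate.  */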
/* Create a variant of type T with alignment ALIGN.  */

tree
build_aligned_type (tree type, unsigned int align)
{
  tree t;

  if (TYPE_PACKED (type)
      || TYPE_ALIGN (type) == align)
    return type;

  for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    if (check_aligned_type (t, type, align))
      return t;

  t = build_variant_type_copy (type);
  SET_TYPE_ALIGN (t, align);
  TYPE_USER_ALIGN (t) = 1;

  return t;
}
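
/* Usage sketch (illustrative only): an over-aligned variant of a type,
   as used for attribute ((aligned (N))), can be built with

     tree aligned_copy = build_aligned_type (type, 128);

   where the alignment argument follows TYPE_ALIGN and is expressed in
   bits (128 bits == 16 bytes); the variant is marked TYPE_USER_ALIGN and
   reused on later identical requests.  */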
/* Create a new distinct copy of TYPE.  The new type is made its own
   MAIN_VARIANT.  If TYPE requires structural equality checks, the
   resulting type requires structural equality checks; otherwise, its
   TYPE_CANONICAL points to itself.  */

tree
build_distinct_type_copy (tree type MEM_STAT_DECL)
{
  tree t = copy_node (type PASS_MEM_STAT);

  TYPE_POINTER_TO (t) = 0;
  TYPE_REFERENCE_TO (t) = 0;

  /* Set the canonical type either to a new equivalence class, or
     propagate the need for structural equality checks.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type))
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else
    TYPE_CANONICAL (t) = t;

  /* Make it its own variant.  */
  TYPE_MAIN_VARIANT (t) = t;
  TYPE_NEXT_VARIANT (t) = 0;

  /* Note that it is now possible for TYPE_MIN_VALUE to be a value
     whose TREE_TYPE is not t.  This can also happen in the Ada
     frontend when using subtypes.  */

  return t;
}
/* Create a new variant of TYPE, equivalent but distinct.  This is so
   the caller can modify it.  TYPE_CANONICAL for the return type will
   be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
   are considered equal by the language itself (or that both types
   require structural equality checks).  */

tree
build_variant_type_copy (tree type MEM_STAT_DECL)
{
  tree t, m = TYPE_MAIN_VARIANT (type);

  t = build_distinct_type_copy (type PASS_MEM_STAT);

  /* Since we're building a variant, assume that it is a non-semantic
     variant.  This also propagates TYPE_STRUCTURAL_EQUALITY_P.  */
  TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
  /* Type variants have no alias set defined.  */
  TYPE_ALIAS_SET (t) = -1;

  /* Add the new type to the chain of variants of TYPE.  */
  TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
  TYPE_NEXT_VARIANT (m) = t;
  TYPE_MAIN_VARIANT (t) = m;

  return t;
}
5647 /* Return true if the from tree in both tree maps are equal. */
5650 tree_map_base_eq (const void *va
, const void *vb
)
5652 const struct tree_map_base
*const a
= (const struct tree_map_base
*) va
,
5653 *const b
= (const struct tree_map_base
*) vb
;
5654 return (a
->from
== b
->from
);
5657 /* Hash a from tree in a tree_base_map. */
5660 tree_map_base_hash (const void *item
)
5662 return htab_hash_pointer (((const struct tree_map_base
*)item
)->from
);
5665 /* Return true if this tree map structure is marked for garbage collection
5666 purposes. We simply return true if the from tree is marked, so that this
5667 structure goes away when the from tree goes away. */
5670 tree_map_base_marked_p (const void *p
)
5672 return ggc_marked_p (((const struct tree_map_base
*) p
)->from
);
5675 /* Hash a from tree in a tree_map. */
5678 tree_map_hash (const void *item
)
5680 return (((const struct tree_map
*) item
)->hash
);
5683 /* Hash a from tree in a tree_decl_map. */
5686 tree_decl_map_hash (const void *item
)
5688 return DECL_UID (((const struct tree_decl_map
*) item
)->base
.from
);
5691 /* Return the initialization priority for DECL. */
5694 decl_init_priority_lookup (tree decl
)
5696 symtab_node
*snode
= symtab_node::get (decl
);
5699 return DEFAULT_INIT_PRIORITY
;
5701 snode
->get_init_priority ();
5704 /* Return the finalization priority for DECL. */
5707 decl_fini_priority_lookup (tree decl
)
5709 cgraph_node
*node
= cgraph_node::get (decl
);
5712 return DEFAULT_INIT_PRIORITY
;
5714 node
->get_fini_priority ();
5717 /* Set the initialization priority for DECL to PRIORITY. */
5720 decl_init_priority_insert (tree decl
, priority_type priority
)
5722 struct symtab_node
*snode
;
5724 if (priority
== DEFAULT_INIT_PRIORITY
)
5726 snode
= symtab_node::get (decl
);
5730 else if (VAR_P (decl
))
5731 snode
= varpool_node::get_create (decl
);
5733 snode
= cgraph_node::get_create (decl
);
5734 snode
->set_init_priority (priority
);
5737 /* Set the finalization priority for DECL to PRIORITY. */
5740 decl_fini_priority_insert (tree decl
, priority_type priority
)
5742 struct cgraph_node
*node
;
5744 if (priority
== DEFAULT_INIT_PRIORITY
)
5746 node
= cgraph_node::get (decl
);
5751 node
= cgraph_node::get_create (decl
);
5752 node
->set_fini_priority (priority
);
5755 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
5758 print_debug_expr_statistics (void)
5760 fprintf (stderr
, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
5761 (long) debug_expr_for_decl
->size (),
5762 (long) debug_expr_for_decl
->elements (),
5763 debug_expr_for_decl
->collisions ());
5766 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
5769 print_value_expr_statistics (void)
5771 fprintf (stderr
, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
5772 (long) value_expr_for_decl
->size (),
5773 (long) value_expr_for_decl
->elements (),
5774 value_expr_for_decl
->collisions ());
5777 /* Lookup a debug expression for FROM, and return it if we find one. */
5780 decl_debug_expr_lookup (tree from
)
5782 struct tree_decl_map
*h
, in
;
5783 in
.base
.from
= from
;
5785 h
= debug_expr_for_decl
->find_with_hash (&in
, DECL_UID (from
));
5791 /* Insert a mapping FROM->TO in the debug expression hashtable. */
5794 decl_debug_expr_insert (tree from
, tree to
)
5796 struct tree_decl_map
*h
;
5798 h
= ggc_alloc
<tree_decl_map
> ();
5799 h
->base
.from
= from
;
5801 *debug_expr_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
) = h
;
5804 /* Lookup a value expression for FROM, and return it if we find one. */
5807 decl_value_expr_lookup (tree from
)
5809 struct tree_decl_map
*h
, in
;
5810 in
.base
.from
= from
;
5812 h
= value_expr_for_decl
->find_with_hash (&in
, DECL_UID (from
));
5818 /* Insert a mapping FROM->TO in the value expression hashtable. */
5821 decl_value_expr_insert (tree from
, tree to
)
5823 struct tree_decl_map
*h
;
5825 h
= ggc_alloc
<tree_decl_map
> ();
5826 h
->base
.from
= from
;
5828 *value_expr_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
) = h
;
5831 /* Lookup a vector of debug arguments for FROM, and return it if we
5835 decl_debug_args_lookup (tree from
)
5837 struct tree_vec_map
*h
, in
;
5839 if (!DECL_HAS_DEBUG_ARGS_P (from
))
5841 gcc_checking_assert (debug_args_for_decl
!= NULL
);
5842 in
.base
.from
= from
;
5843 h
= debug_args_for_decl
->find_with_hash (&in
, DECL_UID (from
));
5849 /* Insert a mapping FROM->empty vector of debug arguments in the value
5850 expression hashtable. */
5853 decl_debug_args_insert (tree from
)
5855 struct tree_vec_map
*h
;
5858 if (DECL_HAS_DEBUG_ARGS_P (from
))
5859 return decl_debug_args_lookup (from
);
5860 if (debug_args_for_decl
== NULL
)
5861 debug_args_for_decl
= hash_table
<tree_vec_map_cache_hasher
>::create_ggc (64);
5862 h
= ggc_alloc
<tree_vec_map
> ();
5863 h
->base
.from
= from
;
5865 loc
= debug_args_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
);
5867 DECL_HAS_DEBUG_ARGS_P (from
) = 1;
5871 /* Hashing of types so that we don't make duplicates.
5872 The entry point is `type_hash_canon'. */
5874 /* Generate the default hash code for TYPE. This is designed for
5875 speed, rather than maximum entropy. */
5878 type_hash_canon_hash (tree type
)
5880 inchash::hash hstate
;
5882 hstate
.add_int (TREE_CODE (type
));
5884 if (TREE_TYPE (type
))
5885 hstate
.add_object (TYPE_HASH (TREE_TYPE (type
)));
5887 for (tree t
= TYPE_ATTRIBUTES (type
); t
; t
= TREE_CHAIN (t
))
5888 /* Just the identifier is adequate to distinguish. */
5889 hstate
.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t
)));
5891 switch (TREE_CODE (type
))
5894 hstate
.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type
)));
5897 for (tree t
= TYPE_ARG_TYPES (type
); t
; t
= TREE_CHAIN (t
))
5898 if (TREE_VALUE (t
) != error_mark_node
)
5899 hstate
.add_object (TYPE_HASH (TREE_VALUE (t
)));
5903 hstate
.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type
)));
5908 if (TYPE_DOMAIN (type
))
5909 hstate
.add_object (TYPE_HASH (TYPE_DOMAIN (type
)));
5910 if (!AGGREGATE_TYPE_P (TREE_TYPE (type
)))
5912 unsigned typeless
= TYPE_TYPELESS_STORAGE (type
);
5913 hstate
.add_object (typeless
);
5920 tree t
= TYPE_MAX_VALUE (type
);
5922 t
= TYPE_MIN_VALUE (type
);
5923 for (int i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
5924 hstate
.add_object (TREE_INT_CST_ELT (t
, i
));
5929 case FIXED_POINT_TYPE
:
5931 unsigned prec
= TYPE_PRECISION (type
);
5932 hstate
.add_object (prec
);
5937 hstate
.add_poly_int (TYPE_VECTOR_SUBPARTS (type
));
5944 return hstate
.end ();
5947 /* These are the Hashtable callback functions. */
5949 /* Returns true iff the types are equivalent. */
5952 type_cache_hasher::equal (type_hash
*a
, type_hash
*b
)
5954 /* First test the things that are the same for all types. */
5955 if (a
->hash
!= b
->hash
5956 || TREE_CODE (a
->type
) != TREE_CODE (b
->type
)
5957 || TREE_TYPE (a
->type
) != TREE_TYPE (b
->type
)
5958 || !attribute_list_equal (TYPE_ATTRIBUTES (a
->type
),
5959 TYPE_ATTRIBUTES (b
->type
))
5960 || (TREE_CODE (a
->type
) != COMPLEX_TYPE
5961 && TYPE_NAME (a
->type
) != TYPE_NAME (b
->type
)))
5964 /* Be careful about comparing arrays before and after the element type
5965 has been completed; don't compare TYPE_ALIGN unless both types are
5967 if (COMPLETE_TYPE_P (a
->type
) && COMPLETE_TYPE_P (b
->type
)
5968 && (TYPE_ALIGN (a
->type
) != TYPE_ALIGN (b
->type
)
5969 || TYPE_MODE (a
->type
) != TYPE_MODE (b
->type
)))
5972 switch (TREE_CODE (a
->type
))
5978 case REFERENCE_TYPE
:
5983 return known_eq (TYPE_VECTOR_SUBPARTS (a
->type
),
5984 TYPE_VECTOR_SUBPARTS (b
->type
));
5987 if (TYPE_VALUES (a
->type
) != TYPE_VALUES (b
->type
)
5988 && !(TYPE_VALUES (a
->type
)
5989 && TREE_CODE (TYPE_VALUES (a
->type
)) == TREE_LIST
5990 && TYPE_VALUES (b
->type
)
5991 && TREE_CODE (TYPE_VALUES (b
->type
)) == TREE_LIST
5992 && type_list_equal (TYPE_VALUES (a
->type
),
5993 TYPE_VALUES (b
->type
))))
6001 if (TYPE_PRECISION (a
->type
) != TYPE_PRECISION (b
->type
))
6003 return ((TYPE_MAX_VALUE (a
->type
) == TYPE_MAX_VALUE (b
->type
)
6004 || tree_int_cst_equal (TYPE_MAX_VALUE (a
->type
),
6005 TYPE_MAX_VALUE (b
->type
)))
6006 && (TYPE_MIN_VALUE (a
->type
) == TYPE_MIN_VALUE (b
->type
)
6007 || tree_int_cst_equal (TYPE_MIN_VALUE (a
->type
),
6008 TYPE_MIN_VALUE (b
->type
))));
6010 case FIXED_POINT_TYPE
:
6011 return TYPE_SATURATING (a
->type
) == TYPE_SATURATING (b
->type
);
6014 return TYPE_OFFSET_BASETYPE (a
->type
) == TYPE_OFFSET_BASETYPE (b
->type
);
6017 if (TYPE_METHOD_BASETYPE (a
->type
) == TYPE_METHOD_BASETYPE (b
->type
)
6018 && (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6019 || (TYPE_ARG_TYPES (a
->type
)
6020 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6021 && TYPE_ARG_TYPES (b
->type
)
6022 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6023 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6024 TYPE_ARG_TYPES (b
->type
)))))
6028 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6029 where the flag should be inherited from the element type
6030 and can change after ARRAY_TYPEs are created; on non-aggregates
6031 compare it and hash it, scalars will never have that flag set
6032 and we need to differentiate between arrays created by different
6033 front-ends or middle-end created arrays. */
6034 return (TYPE_DOMAIN (a
->type
) == TYPE_DOMAIN (b
->type
)
6035 && (AGGREGATE_TYPE_P (TREE_TYPE (a
->type
))
6036 || (TYPE_TYPELESS_STORAGE (a
->type
)
6037 == TYPE_TYPELESS_STORAGE (b
->type
))));
6041 case QUAL_UNION_TYPE
:
6042 return (TYPE_FIELDS (a
->type
) == TYPE_FIELDS (b
->type
)
6043 || (TYPE_FIELDS (a
->type
)
6044 && TREE_CODE (TYPE_FIELDS (a
->type
)) == TREE_LIST
6045 && TYPE_FIELDS (b
->type
)
6046 && TREE_CODE (TYPE_FIELDS (b
->type
)) == TREE_LIST
6047 && type_list_equal (TYPE_FIELDS (a
->type
),
6048 TYPE_FIELDS (b
->type
))));
6051 if (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6052 || (TYPE_ARG_TYPES (a
->type
)
6053 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6054 && TYPE_ARG_TYPES (b
->type
)
6055 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6056 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6057 TYPE_ARG_TYPES (b
->type
))))
6065 if (lang_hooks
.types
.type_hash_eq
!= NULL
)
6066 return lang_hooks
.types
.type_hash_eq (a
->type
, b
->type
);
6071 /* Given TYPE, and HASHCODE its hash code, return the canonical
6072 object for an identical type if one already exists.
6073 Otherwise, return TYPE, and record it as the canonical object.
6075 To use this function, first create a type of the sort you want.
6076 Then compute its hash code from the fields of the type that
6077 make it different from other similar types.
6078 Then call this function and use the value. */
6081 type_hash_canon (unsigned int hashcode
, tree type
)
6086 /* The hash table only contains main variants, so ensure that's what we're
6088 gcc_assert (TYPE_MAIN_VARIANT (type
) == type
);
6090 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6091 must call that routine before comparing TYPE_ALIGNs. */
6097 loc
= type_hash_table
->find_slot_with_hash (&in
, hashcode
, INSERT
);
6100 tree t1
= ((type_hash
*) *loc
)->type
;
6101 gcc_assert (TYPE_MAIN_VARIANT (t1
) == t1
6103 if (TYPE_UID (type
) + 1 == next_type_uid
)
6105 /* Free also min/max values and the cache for integer
6106 types. This can't be done in free_node, as LTO frees
6107 those on its own. */
6108 if (TREE_CODE (type
) == INTEGER_TYPE
)
6110 if (TYPE_MIN_VALUE (type
)
6111 && TREE_TYPE (TYPE_MIN_VALUE (type
)) == type
)
6113 /* Zero is always in TYPE_CACHED_VALUES. */
6114 if (! TYPE_UNSIGNED (type
))
6115 int_cst_hash_table
->remove_elt (TYPE_MIN_VALUE (type
));
6116 ggc_free (TYPE_MIN_VALUE (type
));
6118 if (TYPE_MAX_VALUE (type
)
6119 && TREE_TYPE (TYPE_MAX_VALUE (type
)) == type
)
6121 int_cst_hash_table
->remove_elt (TYPE_MAX_VALUE (type
));
6122 ggc_free (TYPE_MAX_VALUE (type
));
6124 if (TYPE_CACHED_VALUES_P (type
))
6125 ggc_free (TYPE_CACHED_VALUES (type
));
6132 struct type_hash
*h
;
6134 h
= ggc_alloc
<type_hash
> ();
6144 print_type_hash_statistics (void)
6146 fprintf (stderr
, "Type hash: size %ld, %ld elements, %f collisions\n",
6147 (long) type_hash_table
->size (),
6148 (long) type_hash_table
->elements (),
6149 type_hash_table
->collisions ());
6152 /* Given two lists of types
6153 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6154 return 1 if the lists contain the same types in the same order.
6155 Also, the TREE_PURPOSEs must match. */
6158 type_list_equal (const_tree l1
, const_tree l2
)
6162 for (t1
= l1
, t2
= l2
; t1
&& t2
; t1
= TREE_CHAIN (t1
), t2
= TREE_CHAIN (t2
))
6163 if (TREE_VALUE (t1
) != TREE_VALUE (t2
)
6164 || (TREE_PURPOSE (t1
) != TREE_PURPOSE (t2
)
6165 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1
), TREE_PURPOSE (t2
))
6166 && (TREE_TYPE (TREE_PURPOSE (t1
))
6167 == TREE_TYPE (TREE_PURPOSE (t2
))))))
6173 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6174 given by TYPE. If the argument list accepts variable arguments,
6175 then this function counts only the ordinary arguments. */
6178 type_num_arguments (const_tree fntype
)
6182 for (tree t
= TYPE_ARG_TYPES (fntype
); t
; t
= TREE_CHAIN (t
))
6183 /* If the function does not take a variable number of arguments,
6184 the last element in the list will have type `void'. */
6185 if (VOID_TYPE_P (TREE_VALUE (t
)))
6193 /* Return the type of the function TYPE's argument ARGNO if known.
6194 For vararg function's where ARGNO refers to one of the variadic
6195 arguments return null. Otherwise, return a void_type_node for
6196 out-of-bounds ARGNO. */
6199 type_argument_type (const_tree fntype
, unsigned argno
)
6201 /* Treat zero the same as an out-of-bounds argument number. */
6203 return void_type_node
;
6205 function_args_iterator iter
;
6209 FOREACH_FUNCTION_ARGS (fntype
, argtype
, iter
)
6211 /* A vararg function's argument list ends in a null. Otherwise,
6212 an ordinary function's argument list ends with void. Return
6213 null if ARGNO refers to a vararg argument, void_type_node if
6214 it's out of bounds, and the formal argument type otherwise. */
6218 if (i
== argno
|| VOID_TYPE_P (argtype
))
/* Nonzero if integer constants T1 and T2
   represent the same constant value.  */

int
tree_int_cst_equal (const_tree t1, const_tree t2)
{
  if (t1 == t2)
    return 1;

  if (t1 == 0 || t2 == 0)
    return 0;

  STRIP_ANY_LOCATION_WRAPPER (t1);
  STRIP_ANY_LOCATION_WRAPPER (t2);

  if (TREE_CODE (t1) == INTEGER_CST
      && TREE_CODE (t2) == INTEGER_CST
      && wi::to_widest (t1) == wi::to_widest (t2))
    return 1;

  return 0;
}
6250 /* Return true if T is an INTEGER_CST whose numerical value (extended
6251 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6254 tree_fits_shwi_p (const_tree t
)
6256 return (t
!= NULL_TREE
6257 && TREE_CODE (t
) == INTEGER_CST
6258 && wi::fits_shwi_p (wi::to_widest (t
)));
6261 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6262 value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
6265 tree_fits_poly_int64_p (const_tree t
)
6269 if (POLY_INT_CST_P (t
))
6271 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; i
++)
6272 if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t
, i
))))
6276 return (TREE_CODE (t
) == INTEGER_CST
6277 && wi::fits_shwi_p (wi::to_widest (t
)));
6280 /* Return true if T is an INTEGER_CST whose numerical value (extended
6281 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6284 tree_fits_uhwi_p (const_tree t
)
6286 return (t
!= NULL_TREE
6287 && TREE_CODE (t
) == INTEGER_CST
6288 && wi::fits_uhwi_p (wi::to_widest (t
)));
6291 /* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
6292 value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
6295 tree_fits_poly_uint64_p (const_tree t
)
6299 if (POLY_INT_CST_P (t
))
6301 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; i
++)
6302 if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t
, i
))))
6306 return (TREE_CODE (t
) == INTEGER_CST
6307 && wi::fits_uhwi_p (wi::to_widest (t
)));
6310 /* T is an INTEGER_CST whose numerical value (extended according to
6311 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6315 tree_to_shwi (const_tree t
)
6317 gcc_assert (tree_fits_shwi_p (t
));
6318 return TREE_INT_CST_LOW (t
);
6321 /* T is an INTEGER_CST whose numerical value (extended according to
6322 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6325 unsigned HOST_WIDE_INT
6326 tree_to_uhwi (const_tree t
)
6328 gcc_assert (tree_fits_uhwi_p (t
));
6329 return TREE_INT_CST_LOW (t
);
6332 /* Return the most significant (sign) bit of T. */
6335 tree_int_cst_sign_bit (const_tree t
)
6337 unsigned bitno
= TYPE_PRECISION (TREE_TYPE (t
)) - 1;
6339 return wi::extract_uhwi (wi::to_wide (t
), bitno
, 1);
6342 /* Return an indication of the sign of the integer constant T.
6343 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6344 Note that -1 will never be returned if T's type is unsigned. */
6347 tree_int_cst_sgn (const_tree t
)
6349 if (wi::to_wide (t
) == 0)
6351 else if (TYPE_UNSIGNED (TREE_TYPE (t
)))
6353 else if (wi::neg_p (wi::to_wide (t
)))
6359 /* Return the minimum number of bits needed to represent VALUE in a
6360 signed or unsigned type, UNSIGNEDP says which. */
6363 tree_int_cst_min_precision (tree value
, signop sgn
)
6365 /* If the value is negative, compute its negative minus 1. The latter
6366 adjustment is because the absolute value of the largest negative value
6367 is one larger than the largest positive value. This is equivalent to
6368 a bit-wise negation, so use that operation instead. */
6370 if (tree_int_cst_sgn (value
) < 0)
6371 value
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (value
), value
);
6373 /* Return the number of bits needed, taking into account the fact
6374 that we need one more bit for a signed than unsigned type.
6375 If value is 0 or -1, the minimum precision is 1 no matter
6376 whether unsignedp is true or false. */
6378 if (integer_zerop (value
))
6381 return tree_floor_log2 (value
) + 1 + (sgn
== SIGNED
? 1 : 0) ;
6384 /* Return truthvalue of whether T1 is the same tree structure as T2.
6385 Return 1 if they are the same.
6386 Return 0 if they are understandably different.
6387 Return -1 if either contains tree structure not understood by
6391 simple_cst_equal (const_tree t1
, const_tree t2
)
6393 enum tree_code code1
, code2
;
6399 if (t1
== 0 || t2
== 0)
6402 /* For location wrappers to be the same, they must be at the same
6403 source location (and wrap the same thing). */
6404 if (location_wrapper_p (t1
) && location_wrapper_p (t2
))
6406 if (EXPR_LOCATION (t1
) != EXPR_LOCATION (t2
))
6408 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6411 code1
= TREE_CODE (t1
);
6412 code2
= TREE_CODE (t2
);
6414 if (CONVERT_EXPR_CODE_P (code1
) || code1
== NON_LVALUE_EXPR
)
6416 if (CONVERT_EXPR_CODE_P (code2
)
6417 || code2
== NON_LVALUE_EXPR
)
6418 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6420 return simple_cst_equal (TREE_OPERAND (t1
, 0), t2
);
6423 else if (CONVERT_EXPR_CODE_P (code2
)
6424 || code2
== NON_LVALUE_EXPR
)
6425 return simple_cst_equal (t1
, TREE_OPERAND (t2
, 0));
6433 return wi::to_widest (t1
) == wi::to_widest (t2
);
6436 return real_identical (&TREE_REAL_CST (t1
), &TREE_REAL_CST (t2
));
6439 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1
), TREE_FIXED_CST (t2
));
6442 return (TREE_STRING_LENGTH (t1
) == TREE_STRING_LENGTH (t2
)
6443 && ! memcmp (TREE_STRING_POINTER (t1
), TREE_STRING_POINTER (t2
),
6444 TREE_STRING_LENGTH (t1
)));
6448 unsigned HOST_WIDE_INT idx
;
6449 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (t1
);
6450 vec
<constructor_elt
, va_gc
> *v2
= CONSTRUCTOR_ELTS (t2
);
6452 if (vec_safe_length (v1
) != vec_safe_length (v2
))
6455 for (idx
= 0; idx
< vec_safe_length (v1
); ++idx
)
6456 /* ??? Should we handle also fields here? */
6457 if (!simple_cst_equal ((*v1
)[idx
].value
, (*v2
)[idx
].value
))
6463 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6466 cmp
= simple_cst_equal (CALL_EXPR_FN (t1
), CALL_EXPR_FN (t2
));
6469 if (call_expr_nargs (t1
) != call_expr_nargs (t2
))
6472 const_tree arg1
, arg2
;
6473 const_call_expr_arg_iterator iter1
, iter2
;
6474 for (arg1
= first_const_call_expr_arg (t1
, &iter1
),
6475 arg2
= first_const_call_expr_arg (t2
, &iter2
);
6477 arg1
= next_const_call_expr_arg (&iter1
),
6478 arg2
= next_const_call_expr_arg (&iter2
))
6480 cmp
= simple_cst_equal (arg1
, arg2
);
6484 return arg1
== arg2
;
6488 /* Special case: if either target is an unallocated VAR_DECL,
6489 it means that it's going to be unified with whatever the
6490 TARGET_EXPR is really supposed to initialize, so treat it
6491 as being equivalent to anything. */
6492 if ((TREE_CODE (TREE_OPERAND (t1
, 0)) == VAR_DECL
6493 && DECL_NAME (TREE_OPERAND (t1
, 0)) == NULL_TREE
6494 && !DECL_RTL_SET_P (TREE_OPERAND (t1
, 0)))
6495 || (TREE_CODE (TREE_OPERAND (t2
, 0)) == VAR_DECL
6496 && DECL_NAME (TREE_OPERAND (t2
, 0)) == NULL_TREE
6497 && !DECL_RTL_SET_P (TREE_OPERAND (t2
, 0))))
6500 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6505 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t2
, 1));
6507 case WITH_CLEANUP_EXPR
:
6508 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6512 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t1
, 1));
6515 if (TREE_OPERAND (t1
, 1) == TREE_OPERAND (t2
, 1))
6516 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6527 if (POLY_INT_CST_P (t1
))
6528 /* A false return means maybe_ne rather than known_ne. */
6529 return known_eq (poly_widest_int::from (poly_int_cst_value (t1
),
6530 TYPE_SIGN (TREE_TYPE (t1
))),
6531 poly_widest_int::from (poly_int_cst_value (t2
),
6532 TYPE_SIGN (TREE_TYPE (t2
))));
6536 /* This general rule works for most tree codes. All exceptions should be
6537 handled above. If this is a language-specific tree code, we can't
6538 trust what might be in the operand, so say we don't know
6540 if ((int) code1
>= (int) LAST_AND_UNUSED_TREE_CODE
)
6543 switch (TREE_CODE_CLASS (code1
))
6547 case tcc_comparison
:
6548 case tcc_expression
:
6552 for (i
= 0; i
< TREE_CODE_LENGTH (code1
); i
++)
6554 cmp
= simple_cst_equal (TREE_OPERAND (t1
, i
), TREE_OPERAND (t2
, i
));
/* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
   Return -1, 0, or 1 if the value of T is less than, equal to, or greater
   than U, respectively.  */

int
compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
{
  if (tree_int_cst_sgn (t) < 0)
    return -1;
  else if (!tree_fits_uhwi_p (t))
    return 1;
  else if (TREE_INT_CST_LOW (t) == u)
    return 0;
  else if (TREE_INT_CST_LOW (t) < u)
    return -1;
  else
    return 1;
}
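
/* Usage sketch (illustrative only): compare_tree_int is the usual way to
   test an INTEGER_CST against a small host constant without worrying about
   the width of the tree constant, e.g.

     if (compare_tree_int (TYPE_SIZE_UNIT (type), 16) <= 0)
       ... the type is at most 16 bytes ...

   A negative tree value always compares as less than U.  */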
6585 /* Return true if SIZE represents a constant size that is in bounds of
6586 what the middle-end and the backend accepts (covering not more than
6587 half of the address-space).
6588 When PERR is non-null, set *PERR on failure to the description of
6589 why SIZE is not valid. */
6592 valid_constant_size_p (const_tree size
, cst_size_error
*perr
/* = NULL */)
6594 if (POLY_INT_CST_P (size
))
6596 if (TREE_OVERFLOW (size
))
6598 for (unsigned int i
= 0; i
< NUM_POLY_INT_COEFFS
; ++i
)
6599 if (!valid_constant_size_p (POLY_INT_CST_COEFF (size
, i
)))
6604 cst_size_error error
;
6608 if (TREE_CODE (size
) != INTEGER_CST
)
6610 *perr
= cst_size_not_constant
;
6614 if (TREE_OVERFLOW_P (size
))
6616 *perr
= cst_size_overflow
;
6620 if (tree_int_cst_sgn (size
) < 0)
6622 *perr
= cst_size_negative
;
6625 if (!tree_fits_uhwi_p (size
)
6626 || (wi::to_widest (TYPE_MAX_VALUE (sizetype
))
6627 < wi::to_widest (size
) * 2))
6629 *perr
= cst_size_too_big
;
/* Return the precision of the type, or for a complex or vector type the
   precision of the type of its elements.  */

unsigned int
element_precision (const_tree type)
{
  if (!TYPE_P (type))
    type = TREE_TYPE (type);
  enum tree_code code = TREE_CODE (type);
  if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
    type = TREE_TYPE (type);

  return TYPE_PRECISION (type);
}
6651 /* Return true if CODE represents an associative tree code. Otherwise
6654 associative_tree_code (enum tree_code code
)
6673 /* Return true if CODE represents a commutative tree code. Otherwise
6676 commutative_tree_code (enum tree_code code
)
6682 case MULT_HIGHPART_EXPR
:
6690 case UNORDERED_EXPR
:
6694 case TRUTH_AND_EXPR
:
6695 case TRUTH_XOR_EXPR
:
6697 case WIDEN_MULT_EXPR
:
6698 case VEC_WIDEN_MULT_HI_EXPR
:
6699 case VEC_WIDEN_MULT_LO_EXPR
:
6700 case VEC_WIDEN_MULT_EVEN_EXPR
:
6701 case VEC_WIDEN_MULT_ODD_EXPR
:
6710 /* Return true if CODE represents a ternary tree code for which the
6711 first two operands are commutative. Otherwise return false. */
6713 commutative_ternary_tree_code (enum tree_code code
)
6717 case WIDEN_MULT_PLUS_EXPR
:
6718 case WIDEN_MULT_MINUS_EXPR
:
6728 /* Returns true if CODE can overflow. */
6731 operation_can_overflow (enum tree_code code
)
6739 /* Can overflow in various ways. */
6741 case TRUNC_DIV_EXPR
:
6742 case EXACT_DIV_EXPR
:
6743 case FLOOR_DIV_EXPR
:
6745 /* For INT_MIN / -1. */
6752 /* These operators cannot overflow. */
6757 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6758 ftrapv doesn't generate trapping insns for CODE. */
6761 operation_no_trapping_overflow (tree type
, enum tree_code code
)
6763 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type
));
6765 /* We don't generate instructions that trap on overflow for complex or vector
6767 if (!INTEGRAL_TYPE_P (type
))
6770 if (!TYPE_OVERFLOW_TRAPS (type
))
6780 /* These operators can overflow, and -ftrapv generates trapping code for
6783 case TRUNC_DIV_EXPR
:
6784 case EXACT_DIV_EXPR
:
6785 case FLOOR_DIV_EXPR
:
6788 /* These operators can overflow, but -ftrapv does not generate trapping
6792 /* These operators cannot overflow. */
6797 /* Constructors for pointer, array and function types.
6798 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
6799 constructed by language-dependent code, not here.) */
6801 /* Construct, lay out and return the type of pointers to TO_TYPE with
6802 mode MODE. If MODE is VOIDmode, a pointer mode for the address
6803 space of TO_TYPE will be picked. If CAN_ALIAS_ALL is TRUE,
6804 indicate this type can reference all of memory. If such a type has
6805 already been constructed, reuse it. */
6808 build_pointer_type_for_mode (tree to_type
, machine_mode mode
,
6812 bool could_alias
= can_alias_all
;
6814 if (to_type
== error_mark_node
)
6815 return error_mark_node
;
6817 if (mode
== VOIDmode
)
6819 addr_space_t as
= TYPE_ADDR_SPACE (to_type
);
6820 mode
= targetm
.addr_space
.pointer_mode (as
);
6823 /* If the pointed-to type has the may_alias attribute set, force
6824 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6825 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
6826 can_alias_all
= true;
6828 /* In some cases, languages will have things that aren't a POINTER_TYPE
6829 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
6830 In that case, return that type without regard to the rest of our
6833 ??? This is a kludge, but consistent with the way this function has
6834 always operated and there doesn't seem to be a good way to avoid this
6836 if (TYPE_POINTER_TO (to_type
) != 0
6837 && TREE_CODE (TYPE_POINTER_TO (to_type
)) != POINTER_TYPE
)
6838 return TYPE_POINTER_TO (to_type
);
6840 /* First, if we already have a type for pointers to TO_TYPE and it's
6841 the proper mode, use it. */
6842 for (t
= TYPE_POINTER_TO (to_type
); t
; t
= TYPE_NEXT_PTR_TO (t
))
6843 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
6846 t
= make_node (POINTER_TYPE
);
6848 TREE_TYPE (t
) = to_type
;
6849 SET_TYPE_MODE (t
, mode
);
6850 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
6851 TYPE_NEXT_PTR_TO (t
) = TYPE_POINTER_TO (to_type
);
6852 TYPE_POINTER_TO (to_type
) = t
;
6854 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6855 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
6856 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6857 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
6859 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type
),
6862 /* Lay out the type. This function has many callers that are concerned
6863 with expression-construction, and this simplifies them all. */
6869 /* By default build pointers in ptr_mode. */
6872 build_pointer_type (tree to_type
)
6874 return build_pointer_type_for_mode (to_type
, VOIDmode
, false);
6877 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
6880 build_reference_type_for_mode (tree to_type
, machine_mode mode
,
6884 bool could_alias
= can_alias_all
;
6886 if (to_type
== error_mark_node
)
6887 return error_mark_node
;
6889 if (mode
== VOIDmode
)
6891 addr_space_t as
= TYPE_ADDR_SPACE (to_type
);
6892 mode
= targetm
.addr_space
.pointer_mode (as
);
6895 /* If the pointed-to type has the may_alias attribute set, force
6896 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
6897 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
6898 can_alias_all
= true;
6900 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
6901 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
6902 In that case, return that type without regard to the rest of our
6905 ??? This is a kludge, but consistent with the way this function has
6906 always operated and there doesn't seem to be a good way to avoid this
6908 if (TYPE_REFERENCE_TO (to_type
) != 0
6909 && TREE_CODE (TYPE_REFERENCE_TO (to_type
)) != REFERENCE_TYPE
)
6910 return TYPE_REFERENCE_TO (to_type
);
6912 /* First, if we already have a type for pointers to TO_TYPE and it's
6913 the proper mode, use it. */
6914 for (t
= TYPE_REFERENCE_TO (to_type
); t
; t
= TYPE_NEXT_REF_TO (t
))
6915 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
6918 t
= make_node (REFERENCE_TYPE
);
6920 TREE_TYPE (t
) = to_type
;
6921 SET_TYPE_MODE (t
, mode
);
6922 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
6923 TYPE_NEXT_REF_TO (t
) = TYPE_REFERENCE_TO (to_type
);
6924 TYPE_REFERENCE_TO (to_type
) = t
;
6926 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
6927 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
6928 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6929 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
6931 = build_reference_type_for_mode (TYPE_CANONICAL (to_type
),
6940 /* Build the node for the type of references-to-TO_TYPE by default
6944 build_reference_type (tree to_type
)
6946 return build_reference_type_for_mode (to_type
, VOIDmode
, false);
#define MAX_INT_CACHED_PREC \
  (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];

/* Builds a signed or unsigned integer type of precision PRECISION.
   Used for C bitfields whose precision does not match that of
   built-in target types.  */

tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
                                int unsignedp)
{
  tree itype, ret;

  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
        return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  inchash::hash hstate;
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  ret = type_hash_canon (hstate.end (), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}
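
/* Usage sketch (illustrative only): a 24-bit unsigned integer type, as
   needed for a bit-field such as "unsigned x : 24;", can be obtained with

     tree uint24 = build_nonstandard_integer_type (24, 1);

   Precisions up to MAX_INT_CACHED_PREC are cached, so repeated requests
   return the same node.  */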
6989 #define MAX_BOOL_CACHED_PREC \
6990 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
6991 static GTY(()) tree nonstandard_boolean_type_cache
[MAX_BOOL_CACHED_PREC
+ 1];
6993 /* Builds a boolean type of precision PRECISION.
6994 Used for boolean vectors to choose proper vector element size. */
6996 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision
)
7000 if (precision
<= MAX_BOOL_CACHED_PREC
)
7002 type
= nonstandard_boolean_type_cache
[precision
];
7007 type
= make_node (BOOLEAN_TYPE
);
7008 TYPE_PRECISION (type
) = precision
;
7009 fixup_signed_type (type
);
7011 if (precision
<= MAX_INT_CACHED_PREC
)
7012 nonstandard_boolean_type_cache
[precision
] = type
;
7017 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7018 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7019 is true, reuse such a type that has already been constructed. */
7022 build_range_type_1 (tree type
, tree lowval
, tree highval
, bool shared
)
7024 tree itype
= make_node (INTEGER_TYPE
);
7026 TREE_TYPE (itype
) = type
;
7028 TYPE_MIN_VALUE (itype
) = fold_convert (type
, lowval
);
7029 TYPE_MAX_VALUE (itype
) = highval
? fold_convert (type
, highval
) : NULL
;
7031 TYPE_PRECISION (itype
) = TYPE_PRECISION (type
);
7032 SET_TYPE_MODE (itype
, TYPE_MODE (type
));
7033 TYPE_SIZE (itype
) = TYPE_SIZE (type
);
7034 TYPE_SIZE_UNIT (itype
) = TYPE_SIZE_UNIT (type
);
7035 SET_TYPE_ALIGN (itype
, TYPE_ALIGN (type
));
7036 TYPE_USER_ALIGN (itype
) = TYPE_USER_ALIGN (type
);
7037 SET_TYPE_WARN_IF_NOT_ALIGN (itype
, TYPE_WARN_IF_NOT_ALIGN (type
));
7042 if ((TYPE_MIN_VALUE (itype
)
7043 && TREE_CODE (TYPE_MIN_VALUE (itype
)) != INTEGER_CST
)
7044 || (TYPE_MAX_VALUE (itype
)
7045 && TREE_CODE (TYPE_MAX_VALUE (itype
)) != INTEGER_CST
))
7047 /* Since we cannot reliably merge this type, we need to compare it using
7048 structural equality checks. */
7049 SET_TYPE_STRUCTURAL_EQUALITY (itype
);
7053 hashval_t hash
= type_hash_canon_hash (itype
);
7054 itype
= type_hash_canon (hash
, itype
);
/* Wrapper around build_range_type_1 with SHARED set to true.  */

tree
build_range_type (tree type, tree lowval, tree highval)
{
  return build_range_type_1 (type, lowval, highval, true);
}

/* Wrapper around build_range_type_1 with SHARED set to false.  */

tree
build_nonshared_range_type (tree type, tree lowval, tree highval)
{
  return build_range_type_1 (type, lowval, highval, false);
}

/* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
   MAXVAL should be the maximum value in the domain
   (one less than the length of the array).

   The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
   We don't enforce this limit, that is up to caller (e.g. language front end).
   The limit exists because the result is a signed type and we don't handle
   sizes that use more than one HOST_WIDE_INT.  */

tree
build_index_type (tree maxval)
{
  return build_range_type (sizetype, size_zero_node, maxval);
}
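
/* Illustrative sketch (hypothetical helper, guarded out): the domain of a
   ten-element array is the shared range [0, 9] over sizetype, which is
   exactly what build_index_type hands back.  */
#if 0
static tree
example_array_domain (void)
{
  tree domain = build_index_type (size_int (9));
  /* Shared range types are canonicalized, so an equivalent explicit request
     should return the very same node.  */
  gcc_checking_assert (domain == build_range_type (sizetype, size_zero_node,
                                                   size_int (9)));
  return domain;
}
#endif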
/* Return true if the debug information for TYPE, a subtype, should be emitted
   as a subrange type.  If so, set LOWVAL to the low bound and HIGHVAL to the
   high bound, respectively.  Sometimes doing so unnecessarily obfuscates the
   debug info and doesn't reflect the source code.  */

bool
subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
{
  tree base_type = TREE_TYPE (type), low, high;

  /* Subrange types have a base type which is an integral type.  */
  if (!INTEGRAL_TYPE_P (base_type))
    return false;

  /* Get the real bounds of the subtype.  */
  if (lang_hooks.types.get_subrange_bounds)
    lang_hooks.types.get_subrange_bounds (type, &low, &high);
  else
    {
      low = TYPE_MIN_VALUE (type);
      high = TYPE_MAX_VALUE (type);
    }

  /* If the type and its base type have the same representation and the same
     name, then the type is not a subrange but a copy of the base type.  */
  if ((TREE_CODE (base_type) == INTEGER_TYPE
       || TREE_CODE (base_type) == BOOLEAN_TYPE)
      && int_size_in_bytes (type) == int_size_in_bytes (base_type)
      && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
      && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
      && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
    return false;

  if (lowval)
    *lowval = low;
  if (highval)
    *highval = high;
  return true;
}
/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
   and number of elements specified by the range of values of INDEX_TYPE.
   If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
   If SHARED is true, reuse such a type that has already been constructed.
   If SET_CANONICAL is true, compute TYPE_CANONICAL from the element type.  */

static tree
build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
                    bool shared, bool set_canonical)
{
  tree t;

  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      error ("arrays of functions are not meaningful");
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  TYPE_TYPELESS_STORAGE (t) = typeless_storage;
  layout_type (t);

  if (shared)
    {
      hashval_t hash = type_hash_canon_hash (t);
      t = type_hash_canon (hash, t);
    }

  if (TYPE_CANONICAL (t) == t && set_canonical)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
          || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
          || in_lto_p)
        SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (elt_type) != elt_type
               || (index_type && TYPE_CANONICAL (index_type) != index_type))
        TYPE_CANONICAL (t)
          = build_array_type_1 (TYPE_CANONICAL (elt_type),
                                index_type
                                ? TYPE_CANONICAL (index_type) : NULL_TREE,
                                typeless_storage, shared, set_canonical);
    }

  return t;
}
/* Wrapper around build_array_type_1 with SHARED set to true.  */

tree
build_array_type (tree elt_type, tree index_type, bool typeless_storage)
{
  return
    build_array_type_1 (elt_type, index_type, typeless_storage, true, true);
}

/* Wrapper around build_array_type_1 with SHARED set to false.  */

tree
build_nonshared_array_type (tree elt_type, tree index_type)
{
  return build_array_type_1 (elt_type, index_type, false, false, true);
}

/* Return a representation of ELT_TYPE[NELTS], using indices of type
   sizetype.  */

tree
build_array_type_nelts (tree elt_type, poly_uint64 nelts)
{
  return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
}

/* Recursively examines the array elements of TYPE, until a non-array
   element type is found.  */

tree
strip_array_types (tree type)
{
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  return type;
}
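
/* Illustrative sketch (hypothetical helper, guarded out): building `int[10]'
   with build_array_type_nelts and peeling the array layers back off with
   strip_array_types.  */
#if 0
static void
example_array_type (void)
{
  tree arr = build_array_type_nelts (integer_type_node, 10);
  gcc_checking_assert (TREE_CODE (arr) == ARRAY_TYPE);
  gcc_checking_assert (strip_array_types (arr) == integer_type_node);
}
#endif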
/* Computes the canonical argument types from the argument type list
   ARGTYPES.

   Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
   on entry to this function, or if any of the ARGTYPES are
   structural.

   Upon return, *ANY_NONCANONICAL_P will be true iff either it was
   true on entry to this function, or if any of the ARGTYPES are
   non-canonical.

   Returns a canonical argument list, which may be ARGTYPES when the
   canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
   true) or would not differ from ARGTYPES.  */

static tree
maybe_canonicalize_argtypes (tree argtypes,
                             bool *any_structural_p,
                             bool *any_noncanonical_p)
{
  tree arg;
  bool any_noncanonical_argtypes_p = false;

  for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
    {
      if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
        /* Fail gracefully by stating that the type is structural.  */
        *any_structural_p = true;
      else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
        *any_structural_p = true;
      else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
               || TREE_PURPOSE (arg))
        /* If the argument has a default argument, we consider it
           non-canonical even though the type itself is canonical.
           That way, different variants of function and method types
           with default arguments will all point to the variant with
           no defaults as their canonical type.  */
        any_noncanonical_argtypes_p = true;
    }

  if (*any_structural_p)
    return argtypes;

  if (any_noncanonical_argtypes_p)
    {
      /* Build the canonical list of argument types.  */
      tree canon_argtypes = NULL_TREE;
      bool is_void = false;
      for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
        {
          if (arg == void_list_node)
            is_void = true;
          else
            canon_argtypes = tree_cons (NULL_TREE,
                                        TYPE_CANONICAL (TREE_VALUE (arg)),
                                        canon_argtypes);
        }

      canon_argtypes = nreverse (canon_argtypes);
      if (is_void)
        canon_argtypes = chainon (canon_argtypes, void_list_node);

      /* There is a non-canonical type.  */
      *any_noncanonical_p = true;
      return canon_argtypes;
    }

  /* The canonical argument types are the same as ARGTYPES.  */
  return argtypes;
}
/* Construct, lay out and return
   the type of functions returning type VALUE_TYPE
   given arguments of types ARG_TYPES.
   ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
   are data type nodes for the arguments of the function.
   If such a type has already been constructed, reuse it.  */

tree
build_function_type (tree value_type, tree arg_types)
{
  tree t;
  inchash::hash hstate;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  gcc_assert (arg_types != error_mark_node);

  if (TREE_CODE (value_type) == FUNCTION_TYPE)
    {
      error ("function return type cannot be function");
      value_type = integer_type_node;
    }

  /* Make a node of the sort we want.  */
  t = make_node (FUNCTION_TYPE);
  TREE_TYPE (t) = value_type;
  TYPE_ARG_TYPES (t) = arg_types;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  */
  any_structural_p   = TYPE_STRUCTURAL_EQUALITY_P (value_type);
  any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
  canon_argtypes = maybe_canonicalize_argtypes (arg_types,
                                                &any_structural_p,
                                                &any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
                                              canon_argtypes);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);
  return t;
}
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If VAARGS is set, no void_type_node is appended to the
   list.  ARGP must always be terminated by a NULL_TREE.  */

static tree
build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
{
  tree t, args, last;

  t = va_arg (argp, tree);
  for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
    args = tree_cons (NULL_TREE, t, args);

  if (vaargs)
    {
      last = args;
      if (args != NULL_TREE)
        args = nreverse (args);
      gcc_assert (last != void_list_node);
    }
  else if (args == NULL_TREE)
    args = void_list_node;
  else
    {
      last = args;
      args = nreverse (args);
      TREE_CHAIN (last) = void_list_node;
    }
  args = build_function_type (return_type, args);

  return args;
}
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If additional arguments are provided, they are
   additional argument types.  The list of argument types must always
   be terminated by NULL_TREE.  */

tree
build_function_type_list (tree return_type, ...)
{
  tree args;
  va_list p;

  va_start (p, return_type);
  args = build_function_type_list_1 (false, return_type, p);
  va_end (p);
  return args;
}

/* Build a variable argument function type.  The RETURN_TYPE is the
   type returned by the function.  If additional arguments are provided,
   they are additional argument types.  The list of argument types must
   always be terminated by NULL_TREE.  */

tree
build_varargs_function_type_list (tree return_type, ...)
{
  tree args;
  va_list p;

  va_start (p, return_type);
  args = build_function_type_list_1 (true, return_type, p);
  va_end (p);

  return args;
}
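
/* Illustrative sketch (hypothetical helper, guarded out): the type of
   `int f (double)' versus the varargs type of `int g (const char *, ...)'.
   Both argument lists are NULL_TREE-terminated, as required above.  */
#if 0
static void
example_function_types (void)
{
  /* int (double) -- the list variant appends void_list_node itself.  */
  tree fn1 = build_function_type_list (integer_type_node,
                                       double_type_node, NULL_TREE);

  /* int (const char *, ...) -- no trailing void_list_node, so stdarg.  */
  tree cchar = build_qualified_type (char_type_node, TYPE_QUAL_CONST);
  tree fn2 = build_varargs_function_type_list (integer_type_node,
                                               build_pointer_type (cchar),
                                               NULL_TREE);
  gcc_checking_assert (!stdarg_p (fn1) && stdarg_p (fn2));
}
#endif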
/* Build a function type.  RETURN_TYPE is the type returned by the
   function; VAARGS indicates whether the function takes varargs.  The
   function takes N named arguments, the types of which are provided in
   ARG_TYPES.  */

static tree
build_function_type_array_1 (bool vaargs, tree return_type, int n,
                             tree *arg_types)
{
  int i;
  tree t = vaargs ? NULL_TREE : void_list_node;

  for (i = n - 1; i >= 0; i--)
    t = tree_cons (NULL_TREE, arg_types[i], t);

  return build_function_type (return_type, t);
}

/* Build a function type.  RETURN_TYPE is the type returned by the
   function.  The function takes N named arguments, the types of which
   are provided in ARG_TYPES.  */

tree
build_function_type_array (tree return_type, int n, tree *arg_types)
{
  return build_function_type_array_1 (false, return_type, n, arg_types);
}

/* Build a variable argument function type.  RETURN_TYPE is the type
   returned by the function.  The function takes N named arguments, the
   types of which are provided in ARG_TYPES.  */

tree
build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
{
  return build_function_type_array_1 (true, return_type, n, arg_types);
}
/* Build a METHOD_TYPE for a member of BASETYPE.  The RETTYPE (a TYPE)
   and ARGTYPES (a TREE_LIST) are the return type and arguments types
   for the method.  An implicit additional parameter (of type
   pointer-to-BASETYPE) is added to the ARGTYPES.  */

tree
build_method_type_directly (tree basetype,
                            tree rettype,
                            tree argtypes)
{
  tree t;
  tree ptype;
  bool any_structural_p, any_noncanonical_p;
  tree canon_argtypes;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* Set up the canonical type.  */
  any_structural_p
    = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
       || TYPE_STRUCTURAL_EQUALITY_P (rettype));
  any_noncanonical_p
    = (TYPE_CANONICAL (basetype) != basetype
       || TYPE_CANONICAL (rettype) != rettype);
  canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
                                                &any_structural_p,
                                                &any_noncanonical_p);
  if (any_structural_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (any_noncanonical_p)
    TYPE_CANONICAL (t)
      = build_method_type_directly (TYPE_CANONICAL (basetype),
                                    TYPE_CANONICAL (rettype),
                                    canon_argtypes);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  return t;
}
/* Construct, lay out and return the type of methods belonging to class
   BASETYPE and whose arguments and values are described by TYPE.
   If that type exists already, reuse it.
   TYPE must be a FUNCTION_TYPE node.  */

tree
build_method_type (tree basetype, tree type)
{
  gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);

  return build_method_type_directly (basetype,
                                     TREE_TYPE (type),
                                     TYPE_ARG_TYPES (type));
}

/* Construct, lay out and return the type of offsets to a value
   of type TYPE, within an object of type BASETYPE.
   If a suitable offset type exists already, reuse it.  */

tree
build_offset_type (tree basetype, tree type)
{
  tree t;

  /* Make a node of the sort we want.  */
  t = make_node (OFFSET_TYPE);

  TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = type;

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  if (!COMPLETE_TYPE_P (t))
    layout_type (t);

  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
          || TYPE_STRUCTURAL_EQUALITY_P (type))
        SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
               || TYPE_CANONICAL (type) != type)
        TYPE_CANONICAL (t)
          = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
                               TYPE_CANONICAL (type));
    }

  return t;
}
/* Create a complex type whose components are COMPONENT_TYPE.

   If NAMED is true, the type is given a TYPE_NAME.  We do not always
   do so because this creates a DECL node and thus makes the DECL_UIDs
   dependent on the type canonicalization hashtable, which is GC-ed,
   so the DECL_UIDs would not be stable wrt garbage collection.  */

tree
build_complex_type (tree component_type, bool named)
{
  gcc_assert (INTEGRAL_TYPE_P (component_type)
              || SCALAR_FLOAT_TYPE_P (component_type)
              || FIXED_POINT_TYPE_P (component_type));

  /* Make a node of the sort we want.  */
  tree probe = make_node (COMPLEX_TYPE);

  TREE_TYPE (probe) = TYPE_MAIN_VARIANT (component_type);

  /* If we already have such a type, use the old one.  */
  hashval_t hash = type_hash_canon_hash (probe);
  tree t = type_hash_canon (hash, probe);

  if (t == probe)
    {
      /* We created a new type.  The hash insertion will have laid
         out the type.  We need to check the canonicalization and
         maybe set the name.  */
      gcc_checking_assert (COMPLETE_TYPE_P (t)
                           && !TYPE_NAME (t)
                           && TYPE_CANONICAL (t) == t);

      if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t)))
        SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (TREE_TYPE (t)) != TREE_TYPE (t))
        TYPE_CANONICAL (t)
          = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t)), named);

      /* We need to create a name, since complex is a fundamental type.  */
      if (named)
        {
          const char *name = NULL;

          if (TREE_TYPE (t) == char_type_node)
            name = "complex char";
          else if (TREE_TYPE (t) == signed_char_type_node)
            name = "complex signed char";
          else if (TREE_TYPE (t) == unsigned_char_type_node)
            name = "complex unsigned char";
          else if (TREE_TYPE (t) == short_integer_type_node)
            name = "complex short int";
          else if (TREE_TYPE (t) == short_unsigned_type_node)
            name = "complex short unsigned int";
          else if (TREE_TYPE (t) == integer_type_node)
            name = "complex int";
          else if (TREE_TYPE (t) == unsigned_type_node)
            name = "complex unsigned int";
          else if (TREE_TYPE (t) == long_integer_type_node)
            name = "complex long int";
          else if (TREE_TYPE (t) == long_unsigned_type_node)
            name = "complex long unsigned int";
          else if (TREE_TYPE (t) == long_long_integer_type_node)
            name = "complex long long int";
          else if (TREE_TYPE (t) == long_long_unsigned_type_node)
            name = "complex long long unsigned int";

          if (name != NULL)
            TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
                                        get_identifier (name), t);
        }
    }

  return build_qualified_type (t, TYPE_QUALS (component_type));
}
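
/* Illustrative sketch (hypothetical helper, guarded out): requesting the
   complex variant of `double'.  Passing NAMED as false avoids creating the
   TYPE_DECL discussed in the comment above.  */
#if 0
static tree
example_complex_double (void)
{
  tree c = build_complex_type (double_type_node, /*named=*/false);
  gcc_checking_assert (TREE_CODE (c) == COMPLEX_TYPE
                       && TREE_TYPE (c) == double_type_node);
  return c;
}
#endif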
7625 /* If TYPE is a real or complex floating-point type and the target
7626 does not directly support arithmetic on TYPE then return the wider
7627 type to be used for arithmetic on TYPE. Otherwise, return
7631 excess_precision_type (tree type
)
7633 /* The target can give two different responses to the question of
7634 which excess precision mode it would like depending on whether we
7635 are in -fexcess-precision=standard or -fexcess-precision=fast. */
7637 enum excess_precision_type requested_type
7638 = (flag_excess_precision
== EXCESS_PRECISION_FAST
7639 ? EXCESS_PRECISION_TYPE_FAST
7640 : (flag_excess_precision
== EXCESS_PRECISION_FLOAT16
7641 ? EXCESS_PRECISION_TYPE_FLOAT16
:EXCESS_PRECISION_TYPE_STANDARD
));
7643 enum flt_eval_method target_flt_eval_method
7644 = targetm
.c
.excess_precision (requested_type
);
7646 /* The target should not ask for unpredictable float evaluation (though
7647 it might advertise that implicitly the evaluation is unpredictable,
7648 but we don't care about that here, it will have been reported
7649 elsewhere). If it does ask for unpredictable evaluation, we have
7650 nothing to do here. */
7651 gcc_assert (target_flt_eval_method
!= FLT_EVAL_METHOD_UNPREDICTABLE
);
7653 /* Nothing to do. The target has asked for all types we know about
7654 to be computed with their native precision and range. */
7655 if (target_flt_eval_method
== FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16
)
7658 /* The target will promote this type in a target-dependent way, so excess
7659 precision ought to leave it alone. */
7660 if (targetm
.promoted_type (type
) != NULL_TREE
)
7663 machine_mode float16_type_mode
= (float16_type_node
7664 ? TYPE_MODE (float16_type_node
)
7666 machine_mode float_type_mode
= TYPE_MODE (float_type_node
);
7667 machine_mode double_type_mode
= TYPE_MODE (double_type_node
);
7669 switch (TREE_CODE (type
))
7673 machine_mode type_mode
= TYPE_MODE (type
);
7674 switch (target_flt_eval_method
)
7676 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
:
7677 if (type_mode
== float16_type_mode
)
7678 return float_type_node
;
7680 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
:
7681 if (type_mode
== float16_type_mode
7682 || type_mode
== float_type_mode
)
7683 return double_type_node
;
7685 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE
:
7686 if (type_mode
== float16_type_mode
7687 || type_mode
== float_type_mode
7688 || type_mode
== double_type_mode
)
7689 return long_double_type_node
;
7698 if (TREE_CODE (TREE_TYPE (type
)) != REAL_TYPE
)
7700 machine_mode type_mode
= TYPE_MODE (TREE_TYPE (type
));
7701 switch (target_flt_eval_method
)
7703 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
:
7704 if (type_mode
== float16_type_mode
)
7705 return complex_float_type_node
;
7707 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
:
7708 if (type_mode
== float16_type_mode
7709 || type_mode
== float_type_mode
)
7710 return complex_double_type_node
;
7712 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE
:
7713 if (type_mode
== float16_type_mode
7714 || type_mode
== float_type_mode
7715 || type_mode
== double_type_mode
)
7716 return complex_long_double_type_node
;
7730 /* Return OP, stripped of any conversions to wider types as much as is safe.
7731 Converting the value back to OP's type makes a value equivalent to OP.
7733 If FOR_TYPE is nonzero, we return a value which, if converted to
7734 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
7736 OP must have integer, real or enumeral type. Pointers are not allowed!
7738 There are some cases where the obvious value we could return
7739 would regenerate to OP if converted to OP's type,
7740 but would not extend like OP to wider types.
7741 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
7742 For example, if OP is (unsigned short)(signed char)-1,
7743 we avoid returning (signed char)-1 if FOR_TYPE is int,
7744 even though extending that to an unsigned short would regenerate OP,
7745 since the result of extending (signed char)-1 to (int)
7746 is different from (int) OP. */
7749 get_unwidened (tree op
, tree for_type
)
7751 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
7752 tree type
= TREE_TYPE (op
);
7754 = TYPE_PRECISION (for_type
!= 0 ? for_type
: type
);
7756 = (for_type
!= 0 && for_type
!= type
7757 && final_prec
> TYPE_PRECISION (type
)
7758 && TYPE_UNSIGNED (type
));
7761 while (CONVERT_EXPR_P (op
))
7765 /* TYPE_PRECISION on vector types has different meaning
7766 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
7767 so avoid them here. */
7768 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op
, 0))) == VECTOR_TYPE
)
7771 bitschange
= TYPE_PRECISION (TREE_TYPE (op
))
7772 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0)));
7774 /* Truncations are many-one so cannot be removed.
7775 Unless we are later going to truncate down even farther. */
7777 && final_prec
> TYPE_PRECISION (TREE_TYPE (op
)))
7780 /* See what's inside this conversion. If we decide to strip it,
7782 op
= TREE_OPERAND (op
, 0);
7784 /* If we have not stripped any zero-extensions (uns is 0),
7785 we can strip any kind of extension.
7786 If we have previously stripped a zero-extension,
7787 only zero-extensions can safely be stripped.
7788 Any extension can be stripped if the bits it would produce
7789 are all going to be discarded later by truncating to FOR_TYPE. */
7793 if (! uns
|| final_prec
<= TYPE_PRECISION (TREE_TYPE (op
)))
7795 /* TYPE_UNSIGNED says whether this is a zero-extension.
7796 Let's avoid computing it if it does not affect WIN
7797 and if UNS will not be needed again. */
7799 || CONVERT_EXPR_P (op
))
7800 && TYPE_UNSIGNED (TREE_TYPE (op
)))
7808 /* If we finally reach a constant see if it fits in sth smaller and
7809 in that case convert it. */
7810 if (TREE_CODE (win
) == INTEGER_CST
)
7812 tree wtype
= TREE_TYPE (win
);
7813 unsigned prec
= wi::min_precision (wi::to_wide (win
), TYPE_SIGN (wtype
));
7815 prec
= MAX (prec
, final_prec
);
7816 if (prec
< TYPE_PRECISION (wtype
))
7818 tree t
= lang_hooks
.types
.type_for_size (prec
, TYPE_UNSIGNED (wtype
));
7819 if (t
&& TYPE_PRECISION (t
) < TYPE_PRECISION (wtype
))
7820 win
= fold_convert (t
, win
);
7827 /* Return OP or a simpler expression for a narrower value
7828 which can be sign-extended or zero-extended to give back OP.
7829 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
7830 or 0 if the value should be sign-extended. */
7833 get_narrower (tree op
, int *unsignedp_ptr
)
7838 bool integral_p
= INTEGRAL_TYPE_P (TREE_TYPE (op
));
7840 if (TREE_CODE (op
) == COMPOUND_EXPR
)
7843 op
= TREE_OPERAND (op
, 1);
7844 while (TREE_CODE (op
) == COMPOUND_EXPR
);
7845 tree ret
= get_narrower (op
, unsignedp_ptr
);
7848 auto_vec
<tree
, 16> v
;
7850 for (op
= win
; TREE_CODE (op
) == COMPOUND_EXPR
;
7851 op
= TREE_OPERAND (op
, 1))
7853 FOR_EACH_VEC_ELT_REVERSE (v
, i
, op
)
7854 ret
= build2_loc (EXPR_LOCATION (op
), COMPOUND_EXPR
,
7855 TREE_TYPE (ret
), TREE_OPERAND (op
, 0),
7859 while (TREE_CODE (op
) == NOP_EXPR
)
7862 = (TYPE_PRECISION (TREE_TYPE (op
))
7863 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0))));
7865 /* Truncations are many-one so cannot be removed. */
7869 /* See what's inside this conversion. If we decide to strip it,
7874 op
= TREE_OPERAND (op
, 0);
7875 /* An extension: the outermost one can be stripped,
7876 but remember whether it is zero or sign extension. */
7878 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
7879 /* Otherwise, if a sign extension has been stripped,
7880 only sign extensions can now be stripped;
7881 if a zero extension has been stripped, only zero-extensions. */
7882 else if (uns
!= TYPE_UNSIGNED (TREE_TYPE (op
)))
7886 else /* bitschange == 0 */
7888 /* A change in nominal type can always be stripped, but we must
7889 preserve the unsignedness. */
7891 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
7893 op
= TREE_OPERAND (op
, 0);
7894 /* Keep trying to narrow, but don't assign op to win if it
7895 would turn an integral type into something else. */
7896 if (INTEGRAL_TYPE_P (TREE_TYPE (op
)) != integral_p
)
7903 if (TREE_CODE (op
) == COMPONENT_REF
7904 /* Since type_for_size always gives an integer type. */
7905 && TREE_CODE (TREE_TYPE (op
)) != REAL_TYPE
7906 && TREE_CODE (TREE_TYPE (op
)) != FIXED_POINT_TYPE
7907 /* Ensure field is laid out already. */
7908 && DECL_SIZE (TREE_OPERAND (op
, 1)) != 0
7909 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op
, 1))))
7911 unsigned HOST_WIDE_INT innerprec
7912 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op
, 1)));
7913 int unsignedp
= (DECL_UNSIGNED (TREE_OPERAND (op
, 1))
7914 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op
, 1))));
7915 tree type
= lang_hooks
.types
.type_for_size (innerprec
, unsignedp
);
7917 /* We can get this structure field in a narrower type that fits it,
7918 but the resulting extension to its nominal type (a fullword type)
7919 must satisfy the same conditions as for other extensions.
7921 Do this only for fields that are aligned (not bit-fields),
7922 because when bit-field insns will be used there is no
7923 advantage in doing this. */
7925 if (innerprec
< TYPE_PRECISION (TREE_TYPE (op
))
7926 && ! DECL_BIT_FIELD (TREE_OPERAND (op
, 1))
7927 && (first
|| uns
== DECL_UNSIGNED (TREE_OPERAND (op
, 1)))
7931 uns
= DECL_UNSIGNED (TREE_OPERAND (op
, 1));
7932 win
= fold_convert (type
, op
);
7936 *unsignedp_ptr
= uns
;
7940 /* Return true if integer constant C has a value that is permissible
7941 for TYPE, an integral type. */
7944 int_fits_type_p (const_tree c
, const_tree type
)
7946 tree type_low_bound
, type_high_bound
;
7947 bool ok_for_low_bound
, ok_for_high_bound
;
7948 signop sgn_c
= TYPE_SIGN (TREE_TYPE (c
));
7950 /* Non-standard boolean types can have arbitrary precision but various
7951 transformations assume that they can only take values 0 and +/-1. */
7952 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
7953 return wi::fits_to_boolean_p (wi::to_wide (c
), type
);
7956 type_low_bound
= TYPE_MIN_VALUE (type
);
7957 type_high_bound
= TYPE_MAX_VALUE (type
);
7959 /* If at least one bound of the type is a constant integer, we can check
7960 ourselves and maybe make a decision. If no such decision is possible, but
7961 this type is a subtype, try checking against that. Otherwise, use
7962 fits_to_tree_p, which checks against the precision.
7964 Compute the status for each possibly constant bound, and return if we see
7965 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
7966 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
7967 for "constant known to fit". */
7969 /* Check if c >= type_low_bound. */
7970 if (type_low_bound
&& TREE_CODE (type_low_bound
) == INTEGER_CST
)
7972 if (tree_int_cst_lt (c
, type_low_bound
))
7974 ok_for_low_bound
= true;
7977 ok_for_low_bound
= false;
7979 /* Check if c <= type_high_bound. */
7980 if (type_high_bound
&& TREE_CODE (type_high_bound
) == INTEGER_CST
)
7982 if (tree_int_cst_lt (type_high_bound
, c
))
7984 ok_for_high_bound
= true;
7987 ok_for_high_bound
= false;
7989 /* If the constant fits both bounds, the result is known. */
7990 if (ok_for_low_bound
&& ok_for_high_bound
)
7993 /* Perform some generic filtering which may allow making a decision
7994 even if the bounds are not constant. First, negative integers
7995 never fit in unsigned types, */
7996 if (TYPE_UNSIGNED (type
) && sgn_c
== SIGNED
&& wi::neg_p (wi::to_wide (c
)))
7999 /* Second, narrower types always fit in wider ones. */
8000 if (TYPE_PRECISION (type
) > TYPE_PRECISION (TREE_TYPE (c
)))
8003 /* Third, unsigned integers with top bit set never fit signed types. */
8004 if (!TYPE_UNSIGNED (type
) && sgn_c
== UNSIGNED
)
8006 int prec
= GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c
))) - 1;
8007 if (prec
< TYPE_PRECISION (TREE_TYPE (c
)))
8009 /* When a tree_cst is converted to a wide-int, the precision
8010 is taken from the type. However, if the precision of the
8011 mode underneath the type is smaller than that, it is
8012 possible that the value will not fit. The test below
8013 fails if any bit is set between the sign bit of the
8014 underlying mode and the top bit of the type. */
8015 if (wi::zext (wi::to_wide (c
), prec
- 1) != wi::to_wide (c
))
8018 else if (wi::neg_p (wi::to_wide (c
)))
8022 /* If we haven't been able to decide at this point, there nothing more we
8023 can check ourselves here. Look at the base type if we have one and it
8024 has the same precision. */
8025 if (TREE_CODE (type
) == INTEGER_TYPE
8026 && TREE_TYPE (type
) != 0
8027 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (type
)))
8029 type
= TREE_TYPE (type
);
8033 /* Or to fits_to_tree_p, if nothing else. */
8034 return wi::fits_to_tree_p (wi::to_wide (c
), type
);
8037 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8038 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8039 represented (assuming two's-complement arithmetic) within the bit
8040 precision of the type are returned instead. */
8043 get_type_static_bounds (const_tree type
, mpz_t min
, mpz_t max
)
8045 if (!POINTER_TYPE_P (type
) && TYPE_MIN_VALUE (type
)
8046 && TREE_CODE (TYPE_MIN_VALUE (type
)) == INTEGER_CST
)
8047 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type
)), min
, TYPE_SIGN (type
));
8050 if (TYPE_UNSIGNED (type
))
8051 mpz_set_ui (min
, 0);
8054 wide_int mn
= wi::min_value (TYPE_PRECISION (type
), SIGNED
);
8055 wi::to_mpz (mn
, min
, SIGNED
);
8059 if (!POINTER_TYPE_P (type
) && TYPE_MAX_VALUE (type
)
8060 && TREE_CODE (TYPE_MAX_VALUE (type
)) == INTEGER_CST
)
8061 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type
)), max
, TYPE_SIGN (type
));
8064 wide_int mn
= wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
8065 wi::to_mpz (mn
, max
, TYPE_SIGN (type
));
/* Return true if VAR is an automatic variable.  */

bool
auto_var_p (const_tree var)
{
  return ((((VAR_P (var) && ! DECL_EXTERNAL (var))
            || TREE_CODE (var) == PARM_DECL)
           && ! TREE_STATIC (var))
          || TREE_CODE (var) == RESULT_DECL);
}

/* Return true if VAR is an automatic variable defined in function FN.  */

bool
auto_var_in_fn_p (const_tree var, const_tree fn)
{
  return (DECL_P (var) && DECL_CONTEXT (var) == fn
          && (auto_var_p (var)
              || TREE_CODE (var) == LABEL_DECL));
}
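
/* Illustrative sketch (hypothetical helper, guarded out): a local variable or
   PARM_DECL whose DECL_CONTEXT is FN satisfies auto_var_in_fn_p, while
   statics, externs and globals do not.  */
#if 0
static bool
example_is_frame_var (tree var, tree fn)
{
  return auto_var_in_fn_p (var, fn);
}
#endif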
8090 /* Subprogram of following function. Called by walk_tree.
8092 Return *TP if it is an automatic variable or parameter of the
8093 function passed in as DATA. */
8096 find_var_from_fn (tree
*tp
, int *walk_subtrees
, void *data
)
8098 tree fn
= (tree
) data
;
8103 else if (DECL_P (*tp
)
8104 && auto_var_in_fn_p (*tp
, fn
))
8110 /* Returns true if T is, contains, or refers to a type with variable
8111 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8112 arguments, but not the return type. If FN is nonzero, only return
8113 true if a modifier of the type or position of FN is a variable or
8114 parameter inside FN.
8116 This concept is more general than that of C99 'variably modified types':
8117 in C99, a struct type is never variably modified because a VLA may not
8118 appear as a structure member. However, in GNU C code like:
8120 struct S { int i[f()]; };
8122 is valid, and other languages may define similar constructs. */
8125 variably_modified_type_p (tree type
, tree fn
)
8129 /* Test if T is either variable (if FN is zero) or an expression containing
8130 a variable in FN. If TYPE isn't gimplified, return true also if
8131 gimplify_one_sizepos would gimplify the expression into a local
8133 #define RETURN_TRUE_IF_VAR(T) \
8134 do { tree _t = (T); \
8135 if (_t != NULL_TREE \
8136 && _t != error_mark_node \
8137 && !CONSTANT_CLASS_P (_t) \
8138 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8140 || (!TYPE_SIZES_GIMPLIFIED (type) \
8141 && (TREE_CODE (_t) != VAR_DECL \
8142 && !CONTAINS_PLACEHOLDER_P (_t))) \
8143 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8144 return true; } while (0)
8146 if (type
== error_mark_node
)
8149 /* If TYPE itself has variable size, it is variably modified. */
8150 RETURN_TRUE_IF_VAR (TYPE_SIZE (type
));
8151 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type
));
8153 switch (TREE_CODE (type
))
8156 case REFERENCE_TYPE
:
8158 /* Ada can have pointer types refering to themselves indirectly. */
8159 if (TREE_VISITED (type
))
8161 TREE_VISITED (type
) = true;
8162 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8164 TREE_VISITED (type
) = false;
8167 TREE_VISITED (type
) = false;
8172 /* If TYPE is a function type, it is variably modified if the
8173 return type is variably modified. */
8174 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8180 case FIXED_POINT_TYPE
:
8183 /* Scalar types are variably modified if their end points
8185 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type
));
8186 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type
));
8191 case QUAL_UNION_TYPE
:
8192 /* We can't see if any of the fields are variably-modified by the
8193 definition we normally use, since that would produce infinite
8194 recursion via pointers. */
8195 /* This is variably modified if some field's type is. */
8196 for (t
= TYPE_FIELDS (type
); t
; t
= DECL_CHAIN (t
))
8197 if (TREE_CODE (t
) == FIELD_DECL
)
8199 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t
));
8200 RETURN_TRUE_IF_VAR (DECL_SIZE (t
));
8201 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t
));
8203 /* If the type is a qualified union, then the DECL_QUALIFIER
8204 of fields can also be an expression containing a variable. */
8205 if (TREE_CODE (type
) == QUAL_UNION_TYPE
)
8206 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t
));
8208 /* If the field is a qualified union, then it's only a container
8209 for what's inside so we look into it. That's necessary in LTO
8210 mode because the sizes of the field tested above have been set
8211 to PLACEHOLDER_EXPRs by free_lang_data. */
8212 if (TREE_CODE (TREE_TYPE (t
)) == QUAL_UNION_TYPE
8213 && variably_modified_type_p (TREE_TYPE (t
), fn
))
8219 /* Do not call ourselves to avoid infinite recursion. This is
8220 variably modified if the element type is. */
8221 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type
)));
8222 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8229 /* The current language may have other cases to check, but in general,
8230 all other types are not variably modified. */
8231 return lang_hooks
.tree_inlining
.var_mod_type_p (type
, fn
);
8233 #undef RETURN_TRUE_IF_VAR
/* Given a DECL or TYPE, return the scope in which it was declared, or
   NULL_TREE if there is no containing scope.  */

tree
get_containing_scope (const_tree t)
{
  return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
}

/* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL.  */

const_tree
get_ultimate_context (const_tree decl)
{
  while (decl && TREE_CODE (decl) != TRANSLATION_UNIT_DECL)
    {
      if (TREE_CODE (decl) == BLOCK)
        decl = BLOCK_SUPERCONTEXT (decl);
      else
        decl = get_containing_scope (decl);
    }
  return decl;
}
8260 /* Return the innermost context enclosing DECL that is
8261 a FUNCTION_DECL, or zero if none. */
8264 decl_function_context (const_tree decl
)
8268 if (TREE_CODE (decl
) == ERROR_MARK
)
8271 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8272 where we look up the function at runtime. Such functions always take
8273 a first argument of type 'pointer to real context'.
8275 C++ should really be fixed to use DECL_CONTEXT for the real context,
8276 and use something else for the "virtual context". */
8277 else if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_VIRTUAL_P (decl
))
8280 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl
)))));
8282 context
= DECL_CONTEXT (decl
);
8284 while (context
&& TREE_CODE (context
) != FUNCTION_DECL
)
8286 if (TREE_CODE (context
) == BLOCK
)
8287 context
= BLOCK_SUPERCONTEXT (context
);
8289 context
= get_containing_scope (context
);
8295 /* Return the innermost context enclosing DECL that is
8296 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8297 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8300 decl_type_context (const_tree decl
)
8302 tree context
= DECL_CONTEXT (decl
);
8305 switch (TREE_CODE (context
))
8307 case NAMESPACE_DECL
:
8308 case TRANSLATION_UNIT_DECL
:
8313 case QUAL_UNION_TYPE
:
8318 context
= DECL_CONTEXT (context
);
8322 context
= BLOCK_SUPERCONTEXT (context
);
/* CALL is a CALL_EXPR.  Return the declaration for the function
   called, or NULL_TREE if the called function cannot be
   determined.  */

tree
get_callee_fndecl (const_tree call)
{
  tree addr;

  if (call == error_mark_node)
    return error_mark_node;

  /* It's invalid to call this function with anything but a
     CALL_EXPR.  */
  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  /* The first operand to the CALL is the address of the function
     called.  */
  addr = CALL_EXPR_FN (call);

  /* If there is no function, return early.  */
  if (addr == NULL_TREE)
    return NULL_TREE;

  STRIP_NOPS (addr);

  /* If this is a readonly function pointer, extract its initial value.  */
  if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
      && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
      && DECL_INITIAL (addr))
    addr = DECL_INITIAL (addr);

  /* If the address is just `&f' for some function `f', then we know
     that `f' is being called.  */
  if (TREE_CODE (addr) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
    return TREE_OPERAND (addr, 0);

  /* We couldn't figure out what was being called.  */
  return NULL_TREE;
}
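
/* Illustrative sketch (hypothetical helper, guarded out): deciding whether a
   CALL_EXPR is a direct call to the malloc built-in.  get_callee_fndecl
   returns NULL_TREE for indirect calls, so the result must be checked.  */
#if 0
static bool
example_call_is_malloc (tree call)
{
  tree fndecl = get_callee_fndecl (call);
  return fndecl && fndecl_built_in_p (fndecl, BUILT_IN_MALLOC);
}
#endif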
/* If CALL_EXPR CALL calls a normal built-in function or an internal function,
   return the associated function code, otherwise return CFN_LAST.  */

combined_fn
get_call_combined_fn (const_tree call)
{
  /* It's invalid to call this function with anything but a CALL_EXPR.  */
  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  if (!CALL_EXPR_FN (call))
    return as_combined_fn (CALL_EXPR_IFN (call));

  tree fndecl = get_callee_fndecl (call);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return as_combined_fn (DECL_FUNCTION_CODE (fndecl));

  return CFN_LAST;
}
/* Comparator of indices based on tree_node_counts.  */

static int
tree_nodes_cmp (const void *p1, const void *p2)
{
  const unsigned *n1 = (const unsigned *)p1;
  const unsigned *n2 = (const unsigned *)p2;

  return tree_node_counts[*n1] - tree_node_counts[*n2];
}

/* Comparator of indices based on tree_code_counts.  */

static int
tree_codes_cmp (const void *p1, const void *p2)
{
  const unsigned *n1 = (const unsigned *)p1;
  const unsigned *n2 = (const unsigned *)p2;

  return tree_code_counts[*n1] - tree_code_counts[*n2];
}
8415 #define TREE_MEM_USAGE_SPACES 40
8417 /* Print debugging information about tree nodes generated during the compile,
8418 and any language-specific information. */
8421 dump_tree_statistics (void)
8423 if (GATHER_STATISTICS
)
8425 uint64_t total_nodes
, total_bytes
;
8426 fprintf (stderr
, "\nKind Nodes Bytes\n");
8427 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8428 total_nodes
= total_bytes
= 0;
8431 auto_vec
<unsigned> indices (all_kinds
);
8432 for (unsigned i
= 0; i
< all_kinds
; i
++)
8433 indices
.quick_push (i
);
8434 indices
.qsort (tree_nodes_cmp
);
8436 for (unsigned i
= 0; i
< (int) all_kinds
; i
++)
8438 unsigned j
= indices
[i
];
8439 fprintf (stderr
, "%-20s %6" PRIu64
"%c %9" PRIu64
"%c\n",
8440 tree_node_kind_names
[j
], SIZE_AMOUNT (tree_node_counts
[j
]),
8441 SIZE_AMOUNT (tree_node_sizes
[j
]));
8442 total_nodes
+= tree_node_counts
[j
];
8443 total_bytes
+= tree_node_sizes
[j
];
8445 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8446 fprintf (stderr
, "%-20s %6" PRIu64
"%c %9" PRIu64
"%c\n", "Total",
8447 SIZE_AMOUNT (total_nodes
), SIZE_AMOUNT (total_bytes
));
8448 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8452 fprintf (stderr
, "Code Nodes\n");
8453 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8455 auto_vec
<unsigned> indices (MAX_TREE_CODES
);
8456 for (unsigned i
= 0; i
< MAX_TREE_CODES
; i
++)
8457 indices
.quick_push (i
);
8458 indices
.qsort (tree_codes_cmp
);
8460 for (unsigned i
= 0; i
< MAX_TREE_CODES
; i
++)
8462 unsigned j
= indices
[i
];
8463 fprintf (stderr
, "%-32s %6" PRIu64
"%c\n",
8464 get_tree_code_name ((enum tree_code
) j
),
8465 SIZE_AMOUNT (tree_code_counts
[j
]));
8467 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8468 fprintf (stderr
, "\n");
8469 ssanames_print_statistics ();
8470 fprintf (stderr
, "\n");
8471 phinodes_print_statistics ();
8472 fprintf (stderr
, "\n");
8476 fprintf (stderr
, "(No per-node statistics)\n");
8478 print_type_hash_statistics ();
8479 print_debug_expr_statistics ();
8480 print_value_expr_statistics ();
8481 lang_hooks
.print_statistics ();
8484 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
/* Generate a crc32 of the low BYTES bytes of VALUE.  */

unsigned
crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
{
  /* This relies on the raw feedback's top 4 bits being zero.  */
#define FEEDBACK(X) ((X) * 0x04c11db7)
#define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
                     ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
  static const unsigned syndromes[16] =
    {
      SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
      SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
      SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
      SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
    };
#undef FEEDBACK
#undef SYNDROME

  value <<= (32 - bytes * 8);
  for (unsigned ix = bytes * 2; ix--; value <<= 4)
    {
      unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];

      chksum = (chksum << 4) ^ feedback;
    }

  return chksum;
}

/* Generate a crc32 of a string.  */

unsigned
crc32_string (unsigned chksum, const char *string)
{
  do
    chksum = crc32_byte (chksum, *string);
  while (*string++);
  return chksum;
}
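
/* Illustrative sketch (hypothetical helper, guarded out): the crc32 helpers
   accumulate, so a checksum can be built up from several pieces;
   get_file_function_name below mixes a symbol name in this way.  */
#if 0
static unsigned
example_checksum (const char *name)
{
  unsigned chksum = crc32_string (0, name);      /* seed with the name */
  chksum = crc32_unsigned (chksum, 0xdeadbeef);  /* then mix in a word */
  return chksum;
}
#endif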
8527 /* P is a string that will be used in a symbol. Mask out any characters
8528 that are not valid in that context. */
8531 clean_symbol_name (char *p
)
8535 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8538 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8545 static GTY(()) unsigned anon_cnt
= 0; /* Saved for PCH. */
8547 /* Create a unique anonymous identifier. The identifier is still a
8548 valid assembly label. */
8554 #if !defined (NO_DOT_IN_LABEL)
8556 #elif !defined (NO_DOLLAR_IN_LABEL)
8564 int len
= snprintf (buf
, sizeof (buf
), fmt
, anon_cnt
++);
8565 gcc_checking_assert (len
< int (sizeof (buf
)));
8567 tree id
= get_identifier_with_length (buf
, len
);
8568 IDENTIFIER_ANON_P (id
) = true;
8573 /* Generate a name for a special-purpose function.
8574 The generated name may need to be unique across the whole link.
8575 Changes to this function may also require corresponding changes to
8576 xstrdup_mask_random.
8577 TYPE is some string to identify the purpose of this function to the
8578 linker or collect2; it must start with an uppercase letter,
8580 I - for constructors
8582 N - for C++ anonymous namespaces
8583 F - for DWARF unwind frame information. */
8586 get_file_function_name (const char *type
)
8592 /* If we already have a name we know to be unique, just use that. */
8593 if (first_global_object_name
)
8594 p
= q
= ASTRDUP (first_global_object_name
);
8595 /* If the target is handling the constructors/destructors, they
8596 will be local to this file and the name is only necessary for
8598 We also assign sub_I and sub_D sufixes to constructors called from
8599 the global static constructors. These are always local. */
8600 else if (((type
[0] == 'I' || type
[0] == 'D') && targetm
.have_ctors_dtors
)
8601 || (startswith (type
, "sub_")
8602 && (type
[4] == 'I' || type
[4] == 'D')))
8604 const char *file
= main_input_filename
;
8606 file
= LOCATION_FILE (input_location
);
8607 /* Just use the file's basename, because the full pathname
8608 might be quite long. */
8609 p
= q
= ASTRDUP (lbasename (file
));
8613 /* Otherwise, the name must be unique across the entire link.
8614 We don't have anything that we know to be unique to this translation
8615 unit, so use what we do have and throw in some randomness. */
8617 const char *name
= weak_global_object_name
;
8618 const char *file
= main_input_filename
;
8623 file
= LOCATION_FILE (input_location
);
8625 len
= strlen (file
);
8626 q
= (char *) alloca (9 + 19 + len
+ 1);
8627 memcpy (q
, file
, len
+ 1);
8629 snprintf (q
+ len
, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX
,
8630 crc32_string (0, name
), get_random_seed (false));
8635 clean_symbol_name (q
);
8636 buf
= (char *) alloca (sizeof (FILE_FUNCTION_FORMAT
) + strlen (p
)
8639 /* Set up the name of the file-level functions we may need.
8640 Use a global object (which is already required to be unique over
8641 the program) rather than the file name (which imposes extra
8643 sprintf (buf
, FILE_FUNCTION_FORMAT
, type
, p
);
8645 return get_identifier (buf
);
8648 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
8650 /* Complain that the tree code of NODE does not match the expected 0
8651 terminated list of trailing codes. The trailing code list can be
8652 empty, for a more vague error message. FILE, LINE, and FUNCTION
8653 are of the caller. */
8656 tree_check_failed (const_tree node
, const char *file
,
8657 int line
, const char *function
, ...)
8661 unsigned length
= 0;
8662 enum tree_code code
;
8664 va_start (args
, function
);
8665 while ((code
= (enum tree_code
) va_arg (args
, int)))
8666 length
+= 4 + strlen (get_tree_code_name (code
));
8671 va_start (args
, function
);
8672 length
+= strlen ("expected ");
8673 buffer
= tmp
= (char *) alloca (length
);
8675 while ((code
= (enum tree_code
) va_arg (args
, int)))
8677 const char *prefix
= length
? " or " : "expected ";
8679 strcpy (tmp
+ length
, prefix
);
8680 length
+= strlen (prefix
);
8681 strcpy (tmp
+ length
, get_tree_code_name (code
));
8682 length
+= strlen (get_tree_code_name (code
));
8687 buffer
= "unexpected node";
8689 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8690 buffer
, get_tree_code_name (TREE_CODE (node
)),
8691 function
, trim_filename (file
), line
);
8694 /* Complain that the tree code of NODE does match the expected 0
8695 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
8699 tree_not_check_failed (const_tree node
, const char *file
,
8700 int line
, const char *function
, ...)
8704 unsigned length
= 0;
8705 enum tree_code code
;
8707 va_start (args
, function
);
8708 while ((code
= (enum tree_code
) va_arg (args
, int)))
8709 length
+= 4 + strlen (get_tree_code_name (code
));
8711 va_start (args
, function
);
8712 buffer
= (char *) alloca (length
);
8714 while ((code
= (enum tree_code
) va_arg (args
, int)))
8718 strcpy (buffer
+ length
, " or ");
8721 strcpy (buffer
+ length
, get_tree_code_name (code
));
8722 length
+= strlen (get_tree_code_name (code
));
8726 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
8727 buffer
, get_tree_code_name (TREE_CODE (node
)),
8728 function
, trim_filename (file
), line
);
8731 /* Similar to tree_check_failed, except that we check for a class of tree
8732 code, given in CL. */
8735 tree_class_check_failed (const_tree node
, const enum tree_code_class cl
,
8736 const char *file
, int line
, const char *function
)
8739 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
8740 TREE_CODE_CLASS_STRING (cl
),
8741 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
8742 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
8745 /* Similar to tree_check_failed, except that instead of specifying a
8746 dozen codes, use the knowledge that they're all sequential. */
8749 tree_range_check_failed (const_tree node
, const char *file
, int line
,
8750 const char *function
, enum tree_code c1
,
8754 unsigned length
= 0;
8757 for (c
= c1
; c
<= c2
; ++c
)
8758 length
+= 4 + strlen (get_tree_code_name ((enum tree_code
) c
));
8760 length
+= strlen ("expected ");
8761 buffer
= (char *) alloca (length
);
8764 for (c
= c1
; c
<= c2
; ++c
)
8766 const char *prefix
= length
? " or " : "expected ";
8768 strcpy (buffer
+ length
, prefix
);
8769 length
+= strlen (prefix
);
8770 strcpy (buffer
+ length
, get_tree_code_name ((enum tree_code
) c
));
8771 length
+= strlen (get_tree_code_name ((enum tree_code
) c
));
8774 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8775 buffer
, get_tree_code_name (TREE_CODE (node
)),
8776 function
, trim_filename (file
), line
);
8780 /* Similar to tree_check_failed, except that we check that a tree does
8781 not have the specified code, given in CL. */
8784 tree_not_class_check_failed (const_tree node
, const enum tree_code_class cl
,
8785 const char *file
, int line
, const char *function
)
8788 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
8789 TREE_CODE_CLASS_STRING (cl
),
8790 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
8791 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
8795 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
8798 omp_clause_check_failed (const_tree node
, const char *file
, int line
,
8799 const char *function
, enum omp_clause_code code
)
8801 internal_error ("tree check: expected %<omp_clause %s%>, have %qs "
8803 omp_clause_code_name
[code
],
8804 get_tree_code_name (TREE_CODE (node
)),
8805 function
, trim_filename (file
), line
);
8809 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
8812 omp_clause_range_check_failed (const_tree node
, const char *file
, int line
,
8813 const char *function
, enum omp_clause_code c1
,
8814 enum omp_clause_code c2
)
8817 unsigned length
= 0;
8820 for (c
= c1
; c
<= c2
; ++c
)
8821 length
+= 4 + strlen (omp_clause_code_name
[c
]);
8823 length
+= strlen ("expected ");
8824 buffer
= (char *) alloca (length
);
8827 for (c
= c1
; c
<= c2
; ++c
)
8829 const char *prefix
= length
? " or " : "expected ";
8831 strcpy (buffer
+ length
, prefix
);
8832 length
+= strlen (prefix
);
8833 strcpy (buffer
+ length
, omp_clause_code_name
[c
]);
8834 length
+= strlen (omp_clause_code_name
[c
]);
8837 internal_error ("tree check: %s, have %s in %s, at %s:%d",
8838 buffer
, omp_clause_code_name
[TREE_CODE (node
)],
8839 function
, trim_filename (file
), line
);
8843 #undef DEFTREESTRUCT
8844 #define DEFTREESTRUCT(VAL, NAME) NAME,
8846 static const char *ts_enum_names
[] = {
8847 #include "treestruct.def"
8849 #undef DEFTREESTRUCT
8851 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
8853 /* Similar to tree_class_check_failed, except that we check for
8854 whether CODE contains the tree structure identified by EN. */
8857 tree_contains_struct_check_failed (const_tree node
,
8858 const enum tree_node_structure_enum en
,
8859 const char *file
, int line
,
8860 const char *function
)
8863 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
8865 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
8869 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
8870 (dynamically sized) vector. */
8873 tree_int_cst_elt_check_failed (int idx
, int len
, const char *file
, int line
,
8874 const char *function
)
8877 ("tree check: accessed elt %d of %<tree_int_cst%> with %d elts in %s, "
8879 idx
+ 1, len
, function
, trim_filename (file
), line
);
8882 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
8883 (dynamically sized) vector. */
8886 tree_vec_elt_check_failed (int idx
, int len
, const char *file
, int line
,
8887 const char *function
)
8890 ("tree check: accessed elt %d of %<tree_vec%> with %d elts in %s, at %s:%d",
8891 idx
+ 1, len
, function
, trim_filename (file
), line
);
8894 /* Similar to above, except that the check is for the bounds of the operand
8895 vector of an expression node EXP. */
8898 tree_operand_check_failed (int idx
, const_tree exp
, const char *file
,
8899 int line
, const char *function
)
8901 enum tree_code code
= TREE_CODE (exp
);
8903 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
8904 idx
+ 1, get_tree_code_name (code
), TREE_OPERAND_LENGTH (exp
),
8905 function
, trim_filename (file
), line
);
8908 /* Similar to above, except that the check is for the number of
8909 operands of an OMP_CLAUSE node. */
8912 omp_clause_operand_check_failed (int idx
, const_tree t
, const char *file
,
8913 int line
, const char *function
)
8916 ("tree check: accessed operand %d of %<omp_clause %s%> with %d operands "
8917 "in %s, at %s:%d", idx
+ 1, omp_clause_code_name
[OMP_CLAUSE_CODE (t
)],
8918 omp_clause_num_ops
[OMP_CLAUSE_CODE (t
)], function
,
8919 trim_filename (file
), line
);
8921 #endif /* ENABLE_TREE_CHECKING */
/* Create a new vector type node holding NUNITS units of type INNERTYPE,
   and mapped to the machine mode MODE.  Initialize its fields and build
   the information necessary for debugging output.  */

static tree
make_vector_type (tree innertype, poly_int64 nunits, machine_mode mode)
{
  tree t;
  tree mv_innertype = TYPE_MAIN_VARIANT (innertype);

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = mv_innertype;
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if ((TYPE_CANONICAL (mv_innertype) != innertype
            || mode != VOIDmode)
           && !VECTOR_BOOLEAN_TYPE_P (t))
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);

  layout_type (t);

  hashval_t hash = type_hash_canon_hash (t);
  t = type_hash_canon (hash, t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
                                              TYPE_ATTRIBUTES (innertype),
                                              TYPE_QUALS (innertype));

  return t;
}
static tree
make_or_reuse_type (unsigned size, int unsignedp)
{
  int i;

  if (size == INT_TYPE_SIZE)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (size == CHAR_TYPE_SIZE)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (size == SHORT_TYPE_SIZE)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (size == LONG_TYPE_SIZE)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (size == LONG_LONG_TYPE_SIZE)
    return (unsignedp ? long_long_unsigned_type_node
            : long_long_integer_type_node);

  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (size == int_n_data[i].bitsize
        && int_n_enabled_p[i])
      return (unsignedp ? int_n_trees[i].unsigned_type
              : int_n_trees[i].signed_type);

  if (unsignedp)
    return make_unsigned_type (size);
  else
    return make_signed_type (size);
}
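
/* Illustrative sketch (guarded out): on a target where int is 32 bits wide,
   asking for a 32-bit signed type returns the existing integer_type_node
   rather than a fresh node, keeping the common C type nodes unique.  */
#if 0
static void
example_reuse (void)
{
  gcc_checking_assert (make_or_reuse_type (INT_TYPE_SIZE, 0)
                       == integer_type_node);
}
#endif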
8991 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
8994 make_or_reuse_fract_type (unsigned size
, int unsignedp
, int satp
)
8998 if (size
== SHORT_FRACT_TYPE_SIZE
)
8999 return unsignedp
? sat_unsigned_short_fract_type_node
9000 : sat_short_fract_type_node
;
9001 if (size
== FRACT_TYPE_SIZE
)
9002 return unsignedp
? sat_unsigned_fract_type_node
: sat_fract_type_node
;
9003 if (size
== LONG_FRACT_TYPE_SIZE
)
9004 return unsignedp
? sat_unsigned_long_fract_type_node
9005 : sat_long_fract_type_node
;
9006 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9007 return unsignedp
? sat_unsigned_long_long_fract_type_node
9008 : sat_long_long_fract_type_node
;
9012 if (size
== SHORT_FRACT_TYPE_SIZE
)
9013 return unsignedp
? unsigned_short_fract_type_node
9014 : short_fract_type_node
;
9015 if (size
== FRACT_TYPE_SIZE
)
9016 return unsignedp
? unsigned_fract_type_node
: fract_type_node
;
9017 if (size
== LONG_FRACT_TYPE_SIZE
)
9018 return unsignedp
? unsigned_long_fract_type_node
9019 : long_fract_type_node
;
9020 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9021 return unsignedp
? unsigned_long_long_fract_type_node
9022 : long_long_fract_type_node
;
9025 return make_fract_type (size
, unsignedp
, satp
);
9028 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9031 make_or_reuse_accum_type (unsigned size
, int unsignedp
, int satp
)
9035 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9036 return unsignedp
? sat_unsigned_short_accum_type_node
9037 : sat_short_accum_type_node
;
9038 if (size
== ACCUM_TYPE_SIZE
)
9039 return unsignedp
? sat_unsigned_accum_type_node
: sat_accum_type_node
;
9040 if (size
== LONG_ACCUM_TYPE_SIZE
)
9041 return unsignedp
? sat_unsigned_long_accum_type_node
9042 : sat_long_accum_type_node
;
9043 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9044 return unsignedp
? sat_unsigned_long_long_accum_type_node
9045 : sat_long_long_accum_type_node
;
9049 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9050 return unsignedp
? unsigned_short_accum_type_node
9051 : short_accum_type_node
;
9052 if (size
== ACCUM_TYPE_SIZE
)
9053 return unsignedp
? unsigned_accum_type_node
: accum_type_node
;
9054 if (size
== LONG_ACCUM_TYPE_SIZE
)
9055 return unsignedp
? unsigned_long_accum_type_node
9056 : long_accum_type_node
;
9057 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9058 return unsignedp
? unsigned_long_long_accum_type_node
9059 : long_long_accum_type_node
;
9062 return make_accum_type (size
, unsignedp
, satp
);
9066 /* Create an atomic variant node for TYPE. This routine is called
9067 during initialization of data types to create the 5 basic atomic
9068 types. The generic build_variant_type function requires these to
9069 already be set up in order to function properly, so cannot be
9070 called from there. If ALIGN is non-zero, then ensure alignment is
9071 overridden to this value. */
9074 build_atomic_base (tree type
, unsigned int align
)
9078 /* Make sure its not already registered. */
9079 if ((t
= get_qualified_type (type
, TYPE_QUAL_ATOMIC
)))
9082 t
= build_variant_type_copy (type
);
9083 set_type_quals (t
, TYPE_QUAL_ATOMIC
);
9086 SET_TYPE_ALIGN (t
, align
);
9091 /* Information about the _FloatN and _FloatNx types. This must be in
9092 the same order as the corresponding TI_* enum values. */
9093 const floatn_type_info floatn_nx_types
[NUM_FLOATN_NX_TYPES
] =
9105 /* Create nodes for all integer types (and error_mark_node) using the sizes
9106 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9109 build_common_tree_nodes (bool signed_char
)
9113 error_mark_node
= make_node (ERROR_MARK
);
9114 TREE_TYPE (error_mark_node
) = error_mark_node
;
9116 initialize_sizetypes ();
9118 /* Define both `signed char' and `unsigned char'. */
9119 signed_char_type_node
= make_signed_type (CHAR_TYPE_SIZE
);
9120 TYPE_STRING_FLAG (signed_char_type_node
) = 1;
9121 unsigned_char_type_node
= make_unsigned_type (CHAR_TYPE_SIZE
);
9122 TYPE_STRING_FLAG (unsigned_char_type_node
) = 1;
9124 /* Define `char', which is like either `signed char' or `unsigned char'
9125 but not the same as either. */
9128 ? make_signed_type (CHAR_TYPE_SIZE
)
9129 : make_unsigned_type (CHAR_TYPE_SIZE
));
9130 TYPE_STRING_FLAG (char_type_node
) = 1;
9132 short_integer_type_node
= make_signed_type (SHORT_TYPE_SIZE
);
9133 short_unsigned_type_node
= make_unsigned_type (SHORT_TYPE_SIZE
);
9134 integer_type_node
= make_signed_type (INT_TYPE_SIZE
);
9135 unsigned_type_node
= make_unsigned_type (INT_TYPE_SIZE
);
9136 long_integer_type_node
= make_signed_type (LONG_TYPE_SIZE
);
9137 long_unsigned_type_node
= make_unsigned_type (LONG_TYPE_SIZE
);
9138 long_long_integer_type_node
= make_signed_type (LONG_LONG_TYPE_SIZE
);
9139 long_long_unsigned_type_node
= make_unsigned_type (LONG_LONG_TYPE_SIZE
);
9141 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9143 int_n_trees
[i
].signed_type
= make_signed_type (int_n_data
[i
].bitsize
);
9144 int_n_trees
[i
].unsigned_type
= make_unsigned_type (int_n_data
[i
].bitsize
);
9146 if (int_n_enabled_p
[i
])
9148 integer_types
[itk_intN_0
+ i
* 2] = int_n_trees
[i
].signed_type
;
9149 integer_types
[itk_unsigned_intN_0
+ i
* 2] = int_n_trees
[i
].unsigned_type
;
9153 /* Define a boolean type. This type only represents boolean values but
9154 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9155 boolean_type_node
= make_unsigned_type (BOOL_TYPE_SIZE
);
9156 TREE_SET_CODE (boolean_type_node
, BOOLEAN_TYPE
);
9157 TYPE_PRECISION (boolean_type_node
) = 1;
9158 TYPE_MAX_VALUE (boolean_type_node
) = build_int_cst (boolean_type_node
, 1);
9160 /* Define what type to use for size_t. */
9161 if (strcmp (SIZE_TYPE
, "unsigned int") == 0)
9162 size_type_node
= unsigned_type_node
;
9163 else if (strcmp (SIZE_TYPE
, "long unsigned int") == 0)
9164 size_type_node
= long_unsigned_type_node
;
9165 else if (strcmp (SIZE_TYPE
, "long long unsigned int") == 0)
9166 size_type_node
= long_long_unsigned_type_node
;
9167 else if (strcmp (SIZE_TYPE
, "short unsigned int") == 0)
9168 size_type_node
= short_unsigned_type_node
;
9173 size_type_node
= NULL_TREE
;
9174 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9175 if (int_n_enabled_p
[i
])
9177 char name
[50], altname
[50];
9178 sprintf (name
, "__int%d unsigned", int_n_data
[i
].bitsize
);
9179 sprintf (altname
, "__int%d__ unsigned", int_n_data
[i
].bitsize
);
9181 if (strcmp (name
, SIZE_TYPE
) == 0
9182 || strcmp (altname
, SIZE_TYPE
) == 0)
9184 size_type_node
= int_n_trees
[i
].unsigned_type
;
9187 if (size_type_node
== NULL_TREE
)
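
  /* As a concrete example, an LP64 target normally defines SIZE_TYPE as
     "long unsigned int", so size_type_node ends up being
     long_unsigned_type_node; the __intN scan above only matters for the
     few targets whose size_t is one of the __intN types.  */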

  /* Define what type to use for ptrdiff_t.  */
  if (strcmp (PTRDIFF_TYPE, "int") == 0)
    ptrdiff_type_node = integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
    ptrdiff_type_node = long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
    ptrdiff_type_node = long_long_integer_type_node;
  else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
    ptrdiff_type_node = short_integer_type_node;
  else
    {
      ptrdiff_type_node = NULL_TREE;
      for (int i = 0; i < NUM_INT_N_ENTS; i++)
        if (int_n_enabled_p[i])
          {
            char name[50], altname[50];
            sprintf (name, "__int%d", int_n_data[i].bitsize);
            sprintf (altname, "__int%d__", int_n_data[i].bitsize);

            if (strcmp (name, PTRDIFF_TYPE) == 0
                || strcmp (altname, PTRDIFF_TYPE) == 0)
              ptrdiff_type_node = int_n_trees[i].signed_type;
          }
      if (ptrdiff_type_node == NULL_TREE)
        gcc_unreachable ();
    }

  /* Fill in the rest of the sized types.  Reuse existing type nodes
     if possible.  */
  intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
  intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
  intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
  intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
  intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);

  unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
  unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
  unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
  unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
  unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);

  /* Don't call build_qualified type for atomics.  That routine does
     special processing for atomics, and until they are initialized
     it's better not to make that call.

     Check to see if there is a target override for atomic types.  */

  atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
                                          targetm.atomic_align_for_mode (QImode));
  atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
                                          targetm.atomic_align_for_mode (HImode));
  atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
                                          targetm.atomic_align_for_mode (SImode));
  atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
                                          targetm.atomic_align_for_mode (DImode));
  atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
                                          targetm.atomic_align_for_mode (TImode));

  access_public_node = get_identifier ("public");
  access_protected_node = get_identifier ("protected");
  access_private_node = get_identifier ("private");

  /* Define these next since types below may use them.  */
  integer_zero_node = build_int_cst (integer_type_node, 0);
  integer_one_node = build_int_cst (integer_type_node, 1);
  integer_three_node = build_int_cst (integer_type_node, 3);
  integer_minus_one_node = build_int_cst (integer_type_node, -1);

  size_zero_node = size_int (0);
  size_one_node = size_int (1);
  bitsize_zero_node = bitsize_int (0);
  bitsize_one_node = bitsize_int (1);
  bitsize_unit_node = bitsize_int (BITS_PER_UNIT);

  boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
  boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);

  void_type_node = make_node (VOID_TYPE);
  layout_type (void_type_node);

  /* We are not going to have real types in C with less than byte alignment,
     so we might as well not have any types that claim to have it.  */
  SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
  TYPE_USER_ALIGN (void_type_node) = 0;

  void_node = make_node (VOID_CST);
  TREE_TYPE (void_node) = void_type_node;

  null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
  layout_type (TREE_TYPE (null_pointer_node));

  ptr_type_node = build_pointer_type (void_type_node);
  const_ptr_type_node
    = build_pointer_type (build_type_variant (void_type_node, 1, 0));
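  /* Note that build_type_variant (void_type_node, 1, 0) is the
     "const void" variant, so const_ptr_type_node above is the type
     "const void *".  */
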
  for (unsigned i = 0;
       i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
       ++i)
    builtin_structptr_types[i].node = builtin_structptr_types[i].base;

  pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);

  float_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
  layout_type (float_type_node);

  double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
  layout_type (double_type_node);

  long_double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
  layout_type (long_double_type_node);

  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      int n = floatn_nx_types[i].n;
      bool extended = floatn_nx_types[i].extended;
      scalar_float_mode mode;
      if (!targetm.floatn_mode (n, extended).exists (&mode))
        continue;
      int precision = GET_MODE_PRECISION (mode);
      /* Work around the rs6000 KFmode having precision 113 not
         128.  */
      const struct real_format *fmt = REAL_MODE_FORMAT (mode);
      gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
      int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
      if (!extended)
        gcc_assert (min_precision == n);
      if (precision < min_precision)
        precision = min_precision;
      FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
      TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
      layout_type (FLOATN_NX_TYPE_NODE (i));
      SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
    }

  float_ptr_type_node = build_pointer_type (float_type_node);
  double_ptr_type_node = build_pointer_type (double_type_node);
  long_double_ptr_type_node = build_pointer_type (long_double_type_node);
  integer_ptr_type_node = build_pointer_type (integer_type_node);

  /* Fixed size integer types.  */
  uint16_type_node = make_or_reuse_type (16, 1);
  uint32_type_node = make_or_reuse_type (32, 1);
  uint64_type_node = make_or_reuse_type (64, 1);
  if (targetm.scalar_mode_supported_p (TImode))
    uint128_type_node = make_or_reuse_type (128, 1);

  /* Decimal float types.  */
  if (targetm.decimal_float_supported_p ())
    {
      dfloat32_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
      SET_TYPE_MODE (dfloat32_type_node, SDmode);
      layout_type (dfloat32_type_node);

      dfloat64_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
      SET_TYPE_MODE (dfloat64_type_node, DDmode);
      layout_type (dfloat64_type_node);

      dfloat128_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
      SET_TYPE_MODE (dfloat128_type_node, TDmode);
      layout_type (dfloat128_type_node);
    }

  complex_integer_type_node = build_complex_type (integer_type_node, true);
  complex_float_type_node = build_complex_type (float_type_node, true);
  complex_double_type_node = build_complex_type (double_type_node, true);
  complex_long_double_type_node = build_complex_type (long_double_type_node,
                                                      true);

  for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
    {
      if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
        COMPLEX_FLOATN_NX_TYPE_NODE (i)
          = build_complex_type (FLOATN_NX_TYPE_NODE (i));
    }

  /* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
  sat_ ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

#define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
  sat_ ## WIDTH ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

/* Make fixed-point type nodes based on four different widths.  */
#define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)

/* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
  NAME ## _type_node = \
    make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
  u ## NAME ## _type_node = \
    make_or_reuse_unsigned_ ## KIND ## _type \
      (GET_MODE_BITSIZE (U ## MODE ## mode)); \
  sat_ ## NAME ## _type_node = \
    make_or_reuse_sat_signed_ ## KIND ## _type \
      (GET_MODE_BITSIZE (MODE ## mode)); \
  sat_u ## NAME ## _type_node = \
    make_or_reuse_sat_unsigned_ ## KIND ## _type \
      (GET_MODE_BITSIZE (U ## MODE ## mode));

  /* Fixed-point type and mode nodes.  */
  MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
  MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
  MAKE_FIXED_MODE_NODE (fract, qq, QQ)
  MAKE_FIXED_MODE_NODE (fract, hq, HQ)
  MAKE_FIXED_MODE_NODE (fract, sq, SQ)
  MAKE_FIXED_MODE_NODE (fract, dq, DQ)
  MAKE_FIXED_MODE_NODE (fract, tq, TQ)
  MAKE_FIXED_MODE_NODE (accum, ha, HA)
  MAKE_FIXED_MODE_NODE (accum, sa, SA)
  MAKE_FIXED_MODE_NODE (accum, da, DA)
  MAKE_FIXED_MODE_NODE (accum, ta, TA)
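
  /* As an illustration of the macros above, MAKE_FIXED_MODE_NODE (fract,
     qq, QQ) expands into assignments of qq_type_node, uqq_type_node,
     sat_qq_type_node and sat_uqq_type_node from the bit sizes of QQmode
     and UQQmode respectively.  */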

  {
    tree t = targetm.build_builtin_va_list ();

    /* Many back-ends define record types without setting TYPE_NAME.
       If we copied the record type here, we'd keep the original
       record type without a name.  This breaks name mangling.  So,
       don't copy record types and let c_common_nodes_and_builtins()
       declare the type to be __builtin_va_list.  */
    if (TREE_CODE (t) != RECORD_TYPE)
      t = build_variant_type_copy (t);

    va_list_type_node = t;
  }

  /* SCEV analyzer global shared trees.  */
  chrec_dont_know = make_node (SCEV_NOT_KNOWN);
  TREE_TYPE (chrec_dont_know) = void_type_node;
  chrec_known = make_node (SCEV_KNOWN);
  TREE_TYPE (chrec_known) = void_type_node;
}

/* Modify DECL for given flags.
   TM_PURE attribute is set only on types, so the function will modify
   DECL's type when ECF_TM_PURE is used.  */

static void
set_call_expr_flags (tree decl, int flags)
{
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
                                        NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_COLD)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
                                        NULL, DECL_ATTRIBUTES (decl));
  if (flags & ECF_RET1)
    DECL_ATTRIBUTES (decl)
      = tree_cons (get_identifier ("fn spec"),
                   build_tree_list (NULL_TREE, build_string (2, "1 ")),
                   DECL_ATTRIBUTES (decl));
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
              || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}
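
/* For example, most of the memory builtins registered below are created
   with ECF_NOTHROW | ECF_LEAF, which sets TREE_NOTHROW on the decl and
   attaches the "leaf" attribute; ECF_CONST and ECF_PURE map onto
   TREE_READONLY and DECL_PURE_P in the same way.  */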

/* A subroutine of build_common_builtin_nodes.  Define a builtin function.  */

static void
local_define_builtin (const char *name, tree type, enum built_in_function code,
                      const char *library_name, int ecf_flags)
{
  tree decl;

  decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
                               library_name, NULL_TREE);
  set_call_expr_flags (decl, ecf_flags);

  set_builtin_decl (code, decl, true);
}

/* Call this function after instantiating all builtins that the language
   front end cares about.  This will build the rest of the builtins
   and internal functions that are relied upon by the tree optimizers and
   the middle-end.  */

void
build_common_builtin_nodes (void)
{
  tree tmp, ftype;
  int ecf_flags;

  if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_PADDING))
    {
      ftype = build_function_type_list (void_type_node,
                                        ptr_type_node,
                                        ptr_type_node,
                                        integer_type_node,
                                        NULL_TREE);
      local_define_builtin ("__builtin_clear_padding", ftype,
                            BUILT_IN_CLEAR_PADDING,
                            "__builtin_clear_padding",
                            ECF_LEAF | ECF_NOTHROW);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
      || !builtin_decl_explicit_p (BUILT_IN_ABORT))
    {
      ftype = build_function_type (void_type_node, void_list_node);
      if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
        local_define_builtin ("__builtin_unreachable", ftype,
                              BUILT_IN_UNREACHABLE,
                              "__builtin_unreachable",
                              ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
                              | ECF_CONST | ECF_COLD);
      if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
        local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
                              "abort",
                              ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
      || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
    {
      ftype = build_function_type_list (ptr_type_node,
                                        ptr_type_node, const_ptr_type_node,
                                        size_type_node, NULL_TREE);

      if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
        local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
                              "memcpy", ECF_NOTHROW | ECF_LEAF);
      if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
        local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
                              "memmove", ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
    {
      ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
                                        const_ptr_type_node, size_type_node,
                                        NULL_TREE);
      local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
                            "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
    {
      ftype = build_function_type_list (ptr_type_node,
                                        ptr_type_node, integer_type_node,
                                        size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
                            "memset", ECF_NOTHROW | ECF_LEAF);
    }

  /* If we're checking the stack, `alloca' can throw.  */
  const int alloca_flags
    = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);

  if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
    {
      ftype = build_function_type_list (ptr_type_node,
                                        size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
                            "alloca", alloca_flags);
    }

  ftype = build_function_type_list (ptr_type_node, size_type_node,
                                    size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align", ftype,
                        BUILT_IN_ALLOCA_WITH_ALIGN,
                        "__builtin_alloca_with_align",
                        alloca_flags);

  ftype = build_function_type_list (ptr_type_node, size_type_node,
                                    size_type_node, size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align_and_max", ftype,
                        BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX,
                        "__builtin_alloca_with_align_and_max",
                        alloca_flags);

  ftype = build_function_type_list (void_type_node,
                                    ptr_type_node, ptr_type_node,
                                    ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_init_trampoline", ftype,
                        BUILT_IN_INIT_TRAMPOLINE,
                        "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_heap_trampoline", ftype,
                        BUILT_IN_INIT_HEAP_TRAMPOLINE,
                        "__builtin_init_heap_trampoline",
                        ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_descriptor", ftype,
                        BUILT_IN_INIT_DESCRIPTOR,
                        "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_adjust_trampoline", ftype,
                        BUILT_IN_ADJUST_TRAMPOLINE,
                        "__builtin_adjust_trampoline",
                        ECF_CONST | ECF_NOTHROW);
  local_define_builtin ("__builtin_adjust_descriptor", ftype,
                        BUILT_IN_ADJUST_DESCRIPTOR,
                        "__builtin_adjust_descriptor",
                        ECF_CONST | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
                                    ptr_type_node, ptr_type_node, NULL_TREE);
  if (!builtin_decl_explicit_p (BUILT_IN_CLEAR_CACHE))
    local_define_builtin ("__builtin___clear_cache", ftype,
                          BUILT_IN_CLEAR_CACHE,
                          "__clear_cache",
                          ECF_NOTHROW);

  local_define_builtin ("__builtin_nonlocal_goto", ftype,
                        BUILT_IN_NONLOCAL_GOTO,
                        "__builtin_nonlocal_goto",
                        ECF_NORETURN | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
                                    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_setup", ftype,
                        BUILT_IN_SETJMP_SETUP,
                        "__builtin_setjmp_setup", ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_receiver", ftype,
                        BUILT_IN_SETJMP_RECEIVER,
                        "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
                        "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_restore", ftype,
                        BUILT_IN_STACK_RESTORE,
                        "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
                                    const_ptr_type_node, size_type_node,
                                    NULL_TREE);
  local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
                        "__builtin_memcmp_eq",
                        ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strncmp_eq", ftype, BUILT_IN_STRNCMP_EQ,
                        "__builtin_strncmp_eq",
                        ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  local_define_builtin ("__builtin_strcmp_eq", ftype, BUILT_IN_STRCMP_EQ,
                        "__builtin_strcmp_eq",
                        ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  /* If there's a possibility that we might use the ARM EABI, build the
     alternate __cxa_end_cleanup node used to resume from C++.  */
  if (targetm.arm_eabi_unwinder)
    {
      ftype = build_function_type_list (void_type_node, NULL_TREE);
      local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
                            BUILT_IN_CXA_END_CLEANUP,
                            "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
    }

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_unwind_resume", ftype,
                        BUILT_IN_UNWIND_RESUME,
                        ((targetm_common.except_unwind_info (&global_options)
                          == UI_SJLJ)
                         ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
                        ECF_NORETURN);

  if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
    {
      ftype = build_function_type_list (ptr_type_node, integer_type_node,
                                        NULL_TREE);
      local_define_builtin ("__builtin_return_address", ftype,
                            BUILT_IN_RETURN_ADDRESS,
                            "__builtin_return_address",
                            ECF_NOTHROW);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
      || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
    {
      ftype = build_function_type_list (void_type_node, ptr_type_node,
                                        ptr_type_node, NULL_TREE);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
        local_define_builtin ("__cyg_profile_func_enter", ftype,
                              BUILT_IN_PROFILE_FUNC_ENTER,
                              "__cyg_profile_func_enter", 0);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
        local_define_builtin ("__cyg_profile_func_exit", ftype,
                              BUILT_IN_PROFILE_FUNC_EXIT,
                              "__cyg_profile_func_exit", 0);
    }

  /* The exception object and filter values from the runtime.  The argument
     must be zero before exception lowering, i.e. from the front end.  After
     exception lowering, it will be the region number for the exception
     landing pad.  These functions are PURE instead of CONST to prevent
     them from being hoisted past the exception edge that will initialize
     its value in the landing pad.  */
  ftype = build_function_type_list (ptr_type_node,
                                    integer_type_node, NULL_TREE);
  ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
  /* Only use TM_PURE if we have TM language support.  */
  if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
    ecf_flags |= ECF_TM_PURE;
  local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
                        "__builtin_eh_pointer", ecf_flags);

  tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
  ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
  local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
                        "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node,
                                    integer_type_node, integer_type_node,
                                    NULL_TREE);
  local_define_builtin ("__builtin_eh_copy_values", ftype,
                        BUILT_IN_EH_COPY_VALUES,
                        "__builtin_eh_copy_values", ECF_NOTHROW);

  /* Complex multiplication and division.  These are handled as builtins
     rather than optabs because emit_library_call_value doesn't support
     complex.  Further, we can do slightly better with folding these
     beasties if the real and complex parts of the arguments are separate.  */
  {
    int mode;

    for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
      {
        char mode_name_buf[4], *q;
        const char *p;
        enum built_in_function mcode, dcode;
        tree type, inner_type;
        const char *prefix = "__";

        if (targetm.libfunc_gnu_prefix)
          prefix = "__gnu_";

        type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
        if (type == NULL)
          continue;
        inner_type = TREE_TYPE (type);

        ftype = build_function_type_list (type, inner_type, inner_type,
                                          inner_type, inner_type, NULL_TREE);

        mcode = ((enum built_in_function)
                 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
        dcode = ((enum built_in_function)
                 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));

        for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
          *q = TOLOWER (*p);
        *q = '\0';

        /* For -ftrapping-math these should throw from a former
           -fnon-call-exception stmt.  */
        built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
                                        NULL);
        local_define_builtin (built_in_names[mcode], ftype, mcode,
                              built_in_names[mcode],
                              ECF_CONST | ECF_LEAF);

        built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
                                        NULL);
        local_define_builtin (built_in_names[dcode], ftype, dcode,
                              built_in_names[dcode],
                              ECF_CONST | ECF_LEAF);
      }
  }

  init_internal_fns ();
}

/* HACK.  GROSS.  This is absolutely disgusting.  I wish there was a
   better way.

   If we requested a pointer to a vector, build up the pointers that
   we stripped off while looking for the inner type.  Similarly for
   return values from functions.

   The argument TYPE is the top of the chain, and BOTTOM is the
   new type which we will point to.  */

tree
reconstruct_complex_type (tree type, tree bottom)
{
  tree inner, outer;

  if (TREE_CODE (type) == POINTER_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
                                           TYPE_REF_CAN_ALIAS_ALL (type));
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
                                             TYPE_REF_CAN_ALIAS_ALL (type));
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_array_type (inner, TYPE_DOMAIN (type));
    }
  else if (TREE_CODE (type) == FUNCTION_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_function_type (inner, TYPE_ARG_TYPES (type));
    }
  else if (TREE_CODE (type) == METHOD_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      /* The build_method_type_directly() routine prepends 'this' to
         the argument list, so we must compensate by getting rid of it.  */
      outer
        = build_method_type_directly
            (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
             inner,
             TREE_CHAIN (TYPE_ARG_TYPES (type)));
    }
  else if (TREE_CODE (type) == OFFSET_TYPE)
    {
      inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
    }
  else
    return bottom;

  return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
                                            TYPE_QUALS (type));
}

/* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
   the inner type.  */
tree
build_vector_type_for_mode (tree innertype, machine_mode mode)
{
  poly_int64 nunits;
  unsigned int bitsize;

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_VECTOR_BOOL:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UACCUM:
      nunits = GET_MODE_NUNITS (mode);
      break;

    case MODE_INT:
      /* Check that there are no leftover bits.  */
      bitsize = GET_MODE_BITSIZE (as_a <scalar_int_mode> (mode));
      gcc_assert (bitsize % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
      nunits = bitsize / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
      break;

    default:
      gcc_unreachable ();
    }

  return make_vector_type (innertype, nunits, mode);
}

/* Similarly, but takes the inner type and number of units, which must be
   a power of two.  */

tree
build_vector_type (tree innertype, poly_int64 nunits)
{
  return make_vector_type (innertype, nunits, VOIDmode);
}

/* Build a truth vector with NUNITS units, giving it mode MASK_MODE.  */

tree
build_truth_vector_type_for_mode (poly_uint64 nunits, machine_mode mask_mode)
{
  gcc_assert (mask_mode != BLKmode);

  unsigned HOST_WIDE_INT esize;
  if (VECTOR_MODE_P (mask_mode))
    {
      poly_uint64 vsize = GET_MODE_BITSIZE (mask_mode);
      esize = vector_element_size (vsize, nunits);
    }
  else
    esize = 1;

  tree bool_type = build_nonstandard_boolean_type (esize);

  return make_vector_type (bool_type, nunits, mask_mode);
}

/* Build a vector type that holds one boolean result for each element of
   vector type VECTYPE.  The public interface for this operation is
   truth_type_for.  */

static tree
build_truth_vector_type_for (tree vectype)
{
  machine_mode vector_mode = TYPE_MODE (vectype);
  poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);

  machine_mode mask_mode;
  if (VECTOR_MODE_P (vector_mode)
      && targetm.vectorize.get_mask_mode (vector_mode).exists (&mask_mode))
    return build_truth_vector_type_for_mode (nunits, mask_mode);

  poly_uint64 vsize = tree_to_poly_uint64 (TYPE_SIZE (vectype));
  unsigned HOST_WIDE_INT esize = vector_element_size (vsize, nunits);
  tree bool_type = build_nonstandard_boolean_type (esize);

  return make_vector_type (bool_type, nunits, VOIDmode);
}
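
/* For instance, for a 4-element vector of 32-bit integers on a target
   without a dedicated mask mode this produces a 4-element vector of
   32-bit booleans, i.e. one boolean lane per lane of VECTYPE.  */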

/* Like build_vector_type, but builds a variant type with TYPE_VECTOR_OPAQUE
   set.  */

tree
build_opaque_vector_type (tree innertype, poly_int64 nunits)
{
  tree t = make_vector_type (innertype, nunits, VOIDmode);
  tree cand;

  /* We always build the non-opaque variant before the opaque one,
     so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
  cand = TYPE_NEXT_VARIANT (t);
  if (cand
      && TYPE_VECTOR_OPAQUE (cand)
      && check_qualified_type (cand, t, TYPE_QUALS (t)))
    return cand;
  /* Otherwise build a variant type and make sure to queue it after
     the non-opaque type.  */
  cand = build_distinct_type_copy (t);
  TYPE_VECTOR_OPAQUE (cand) = true;
  TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
  TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
  TYPE_NEXT_VARIANT (t) = cand;
  TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
  return cand;
}

/* Return the value of element I of VECTOR_CST T as a wide_int.  */

static poly_wide_int
vector_cst_int_elt (const_tree t, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
  if (i < encoded_nelts)
    return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, i));

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  */
  unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
  unsigned int pattern = i % npatterns;
  unsigned int count = i / npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!VECTOR_CST_STEPPED_P (t))
    return wi::to_poly_wide (VECTOR_CST_ENCODED_ELT (t, final_i));

  /* Otherwise work out the value from the last two encoded elements.  */
  tree v1 = VECTOR_CST_ENCODED_ELT (t, final_i - npatterns);
  tree v2 = VECTOR_CST_ENCODED_ELT (t, final_i);
  poly_wide_int diff = wi::to_poly_wide (v2) - wi::to_poly_wide (v1);
  return wi::to_poly_wide (v2) + (count - 2) * diff;
}
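
/* For example, a stepped VECTOR_CST with a single pattern and encoded
   elements { 1, 4, 7 } represents { 1, 4, 7, 10, 13, ... }: for any I
   beyond the encoded part the formula above yields 7 + (I - 2) * 3.  */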

/* Return the value of element I of VECTOR_CST T.  */

tree
vector_cst_elt (const_tree t, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  unsigned int encoded_nelts = vector_cst_encoded_nelts (t);
  if (i < encoded_nelts)
    return VECTOR_CST_ENCODED_ELT (t, i);

  /* If there are no steps, the final encoded value is the right one.  */
  if (!VECTOR_CST_STEPPED_P (t))
    {
      /* Identify the pattern that contains element I and work out the index
         of the last encoded element for that pattern.  */
      unsigned int npatterns = VECTOR_CST_NPATTERNS (t);
      unsigned int pattern = i % npatterns;
      unsigned int final_i = encoded_nelts - npatterns + pattern;
      return VECTOR_CST_ENCODED_ELT (t, final_i);
    }

  /* Otherwise work out the value from the last two encoded elements.  */
  return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t)),
                           vector_cst_int_elt (t, i));
}

/* Given an initializer INIT, return TRUE if INIT is zero or some
   aggregate of zeros.  Otherwise return FALSE.  If NONZERO is not
   null, set *NONZERO if and only if INIT is known not to be all
   zeros.  The combination of return value of false and *NONZERO
   false implies that INIT may but need not be all zeros.  Other
   combinations indicate definitive answers.  */

bool
initializer_zerop (const_tree init, bool *nonzero /* = NULL */)
{
  bool dummy;
  if (!nonzero)
    nonzero = &dummy;

  /* Conservatively clear NONZERO and set it only if INIT is definitely
     not all zero.  */
  *nonzero = false;

  STRIP_NOPS (init);

  unsigned HOST_WIDE_INT off = 0;

  switch (TREE_CODE (init))
    {
    case INTEGER_CST:
      if (integer_zerop (init))
        return true;

      *nonzero = true;
      return false;

    case REAL_CST:
      /* ??? Note that this is not correct for C4X float formats.  There,
         a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
         negative exponent.  */
      if (real_zerop (init)
          && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init)))
        return true;

      *nonzero = true;
      return false;

    case FIXED_CST:
      if (fixed_zerop (init))
        return true;

      *nonzero = true;
      return false;

    case COMPLEX_CST:
      if (integer_zerop (init)
          || (real_zerop (init)
              && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
              && !REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init)))))
        return true;

      *nonzero = true;
      return false;

    case VECTOR_CST:
      if (VECTOR_CST_NPATTERNS (init) == 1
          && VECTOR_CST_DUPLICATE_P (init)
          && initializer_zerop (VECTOR_CST_ENCODED_ELT (init, 0)))
        return true;

      *nonzero = true;
      return false;

    case CONSTRUCTOR:
      {
        if (TREE_CLOBBER_P (init))
          return false;

        unsigned HOST_WIDE_INT idx;
        tree elt;

        FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
          if (!initializer_zerop (elt, nonzero))
            return false;

        return true;
      }

    case MEM_REF:
      {
        tree arg = TREE_OPERAND (init, 0);
        if (TREE_CODE (arg) != ADDR_EXPR)
          return false;
        tree offset = TREE_OPERAND (init, 1);
        if (TREE_CODE (offset) != INTEGER_CST
            || !tree_fits_uhwi_p (offset))
          return false;
        off = tree_to_uhwi (offset);
        if (INT_MAX < off)
          return false;
        arg = TREE_OPERAND (arg, 0);
        if (TREE_CODE (arg) != STRING_CST)
          return false;
        init = arg;
      }
      /* Fall through.  */

    case STRING_CST:
      {
        gcc_assert (off <= INT_MAX);

        int i = off;
        int n = TREE_STRING_LENGTH (init);
        if (n <= i)
          return false;

        /* We need to loop through all elements to handle cases like
           "\0" and "\0foobar".  */
        for (i = 0; i < n; ++i)
          if (TREE_STRING_POINTER (init)[i] != '\0')
            {
              *nonzero = true;
              return false;
            }

        return true;
      }

    default:
      return false;
    }
}
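
/* As a usage example, an aggregate initializer such as { 0, 1 } makes
   this return false with *NONZERO set, while { 0, x } with a
   non-constant x returns false and leaves *NONZERO clear, since nothing
   definite is known about x's value.  */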

/* Return true if EXPR is an initializer expression in which every element
   is a constant that is numerically equal to 0 or 1.  The elements do not
   need to be equal to each other.  */

bool
initializer_each_zero_or_onep (const_tree expr)
{
  STRIP_ANY_LOCATION_WRAPPER (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return integer_zerop (expr) || integer_onep (expr);

    case REAL_CST:
      return real_zerop (expr) || real_onep (expr);

    case VECTOR_CST:
      {
        unsigned HOST_WIDE_INT nelts = vector_cst_encoded_nelts (expr);
        if (VECTOR_CST_STEPPED_P (expr)
            && !TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)).is_constant (&nelts))
          return false;

        for (unsigned int i = 0; i < nelts; ++i)
          {
            tree elt = vector_cst_elt (expr, i);
            if (!initializer_each_zero_or_onep (elt))
              return false;
          }

        return true;
      }

    default:
      return false;
    }
}

/* Check if vector VEC consists of all the equal elements and
   that the number of elements corresponds to the type of VEC.
   The function returns first element of the vector
   or NULL_TREE if the vector is not uniform.  */
tree
uniform_vector_p (const_tree vec)
{
  tree first, t;
  unsigned HOST_WIDE_INT i, nelts;

  if (vec == NULL_TREE)
    return NULL_TREE;

  gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));

  if (TREE_CODE (vec) == VEC_DUPLICATE_EXPR)
    return TREE_OPERAND (vec, 0);

  else if (TREE_CODE (vec) == VECTOR_CST)
    {
      if (VECTOR_CST_NPATTERNS (vec) == 1 && VECTOR_CST_DUPLICATE_P (vec))
        return VECTOR_CST_ENCODED_ELT (vec, 0);
      return NULL_TREE;
    }

  else if (TREE_CODE (vec) == CONSTRUCTOR
           && TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)).is_constant (&nelts))
    {
      first = error_mark_node;

      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
        {
          if (i == 0)
            {
              first = t;
              continue;
            }
          if (!operand_equal_p (first, t, 0))
            return NULL_TREE;
        }
      if (i != nelts)
        return NULL_TREE;

      return first;
    }

  return NULL_TREE;
}
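
/* E.g. a VECTOR_CST for { 3, 3, 3, 3 } is encoded as one duplicated
   pattern, so the VECTOR_CST_DUPLICATE_P check above returns its first
   encoded element without comparing every lane.  */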

/* If the argument is INTEGER_CST, return it.  If the argument is vector
   with all elements the same INTEGER_CST, return that INTEGER_CST.  Otherwise
   return NULL_TREE.
   Look through location wrappers.  */

tree
uniform_integer_cst_p (tree t)
{
  STRIP_ANY_LOCATION_WRAPPER (t);

  if (TREE_CODE (t) == INTEGER_CST)
    return t;

  if (VECTOR_TYPE_P (TREE_TYPE (t)))
    {
      t = uniform_vector_p (t);
      if (t && TREE_CODE (t) == INTEGER_CST)
        return t;
    }

  return NULL_TREE;
}

/* If VECTOR_CST T has a single nonzero element, return the index of that
   element, otherwise return -1.  */

int
single_nonzero_element (const_tree t)
{
  unsigned HOST_WIDE_INT nelts;
  unsigned int repeat_nelts;
  if (VECTOR_CST_NELTS (t).is_constant (&nelts))
    repeat_nelts = nelts;
  else if (VECTOR_CST_NELTS_PER_PATTERN (t) == 2)
    {
      nelts = vector_cst_encoded_nelts (t);
      repeat_nelts = VECTOR_CST_NPATTERNS (t);
    }
  else
    return -1;

  int res = -1;
  for (unsigned int i = 0; i < nelts; ++i)
    {
      tree elt = vector_cst_elt (t, i);
      if (!integer_zerop (elt) && !real_zerop (elt))
        {
          if (res >= 0 || i >= repeat_nelts)
            return -1;
          res = i;
        }
    }
  return res;
}
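
/* For instance, { 0, 0, 4, 0 } yields 2, while { 0, 1, 1, 0 } yields -1
   because more than one element is nonzero.  */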

/* Build an empty statement at location LOC.  */

tree
build_empty_stmt (location_t loc)
{
  tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
  SET_EXPR_LOCATION (t, loc);
  return t;
}

/* Build an OMP clause with code CODE.  LOC is the location of the
   clause.  */

tree
build_omp_clause (location_t loc, enum omp_clause_code code)
{
  tree t;
  int size, length;

  length = omp_clause_num_ops[code];
  size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));

  record_node_allocation_statistics (OMP_CLAUSE, size);

  t = (tree) ggc_internal_alloc (size);
  memset (t, 0, size);
  TREE_SET_CODE (t, OMP_CLAUSE);
  OMP_CLAUSE_SET_CODE (t, code);
  OMP_CLAUSE_LOCATION (t) = loc;

  return t;
}

/* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
   includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
   Except for the CODE and operand count field, other storage for the
   object is initialized to zeros.  */

tree
build_vl_exp (enum tree_code code, int len MEM_STAT_DECL)
{
  tree t;
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);

  gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
  gcc_assert (len >= 1);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, code);

  /* Can't use TREE_OPERAND to store the length because if checking is
     enabled, it will try to check the length before we store it.  :-P  */
  t->exp.operands[0] = build_int_cst (sizetype, len);

  return t;
}
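
/* Layout reminder: in a CALL_EXPR built through build_vl_exp, operand 0
   holds the operand count, operand 1 the called function, operand 2 the
   static chain, and the actual arguments start at operand 3; that is
   why build_call_1 below allocates nargs + 3 slots.  */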

/* Helper function for build_call_* functions; build a CALL_EXPR with
   indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
   the argument slots.  */

static tree
build_call_1 (tree return_type, tree fn, int nargs)
{
  tree t;

  t = build_vl_exp (CALL_EXPR, nargs + 3);
  TREE_TYPE (t) = return_type;
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}

/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
   FN and a null static chain slot.  NARGS is the number of call arguments
   which are specified as "..." arguments.  */

tree
build_call_nary (tree return_type, tree fn, int nargs, ...)
{
  tree ret;
  va_list args;

  va_start (args, nargs);
  ret = build_call_valist (return_type, fn, nargs, args);
  va_end (args);
  return ret;
}

/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
   FN and a null static chain slot.  NARGS is the number of call arguments
   which are specified as a va_list ARGS.  */

tree
build_call_valist (tree return_type, tree fn, int nargs, va_list args)
{
  tree t;
  int i;

  t = build_call_1 (return_type, fn, nargs);
  for (i = 0; i < nargs; i++)
    CALL_EXPR_ARG (t, i) = va_arg (args, tree);
  process_call_operands (t);
  return t;
}

/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
   FN and a null static chain slot.  NARGS is the number of call arguments
   which are specified as a tree array ARGS.  */

tree
build_call_array_loc (location_t loc, tree return_type, tree fn,
                      int nargs, const tree *args)
{
  tree t;
  int i;

  t = build_call_1 (return_type, fn, nargs);
  for (i = 0; i < nargs; i++)
    CALL_EXPR_ARG (t, i) = args[i];
  process_call_operands (t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}

/* Like build_call_array, but takes a vec.  */

tree
build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
{
  tree ret, t;
  unsigned int ix;

  ret = build_call_1 (return_type, fn, vec_safe_length (args));
  FOR_EACH_VEC_SAFE_ELT (args, ix, t)
    CALL_EXPR_ARG (ret, ix) = t;
  process_call_operands (ret);
  return ret;
}

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called and N arguments are passed in the array
   ARGARRAY.  */

tree
build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
{
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
}

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called and the arguments are passed in the vector
   VEC.  */

tree
build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
{
  return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
                                    vec_safe_address (vec));
}

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  */

tree
build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return build_call_expr_loc_array (loc, fndecl, n, argarray);
}
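
/* E.g. build_call_expr_loc (loc, fndecl, 2, arg0, arg1) builds the call
   "fndecl (arg0, arg1)" with the return type taken from FNDECL's type;
   because it goes through fold_build_call_array_loc, trivial calls may
   be folded on the spot.  */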

/* Like build_call_expr_loc (UNKNOWN_LOCATION, ...).  Duplicated because
   varargs macros aren't supported by all bootstrap compilers.  */

tree
build_call_expr (tree fndecl, int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
}

/* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
   type TYPE.  This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
   It will get gimplified later into an ordinary internal function.  */

tree
build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
                                    tree type, int n, const tree *args)
{
  tree t = build_call_1 (type, NULL_TREE, n);
  for (int i = 0; i < n; ++i)
    CALL_EXPR_ARG (t, i) = args[i];
  SET_EXPR_LOCATION (t, loc);
  CALL_EXPR_IFN (t) = ifn;
  process_call_operands (t);
  return t;
}

/* Build internal call expression.  This is just like CALL_EXPR, except
   its CALL_EXPR_FN is NULL.  It will get gimplified later into an ordinary
   internal function.  */

tree
build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
                              tree type, int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
}

/* Return a function call to FN, if the target is guaranteed to support it,
   or null otherwise.

   N is the number of arguments, passed in the "...", and TYPE is the
   type of the return value.  */

tree
maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
                           int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  if (internal_fn_p (fn))
    {
      internal_fn ifn = as_internal_fn (fn);
      if (direct_internal_fn_p (ifn))
        {
          tree_pair types = direct_internal_fn_types (ifn, type, argarray);
          if (!direct_internal_fn_supported_p (ifn, types,
                                               OPTIMIZE_FOR_BOTH))
            return NULL_TREE;
        }
      return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
    }
  else
    {
      tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
      if (!fndecl)
        return NULL_TREE;
      return build_call_expr_loc_array (loc, fndecl, n, argarray);
    }
}

/* Return a function call to the appropriate builtin alloca variant.

   SIZE is the size to be allocated.  ALIGN, if non-zero, is the requested
   alignment of the allocated area.  MAX_SIZE, if non-negative, is an upper
   bound for SIZE in case it is not a fixed value.  */

tree
build_alloca_call_expr (tree size, unsigned int align, HOST_WIDE_INT max_size)
{
  if (max_size >= 0)
    {
      tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX);
      return
        build_call_expr (t, 3, size, size_int (align), size_int (max_size));
    }
  else if (align > 0)
    {
      tree t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
      return build_call_expr (t, 2, size, size_int (align));
    }
  else
    {
      tree t = builtin_decl_explicit (BUILT_IN_ALLOCA);
      return build_call_expr (t, 1, size);
    }
}
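
/* Mapping examples: build_alloca_call_expr (size, 16, -1) becomes
   __builtin_alloca_with_align (size, 16), while align == 0 and
   max_size < 0 fall back to a plain __builtin_alloca (size).  */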

/* Create a new constant string literal of type ELTYPE[SIZE] (or LEN
   if SIZE == -1) and return a tree node representing char* pointer to
   it as an ADDR_EXPR (ARRAY_REF (ELTYPE, ...)).  When STR is nonnull
   the STRING_CST value is the LEN bytes at STR (the representation
   of the string, which may be wide).  Otherwise it's all zeros.  */

tree
build_string_literal (unsigned len, const char *str /* = NULL */,
                      tree eltype /* = char_type_node */,
                      unsigned HOST_WIDE_INT size /* = -1 */)
{
  tree t = build_string (len, str);
  /* Set the maximum valid index based on the string length or SIZE.  */
  unsigned HOST_WIDE_INT maxidx
    = (size == HOST_WIDE_INT_M1U ? len : size) - 1;

  tree index = build_index_type (size_int (maxidx));
  eltype = build_type_variant (eltype, 1, 0);
  tree type = build_array_type (eltype, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  type = build_pointer_type (eltype);
  t = build1 (ADDR_EXPR, type,
              build4 (ARRAY_REF, eltype,
                      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
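
/* For example, build_string_literal (6, "hello") yields the address of a
   constant char[6] STRING_CST holding "hello" plus its terminating NUL,
   ready to be passed to a printf-style builtin.  */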

/* Return true if T (assumed to be a DECL) must be assigned a memory
   location.  */

bool
needs_to_live_in_memory (const_tree t)
{
  return (TREE_ADDRESSABLE (t)
          || is_global_var (t)
          || (TREE_CODE (t) == RESULT_DECL
              && !DECL_BY_REFERENCE (t)
              && aggregate_value_p (t, current_function_decl)));
}

/* Return value of a constant X and sign-extend it.  */

HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
        val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
      else
        val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
    }

  return val;
}

/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is unsigned iff UNSIGNEDP is true, or itself
   if TYPE is already an integer type of signedness UNSIGNEDP.
   If TYPE is a floating-point type, return an integer type with the same
   bitsize and with the signedness given by UNSIGNEDP; this is useful
   when doing bit-level operations on a floating-point value.  */

tree
signed_or_unsigned_type_for (int unsignedp, tree type)
{
  if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
    return type;

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      tree inner = TREE_TYPE (type);
      tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
      if (!inner2)
        return NULL_TREE;
      if (inner == inner2)
        return type;
      return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
    }

  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree inner = TREE_TYPE (type);
      tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
      if (!inner2)
        return NULL_TREE;
      if (inner == inner2)
        return type;
      return build_complex_type (inner2);
    }

  unsigned int bits;
  if (INTEGRAL_TYPE_P (type)
      || POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    bits = TYPE_PRECISION (type);
  else if (TREE_CODE (type) == REAL_TYPE)
    bits = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (type));
  else
    return NULL_TREE;

  return build_nonstandard_integer_type (bits, unsignedp);
}

/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is unsigned, or itself if TYPE is already an
   unsigned integer type.  If TYPE is a floating-point type, return an
   unsigned integer type with the same bitsize as TYPE.  */

tree
unsigned_type_for (tree type)
{
  return signed_or_unsigned_type_for (1, type);
}

/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is signed, or itself if TYPE is already a
   signed integer type.  If TYPE is a floating-point type, return a
   signed integer type with the same bitsize as TYPE.  */

tree
signed_type_for (tree type)
{
  return signed_or_unsigned_type_for (0, type);
}
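
/* E.g. unsigned_type_for (integer_type_node) returns a 32-bit unsigned
   integer type on common targets, and passing a pointer type yields an
   unsigned integer type of pointer precision, which callers use when
   they need wrap-around arithmetic.  */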

/* - For VECTOR_TYPEs:
     - The truth type must be a VECTOR_BOOLEAN_TYPE.
     - The number of elements must match (known_eq).
     - targetm.vectorize.get_mask_mode exists, and exactly
       the same mode as the truth type.
   - Otherwise, the truth type must be a BOOLEAN_TYPE
     or useless_type_conversion_p to BOOLEAN_TYPE.  */

bool
is_truth_type_for (tree type, tree truth_type)
{
  machine_mode mask_mode = TYPE_MODE (truth_type);
  machine_mode vmode = TYPE_MODE (type);
  machine_mode tmask_mode;

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if (VECTOR_BOOLEAN_TYPE_P (truth_type)
          && known_eq (TYPE_VECTOR_SUBPARTS (type),
                       TYPE_VECTOR_SUBPARTS (truth_type))
          && targetm.vectorize.get_mask_mode (vmode).exists (&tmask_mode)
          && tmask_mode == mask_mode)
        return true;

      return false;
    }

  return useless_type_conversion_p (boolean_type_node, truth_type);
}

/* If TYPE is a vector type, return a signed integer vector type with the
   same width and number of subparts.  Otherwise return boolean_type_node.  */

tree
truth_type_for (tree type)
{
  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if (VECTOR_BOOLEAN_TYPE_P (type))
        return type;
      return build_truth_vector_type_for (type);
    }
  else
    return boolean_type_node;
}

/* Returns the largest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
upper_bound_in_type (tree outer, tree inner)
{
  unsigned int det = 0;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);
  unsigned prec;

  /* Compute a unique number for every combination.  */
  det |= (oprec > iprec) ? 4 : 0;
  det |= TYPE_UNSIGNED (outer) ? 2 : 0;
  det |= TYPE_UNSIGNED (inner) ? 1 : 0;

  /* Determine the exponent to use.  */
  switch (det)
    {
    case 0:
    case 1:
      /* oprec <= iprec, outer: signed, inner: don't care.  */
      prec = oprec - 1;
      break;
    case 2:
    case 3:
      /* oprec <= iprec, outer: unsigned, inner: don't care.  */
      prec = oprec;
      break;
    case 4:
      /* oprec > iprec, outer: signed, inner: signed.  */
      prec = iprec - 1;
      break;
    case 5:
      /* oprec > iprec, outer: signed, inner: unsigned.  */
      prec = iprec;
      break;
    case 6:
      /* oprec > iprec, outer: unsigned, inner: signed.  */
      prec = iprec - 1;
      break;
    case 7:
      /* oprec > iprec, outer: unsigned, inner: unsigned.  */
      prec = iprec;
      break;
    default:
      gcc_unreachable ();
    }

  return wide_int_to_tree (outer,
                           wi::mask (prec, false, TYPE_PRECISION (outer)));
}

/* Returns the smallest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
lower_bound_in_type (tree outer, tree inner)
{
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);

  /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
     and obtain 0.  */
  if (TYPE_UNSIGNED (outer)
      /* If we are widening something of an unsigned type, OUTER type
         contains all values of INNER type.  In particular, both INNER
         and OUTER types have zero in common.  */
      || (oprec > iprec && TYPE_UNSIGNED (inner)))
    return build_int_cst (outer, 0);
  else
    {
      /* If we are widening a signed type to another signed type, we
         want to obtain -2^^(iprec-1).  If we are keeping the
         precision or narrowing to a signed type, we want to obtain
         -2^^(oprec-1).  */
      unsigned prec = oprec > iprec ? iprec : oprec;
      return wide_int_to_tree (outer,
                               wi::mask (prec - 1, true,
                                         TYPE_PRECISION (outer)));
    }
}
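
/* For example, casting a 16-bit signed value to a 32-bit signed OUTER
   type can never produce anything smaller than -2^15, which is exactly
   the constant built here for that combination.  */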

/* Return nonzero if two operands that are suitable for PHI nodes are
   necessarily equal.  Specifically, both ARG0 and ARG1 must be either
   SSA_NAME or invariant.  Note that this is strictly an optimization.
   That is, callers of this function can directly call operand_equal_p
   and get the same result, only slower.  */

bool
operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
{
  if (arg0 == arg1)
    return true;
  if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
    return false;

  return operand_equal_p (arg0, arg1, 0);
}

/* Returns number of zeros at the end of binary representation of X.  */

tree
num_ending_zeros (const_tree x)
{
  return build_int_cst (TREE_TYPE (x), wi::ctz (wi::to_wide (x)));
}
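
/* E.g. for the constant 24 (binary 11000) this returns the constant 3.  */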

#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
      if (result)					\
	return result;					\
    }							\
  while (0)

/* This is a subroutine of walk_tree that walks field of TYPE that are to
   be walked whenever a type is seen in the tree.  Rest of operands and return
   value are as for walk_tree.  */

static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
                  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
         be written in C.  They can in Ada.  It's pathological, but
         there's an ACATS test (c38102a) that checks it.  Deal with this
         by checking if we're pointing to another pointer, that one
         points to another pointer, that one does too, and we have no htab.
         If so, get a hash table.  We check three levels deep to avoid
         the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
          && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
          && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
          && !pset)
        {
          result = walk_tree_without_duplicates (&TREE_TYPE (type),
                                                 func, data);
          if (result)
            return result;

          break;
        }

      /* Fall through.  */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
        tree arg;

        /* We never want to walk into default arguments.  */
        for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
          WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this node's type if a pointer for fear that
         we'll have infinite recursion.  If we have a PSET, then we
         need not fear.  */
      if (pset
          || (!POINTER_TYPE_P (TREE_TYPE (type))
              && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
        WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
   called with the DATA and the address of each sub-tree.  If FUNC returns a
   non-NULL value, the traversal is stopped, and the value returned by FUNC
   is returned.  If PSET is non-NULL it is used to record the nodes visited,
   and to avoid visiting a node more than once.  */

tree
walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
	     hash_set<tree> *pset, walk_tree_lh lh)
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

#define WALK_SUBTREE_TAIL(NODE)			\
  do						\
    {						\
      tp = &(NODE);				\
      goto tail_recurse;			\
    }						\
  while (0)

 tail_recurse:
  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Don't walk the same tree twice, if the user has requested
     that we avoid doing so.  */
  if (pset && pset->add (*tp))
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  code = TREE_CODE (*tp);

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    {
      /* But we still need to check our siblings.  */
      if (code == TREE_LIST)
	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      else if (code == OMP_CLAUSE)
	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
      else
	return NULL_TREE;
    }

  if (lh)
    {
      result = (*lh) (tp, &walk_subtrees, func, data, pset);
      if (result || !walk_subtrees)
	return result;
    }

  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case BLOCK:
    case PLACEHOLDER_EXPR:
    case SSA_NAME:
    case FIELD_DECL:
    case RESULT_DECL:
      /* None of these have subtrees other than those already walked
	 above.  */
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (*tp);

	if (len == 0)
	  break;

	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));

	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
      }

    case VECTOR_CST:
      {
	unsigned len = vector_cst_encoded_nelts (*tp);
	if (len == 0)
	  break;
	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (VECTOR_CST_ENCODED_ELT (*tp, len));
	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (VECTOR_CST_ENCODED_ELT (*tp, 0));
      }

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;

	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
	     idx++)
	  WALK_SUBTREE (ce->value);
      }
      break;

    case SAVE_EXPR:
      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));

    case BIND_EXPR:
      {
	tree decl;
	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
	  {
	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
	       into declarations that are just mentioned, rather than
	       declared; they don't really belong to this part of the tree.
	       And, we can see cycles: the initializer for a declaration
	       can refer to the declaration itself.  */
	    WALK_SUBTREE (DECL_INITIAL (decl));
	    WALK_SUBTREE (DECL_SIZE (decl));
	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
	  }
	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
      }

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
	  WALK_SUBTREE (*tsi_stmt_ptr (i));
      }
      break;

    case OMP_CLAUSE:
      {
	int len = omp_clause_num_ops[OMP_CLAUSE_CODE (*tp)];
	for (int i = 0; i < len; i++)
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
      }

    case TARGET_EXPR:
      {
	int i, len;

	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
	   But, we only want to walk once.  */
	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
	for (i = 0; i < len; ++i)
	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
      }

    case DECL_EXPR:
      /* If this is a TYPE_DECL, walk into the fields of the type that it's
	 defining.  We only want to walk into these fields of a type in this
	 case and not in the general case of a mere reference to the type.

	 The criterion is as follows: if the field can be an expression, it
	 must be walked only here.  This should be in keeping with the fields
	 that are directly gimplified in gimplify_type_sizes in order for the
	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
	 variable-sized types.

	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
      if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
	{
	  /* Call the function for the decl so e.g. copy_tree_body_r can
	     replace it with the remapped one.  */
	  result = (*func) (&DECL_EXPR_DECL (*tp), &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
	  if (TREE_CODE (*type_p) == ERROR_MARK)
	    return NULL_TREE;

	  /* Call the function for the type.  See if it returns anything or
	     doesn't want us to continue.  If we are to continue, walk both
	     the normal fields and those for the declaration case.  */
	  result = (*func) (type_p, &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  /* But do not walk a pointed-to type since it may itself need to
	     be walked in the declaration case if it isn't anonymous.  */
	  if (!POINTER_TYPE_P (*type_p))
	    {
	      result = walk_type_fields (*type_p, func, data, pset, lh);
	      if (result)
		return result;
	    }

	  /* If this is a record type, also walk the fields.  */
	  if (RECORD_OR_UNION_TYPE_P (*type_p))
	    {
	      tree field;

	      for (field = TYPE_FIELDS (*type_p); field;
		   field = DECL_CHAIN (field))
		{
		  /* We'd like to look at the type of the field, but we can
		     easily get infinite recursion.  So assume it's pointed
		     to elsewhere in the tree.  Also, ignore things that
		     aren't fields.  */
		  if (TREE_CODE (field) != FIELD_DECL)
		    continue;

		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
		  WALK_SUBTREE (DECL_SIZE (field));
		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
		    WALK_SUBTREE (DECL_QUALIFIER (field));
		}
	    }

	  /* Same for scalar types.  */
	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
		   || TREE_CODE (*type_p) == INTEGER_TYPE
		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
		   || TREE_CODE (*type_p) == REAL_TYPE)
	    {
	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
	    }

	  WALK_SUBTREE (TYPE_SIZE (*type_p));
	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
	}
      /* FALLTHRU */

    default:
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  int i, len;

	  /* Walk over all the sub-trees of this operand.  */
	  len = TREE_OPERAND_LENGTH (*tp);

	  /* Go through the subtrees.  We need to do this in forward order so
	     that the scope of a FOR_EXPR is handled properly.  */
	  if (len)
	    {
	      for (i = 0; i < len - 1; ++i)
		WALK_SUBTREE (TREE_OPERAND (*tp, i));
	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
	    }
	}
      /* If this is a type, walk the needed fields in the type.  */
      else if (TYPE_P (*tp))
	return walk_type_fields (*tp, func, data, pset, lh);
      break;
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;
}

#undef WALK_SUBTREE_TAIL

#undef WALK_SUBTREE
/* Like walk_tree, but does not walk duplicate nodes more than once.  */

tree
walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
				walk_tree_lh lh)
{
  tree result;

  hash_set<tree> pset;
  result = walk_tree_1 (tp, func, data, &pset, lh);
  return result;
}
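
/* Illustrative sketch (not part of the original sources): a minimal
   walk_tree callback that counts the distinct nodes reachable from an
   expression.  The helper names are hypothetical; the callback signature
   is the walk_tree_fn contract used by the walker above.  */

static tree
example_count_nodes_r (tree *tp ATTRIBUTE_UNUSED,
		       int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  ++*(unsigned *) data;
  return NULL_TREE;	/* Returning NULL means: keep walking.  */
}

static unsigned
example_count_nodes (tree expr)
{
  unsigned count = 0;
  /* The _without_duplicates entry point uses an internal pset so shared
     subtrees are visited only once.  */
  walk_tree_without_duplicates (&expr, example_count_nodes_r, &count);
  return count;
}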
tree
tree_block (tree t)
{
  const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));

  if (IS_EXPR_CODE_CLASS (c))
    return LOCATION_BLOCK (t->exp.locus);
  gcc_unreachable ();
  return NULL;
}
void
tree_set_block (tree t, tree b)
{
  const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));

  if (IS_EXPR_CODE_CLASS (c))
    t->exp.locus = set_block (t->exp.locus, b);
  else
    gcc_unreachable ();
}
/* Create a nameless artificial label and put it in the current
   function context.  The label has a location of LOC.  Returns the
   newly created label.  */

tree
create_artificial_label (location_t loc)
{
  tree lab = build_decl (loc, LABEL_DECL, NULL_TREE, void_type_node);

  DECL_ARTIFICIAL (lab) = 1;
  DECL_IGNORED_P (lab) = 1;
  DECL_CONTEXT (lab) = current_function_decl;
  return lab;
}
/* Given a tree, try to return a useful variable name that we can use
   to prefix a temporary that is being assigned the value of the tree.
   I.E. given  <temp> = &A, return A.  */

const char *
get_name (tree t)
{
  tree stripped_decl;

  stripped_decl = t;
  STRIP_NOPS (stripped_decl);
  if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
    return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
  else if (TREE_CODE (stripped_decl) == SSA_NAME)
    {
      tree name = SSA_NAME_IDENTIFIER (stripped_decl);
      if (!name)
	return NULL;
      return IDENTIFIER_POINTER (name);
    }
  else
    {
      switch (TREE_CODE (stripped_decl))
	{
	case ADDR_EXPR:
	  return get_name (TREE_OPERAND (stripped_decl, 0));
	default:
	  return NULL;
	}
    }
}
/* Return true if TYPE has a variable argument list.  */

bool
stdarg_p (const_tree fntype)
{
  function_args_iterator args_iter;
  tree n = NULL_TREE, t;

  if (!fntype)
    return false;

  FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
    {
      n = t;
    }

  return n != NULL_TREE && n != void_type_node;
}
/* Return true if TYPE has a prototype.  */

bool
prototype_p (const_tree fntype)
{
  tree t;

  gcc_assert (fntype != NULL_TREE);

  t = TYPE_ARG_TYPES (fntype);
  return (t != NULL_TREE);
}
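
/* Illustrative sketch (not part of the original sources): classifying a
   FUNCTION_TYPE with the two predicates above.  The helper name and the
   returned strings are hypothetical.  */

static const char *
example_classify_fntype (const_tree fntype)
{
  if (!prototype_p (fntype))
    return "unprototyped";	/* e.g. 'int f ();' in C.  */
  else if (stdarg_p (fntype))
    return "variadic";		/* e.g. 'int f (const char *, ...);'  */
  else
    return "fixed arguments";	/* e.g. 'int f (int);'  */
}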
/* If BLOCK is inlined from an __attribute__((__artificial__))
   routine, return pointer to location from where it has been
   called.  */

location_t *
block_nonartificial_location (tree block)
{
  location_t *ret = NULL;

  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);
      if (TREE_CODE (ao) == FUNCTION_DECL)
	{
	  /* If AO is an artificial inline, point RET to the
	     call site locus at which it has been inlined and continue
	     the loop, in case AO's caller is also an artificial
	     inline.  */
	  if (DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    ret = &BLOCK_SOURCE_LOCATION (block);
	  else
	    break;
	}
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }
  return ret;
}
/* If EXP is inlined from an __attribute__((__artificial__))
   function, return the location of the original call expression.  */

location_t
tree_nonartificial_location (tree exp)
{
  location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));

  if (loc)
    return *loc;
  else
    return EXPR_LOCATION (exp);
}
/* Return the location into which EXP has been inlined.  Analogous
   to tree_nonartificial_location() above but not limited to artificial
   functions declared inline.  If SYSTEM_HEADER is true, return
   the macro expansion point of the location if it's in a system header.  */

location_t
tree_inlined_location (tree exp, bool system_header /* = true */)
{
  location_t loc = UNKNOWN_LOCATION;

  tree block = TREE_BLOCK (exp);

  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);
      if (TREE_CODE (ao) == FUNCTION_DECL)
	loc = BLOCK_SOURCE_LOCATION (block);
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }

  if (loc == UNKNOWN_LOCATION)
    {
      loc = EXPR_LOCATION (exp);
      if (system_header)
	/* Only consider macro expansion when the block traversal failed
	   to find a location.  Otherwise it's not relevant.  */
	return expansion_point_location_if_in_system_header (loc);
    }

  return loc;
}
/* These are the hash table functions for the hash table of OPTIMIZATION_NODE
   nodes.  */

/* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code.  */

hashval_t
cl_option_hasher::hash (tree x)
{
  const_tree const t = x;

  if (TREE_CODE (t) == OPTIMIZATION_NODE)
    return cl_optimization_hash (TREE_OPTIMIZATION (t));
  else if (TREE_CODE (t) == TARGET_OPTION_NODE)
    return cl_target_option_hash (TREE_TARGET_OPTION (t));
  else
    gcc_unreachable ();
}
/* Return nonzero if the value represented by *X (an OPTIMIZATION or
   TARGET_OPTION tree node) is the same as that given by *Y, which is the
   same kind of node.  */

bool
cl_option_hasher::equal (tree x, tree y)
{
  const_tree const xt = x;
  const_tree const yt = y;

  if (TREE_CODE (xt) != TREE_CODE (yt))
    return false;

  if (TREE_CODE (xt) == OPTIMIZATION_NODE)
    return cl_optimization_option_eq (TREE_OPTIMIZATION (xt),
				      TREE_OPTIMIZATION (yt));
  else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
    return cl_target_option_eq (TREE_TARGET_OPTION (xt),
				TREE_TARGET_OPTION (yt));
  else
    gcc_unreachable ();
}
/* Build an OPTIMIZATION_NODE based on the options in OPTS and OPTS_SET.  */

tree
build_optimization_node (struct gcc_options *opts,
			 struct gcc_options *opts_set)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
			opts, opts_set);

  tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_optimization_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_optimization_node = make_node (OPTIMIZATION_NODE);
    }

  return t;
}
/* Build a TARGET_OPTION_NODE based on the options in OPTS and OPTS_SET.  */

tree
build_target_option_node (struct gcc_options *opts,
			  struct gcc_options *opts_set)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
			 opts, opts_set);

  tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_target_option_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_target_option_node = make_node (TARGET_OPTION_NODE);
    }

  return t;
}
/* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
   so that they aren't saved during PCH writing.  */

void
prepare_target_option_nodes_for_pch (void)
{
  hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
  for (; iter != cl_option_hash_table->end (); ++iter)
    if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
      TREE_TARGET_GLOBALS (*iter) = NULL;
}
/* Determine the "ultimate origin" of a block.  */

tree
block_ultimate_origin (const_tree block)
{
  tree origin = BLOCK_ABSTRACT_ORIGIN (block);

  if (origin == NULL_TREE)
    return NULL_TREE;
  else
    {
      gcc_checking_assert ((DECL_P (origin)
			    && DECL_ORIGIN (origin) == origin)
			   || BLOCK_ORIGIN (origin) == origin);
      return origin;
    }
}
/* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
   no instruction.  */

bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Do not strip casts into or out of differing address spaces.  */
  if (POINTER_TYPE_P (outer_type)
      && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
    {
      if (!POINTER_TYPE_P (inner_type)
	  || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
	return false;
    }
  else if (POINTER_TYPE_P (inner_type)
	   && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
    {
      /* We already know that outer_type is not a pointer with
	 a non-generic address space.  */
      return false;
    }

  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
	  || POINTER_TYPE_P (inner_type)
	  || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}
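
/* Illustrative sketch (not part of the original sources): exercising the
   predicate above.  A conversion between integer types of equal precision
   (e.g. int <-> unsigned int) is a nop; a narrowing conversion is not,
   assuming a target where short is narrower than int.  The helper name
   is hypothetical.  */

static void
example_nop_conversion_checks (void)
{
  bool sign_change_is_nop
    = tree_nop_conversion_p (unsigned_type_node, integer_type_node);
  bool narrowing_is_nop
    = tree_nop_conversion_p (short_integer_type_node, integer_type_node);
  gcc_checking_assert (sign_change_is_nop && !narrowing_is_nop);
}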
/* Return true iff conversion in EXP generates no instruction.  Mark
   it inline so that we fully inline into the stripping functions even
   though we have two uses of this function.  */

static bool
tree_nop_conversion (const_tree exp)
{
  tree outer_type, inner_type;

  if (location_wrapper_p (exp))
    return true;
  if (!CONVERT_EXPR_P (exp)
      && TREE_CODE (exp) != NON_LVALUE_EXPR)
    return false;

  outer_type = TREE_TYPE (exp);
  inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
  if (!inner_type || inner_type == error_mark_node)
    return false;

  return tree_nop_conversion_p (outer_type, inner_type);
}
/* Return true iff conversion in EXP generates no instruction.  Don't
   consider conversions changing the signedness.  */

static bool
tree_sign_nop_conversion (const_tree exp)
{
  tree outer_type, inner_type;

  if (!tree_nop_conversion (exp))
    return false;

  outer_type = TREE_TYPE (exp);
  inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

  return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
	  && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
}
/* Strip conversions from EXP according to tree_nop_conversion and
   return the resulting expression.  */

tree
tree_strip_nop_conversions (tree exp)
{
  while (tree_nop_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}
/* Strip conversions from EXP according to tree_sign_nop_conversion
   and return the resulting expression.  */

tree
tree_strip_sign_nop_conversions (tree exp)
{
  while (tree_sign_nop_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}
/* Avoid any floating point extensions from EXP.  */

tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /* For floating point constant look up the narrowest type that can hold
     it properly and handle it like (type)(narrowest_type)constant.
     This way we can optimize for instance a=a*2.0 where "a" is float
     but 2.0 is double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
	type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
	       > TYPE_PRECISION (double_type_node)
	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
	type = double_type_node;
      if (type)
	return build_real_truncate (type, orig);
    }

  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  if (!FLOAT_TYPE_P (subt))
    return exp;

  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  return strip_float_extensions (sub);
}
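
/* Illustrative sketch (not part of the original sources): how a caller
   might use strip_float_extensions when matching a widened operation such
   as (double) f * 2.0 where F is a float.  The helper name is hypothetical.  */

static bool
example_operands_fit_in_float_p (tree op0, tree op1)
{
  /* Peel off float extensions (and constants representable in a
     narrower type) before comparing precisions.  */
  op0 = strip_float_extensions (op0);
  op1 = strip_float_extensions (op1);
  return (TYPE_PRECISION (TREE_TYPE (op0))
	    <= TYPE_PRECISION (float_type_node)
	  && TYPE_PRECISION (TREE_TYPE (op1))
	       <= TYPE_PRECISION (float_type_node));
}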
/* Strip out all handled components that produce invariant
   offsets.  */

const_tree
strip_invariant_refs (const_tree op)
{
  while (handled_component_p (op))
    {
      switch (TREE_CODE (op))
	{
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
	      || TREE_OPERAND (op, 2) != NULL_TREE
	      || TREE_OPERAND (op, 3) != NULL_TREE)
	    return NULL;
	  break;

	case COMPONENT_REF:
	  if (TREE_OPERAND (op, 2) != NULL_TREE)
	    return NULL;
	  break;

	default:;
	}
      op = TREE_OPERAND (op, 0);
    }

  return op;
}
static GTY(()) tree gcc_eh_personality_decl;

/* Return the GCC personality function decl.  */

tree
lhd_gcc_personality (void)
{
  if (!gcc_eh_personality_decl)
    gcc_eh_personality_decl = build_personality_function ("gcc");
  return gcc_eh_personality_decl;
}
/* TARGET is a call target of GIMPLE call statement
   (obtained by gimple_call_fn).  Return true if it is
   OBJ_TYPE_REF representing a virtual call of C++ method.
   (As opposed to OBJ_TYPE_REF representing objc calls
   through a cast where middle-end devirtualization machinery
   can't apply.)  FOR_DUMP_P is true when being called from
   the dump routines.  */

bool
virtual_method_call_p (const_tree target, bool for_dump_p)
{
  if (TREE_CODE (target) != OBJ_TYPE_REF)
    return false;
  tree t = TREE_TYPE (target);
  gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
  t = TREE_TYPE (t);
  if (TREE_CODE (t) == FUNCTION_TYPE)
    return false;
  gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
  /* If we do not have BINFO associated, it means that type was built
     without devirtualization enabled.  Do not consider this a virtual
     call.  */
  if (!TYPE_BINFO (obj_type_ref_class (target, for_dump_p)))
    return false;
  return true;
}
/* Lookup sub-BINFO of BINFO of TYPE at offset POS.  */

static tree
lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
{
  unsigned int i;
  tree base_binfo, b;

  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
	&& types_same_for_odr (TREE_TYPE (base_binfo), type))
      return base_binfo;
    else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
      return b;
  return NULL_TREE;
}
/* Try to find a base info of BINFO that would have its field decl at offset
   OFFSET within the BINFO type and which is of EXPECTED_TYPE.  If it can be
   found, return, otherwise return NULL_TREE.  */

tree
get_binfo_at_offset (tree binfo, poly_int64 offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      if (types_same_for_odr (type, expected_type))
	return binfo;
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (known_in_range_p (offset, pos, size))
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (maybe_ne (offset, 0))
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field.  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			      / BITS_PER_UNIT);

	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	}

      type = TREE_TYPE (fld);
      offset -= pos;
    }
}
/* Returns true if X is a typedef decl.  */

bool
is_typedef_decl (const_tree x)
{
  return (x && TREE_CODE (x) == TYPE_DECL
	  && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
}
/* Returns true iff TYPE is a type variant created for a typedef.  */

bool
typedef_variant_p (const_tree type)
{
  return is_typedef_decl (TYPE_NAME (type));
}
/* PR 84195: Replace control characters in "unescaped" with their
   escaped equivalents.  Allow newlines if -fmessage-length has
   been set to a non-zero value.  This is done here, rather than
   where the attribute is recorded as the message length can
   change between these two locations.  */

void
escaped_string::escape (const char *unescaped)
{
  char *escaped;
  size_t i, new_i, len;

  if (m_owned)
    free (m_str);

  m_str = const_cast<char *> (unescaped);
  m_owned = false;

  if (unescaped == NULL || *unescaped == 0)
    return;

  len = strlen (unescaped);
  escaped = NULL;
  new_i = 0;

  for (i = 0; i < len; i++)
    {
      char c = unescaped[i];

      if (ISCNTRL (c) == 0)
	{
	  if (escaped)
	    escaped[new_i++] = c;
	  continue;
	}

      if (c != '\n' || !pp_is_wrapping_line (global_dc->printer))
	{
	  if (escaped == NULL)
	    {
	      /* We only allocate space for a new string if we
		 actually encounter a control character that
		 needs replacing.  */
	      escaped = (char *) xmalloc (len * 2 + 1);
	      strncpy (escaped, unescaped, i);
	      new_i = i;
	    }

	  escaped[new_i++] = '\\';

	  switch (c)
	    {
	    case '\a': escaped[new_i++] = 'a'; break;
	    case '\b': escaped[new_i++] = 'b'; break;
	    case '\f': escaped[new_i++] = 'f'; break;
	    case '\n': escaped[new_i++] = 'n'; break;
	    case '\r': escaped[new_i++] = 'r'; break;
	    case '\t': escaped[new_i++] = 't'; break;
	    case '\v': escaped[new_i++] = 'v'; break;
	    default:   escaped[new_i++] = '?'; break;
	    }
	}
      else if (escaped)
	escaped[new_i++] = c;
    }

  if (escaped)
    {
      escaped[new_i] = 0;
      m_str = escaped;
      m_owned = true;
    }
}
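
/* Illustrative sketch (not part of the original sources): attribute
   messages are passed through escaped_string so that control characters
   print as their C escape sequences, e.g. a literal tab becomes "\t".
   The helper name is hypothetical.  */

static void
example_escape_attribute_message (const char *raw)
{
  escaped_string msg;
  msg.escape (raw);
  if ((const char *) msg)
    /* The conversion operator yields the possibly rewritten string.  */
    inform (UNKNOWN_LOCATION, "%s", (const char *) msg);
}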
/* Warn about a use of an identifier which was marked deprecated.  Returns
   whether a warning was given.  */

bool
warn_deprecated_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0 || !warn_deprecated_decl)
    return false;

  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("deprecated",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("deprecated", attr);

  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  bool w = false;
  if (DECL_P (node))
    {
      auto_diagnostic_group d;
      if (msg)
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated: %s", node, (const char *) msg);
      else
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated", node);
      if (w)
	inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated: %s", what, (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "%qE is deprecated", what);
	}
      else
	{
	  if (msg)
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated: %s", (const char *) msg);
	  else
	    w = warning (OPT_Wdeprecated_declarations,
			 "type is deprecated");
	}

      if (w && decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }

  return w;
}
/* Error out with an identifier which was marked 'unavailable'.  */

void
error_unavailable_use (tree node, tree attr)
{
  escaped_string msg;

  if (node == 0)
    return;

  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("unavailable",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("unavailable", attr);

  if (attr)
    msg.escape (TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

  if (DECL_P (node))
    {
      auto_diagnostic_group d;
      if (msg)
	error ("%qD is unavailable: %s", node, (const char *) msg);
      else
	error ("%qD is unavailable", node);
      inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      auto_diagnostic_group d;
      if (what)
	{
	  if (msg)
	    error ("%qE is unavailable: %s", what, (const char *) msg);
	  else
	    error ("%qE is unavailable", what);
	}
      else
	{
	  if (msg)
	    error ("type is unavailable: %s", (const char *) msg);
	  else
	    error ("type is unavailable");
	}

      if (decl)
	inform (DECL_SOURCE_LOCATION (decl), "declared here");
    }
}
/* Return true if REF has a COMPONENT_REF with a bit-field field declaration
   somewhere in it.  */

bool
contains_bitfld_component_ref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}
/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If there is a default: label or case labels cover all possible
	 SWITCH_COND values, then the SWITCH_EXPR will transfer control
	 to some case label in all cases and all we care is whether the
	 SWITCH_BODY falls through.  */
      if (SWITCH_ALL_CASES_P (stmt))
	return block_may_fallthru (SWITCH_BODY (stmt));
      return true;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case EH_ELSE_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      return lang_hooks.block_may_fallthru (stmt);
    }
}
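
/* Illustrative sketch (not part of the original sources): a front end
   might use block_may_fallthru to decide whether a function body needs an
   implicit return appended.  The helper name is hypothetical.  */

static bool
example_needs_implicit_return (tree body)
{
  /* Conservative: also true when the walker cannot tell for sure.  */
  return block_may_fallthru (body);
}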
/* True if we are using EH to handle cleanups.  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}

/* Query whether EH is used for cleanups.  */

bool
using_eh_for_cleanups_p (void)
{
  return using_eh_for_cleanups_flag;
}
/* Wrapper for tree_code_name to ensure that tree code is valid.  */

const char *
get_tree_code_name (enum tree_code code)
{
  const char *invalid = "<invalid tree code>";

  /* The tree_code enum promotes to signed, but we could be getting
     invalid values, so force an unsigned comparison.  */
  if (unsigned (code) >= MAX_TREE_CODES)
    {
      if ((unsigned) code == 0xa5a5)
	return "ggc_freed";
      return invalid;
    }

  return tree_code_name[code];
}
/* Drops the TREE_OVERFLOW flag from T.  */

tree
drop_tree_overflow (tree t)
{
  gcc_checking_assert (TREE_OVERFLOW (t));

  /* For tree codes with a sharing machinery re-build the result.  */
  if (poly_int_tree_p (t))
    return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));

  /* For VECTOR_CST, remove the overflow bits from the encoded elements
     and canonicalize the result.  */
  if (TREE_CODE (t) == VECTOR_CST)
    {
      tree_vector_builder builder;
      builder.new_unary_operation (TREE_TYPE (t), t, true);
      unsigned int count = builder.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elt = VECTOR_CST_ELT (t, i);
	  if (TREE_OVERFLOW (elt))
	    elt = drop_tree_overflow (elt);
	  builder.quick_push (elt);
	}
      return builder.build ();
    }

  /* Otherwise, as all tcc_constants are possibly shared, copy the node
     and drop the flag.  */
  t = copy_node (t);
  TREE_OVERFLOW (t) = 0;

  /* For constants that contain nested constants, drop the flag
     from those as well.  */
  if (TREE_CODE (t) == COMPLEX_CST)
    {
      if (TREE_OVERFLOW (TREE_REALPART (t)))
	TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
      if (TREE_OVERFLOW (TREE_IMAGPART (t)))
	TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
    }

  return t;
}
/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if ((TREE_CODE (t) == MEM_REF
       || TREE_CODE (t) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);

  return t;
}
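
/* Illustrative sketch (not part of the original sources): for a reference
   such as array[i].fld[j], get_base_address returns the 'array' decl, which
   callers typically inspect with DECL_P.  The helper name is hypothetical.  */

static bool
example_ref_based_on_decl_p (tree ref)
{
  return DECL_P (get_base_address (ref));
}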
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  tree idxtype = TREE_TYPE (TREE_OPERAND (exp, 1));
  return (idxtype == error_mark_node
	  ? integer_zero_node : build_int_cst (idxtype, 0));
}
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
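
/* Illustrative sketch (not part of the original sources): combining the
   two accessors above to compute the element count of the array accessed
   by EXP (an ARRAY_REF), when both bounds are known constants.  The helper
   name is hypothetical.  */

static bool
example_array_ref_nelts (tree exp, unsigned HOST_WIDE_INT *nelts)
{
  tree lo = array_ref_low_bound (exp);
  tree hi = array_ref_up_bound (exp);
  if (!hi || !tree_fits_uhwi_p (lo) || !tree_fits_uhwi_p (hi))
    return false;
  *nelts = tree_to_uhwi (hi) - tree_to_uhwi (lo) + 1;
  return true;
}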
/* Returns true if REF is an array reference, component reference,
   or memory reference to an array at the end of a structure.
   If this is the case, the array may be allocated larger
   than its upper bound implies.  */

bool
array_at_struct_end_p (tree ref)
{
  tree atype;

  if (TREE_CODE (ref) == ARRAY_REF
      || TREE_CODE (ref) == ARRAY_RANGE_REF)
    {
      atype = TREE_TYPE (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }
  else if (TREE_CODE (ref) == COMPONENT_REF
	   && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
    atype = TREE_TYPE (TREE_OPERAND (ref, 1));
  else if (TREE_CODE (ref) == MEM_REF)
    {
      tree arg = TREE_OPERAND (ref, 0);
      if (TREE_CODE (arg) == ADDR_EXPR)
	arg = TREE_OPERAND (arg, 0);
      tree argtype = TREE_TYPE (arg);
      if (TREE_CODE (argtype) == RECORD_TYPE)
	{
	  if (tree fld = last_field (argtype))
	    {
	      atype = TREE_TYPE (fld);
	      if (TREE_CODE (atype) != ARRAY_TYPE)
		return false;
	      if (VAR_P (arg) && DECL_SIZE (fld))
		return false;
	    }
	  else
	    return false;
	}
      else
	return false;
    }
  else
    return false;

  if (TREE_CODE (ref) == STRING_CST)
    return false;

  tree ref_to_array = ref;
  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
	 non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF)
	{
	  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	    {
	      tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	      while (nextf && TREE_CODE (nextf) != FIELD_DECL)
		nextf = DECL_CHAIN (nextf);
	      if (nextf)
		return false;
	    }
	}
      /* If we have a multi-dimensional array we do not consider
	 a non-innermost dimension as flex array if the whole
	 multi-dimensional array is at struct end.
	 Same for an array of aggregates with a trailing array
	 member.  */
      else if (TREE_CODE (ref) == ARRAY_REF)
	return false;
      else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
	;
      /* If we view an underlying object as something else than what we
	 gathered up to now is what we have to rely on.  */
      else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
	break;
      else
	gcc_unreachable ();

      ref = TREE_OPERAND (ref, 0);
    }

  /* The array now is at struct end.  Treat flexible arrays as
     always subject to extend, even into just padding constrained by
     an underlying decl.  */
  if (! TYPE_SIZE (atype)
      || ! TYPE_DOMAIN (atype)
      || ! TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
    return true;

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  (Do not trust commons PR/69368).  */
  ref = get_base_address (ref);
  if (ref
      && DECL_P (ref)
      && !(flag_unconstrained_commons
	   && VAR_P (ref) && DECL_COMMON (ref))
      && DECL_SIZE_UNIT (ref)
      && TREE_CODE (DECL_SIZE_UNIT (ref)) == INTEGER_CST)
    {
      /* Check whether the array domain covers all of the available
	 padding.  */
      poly_int64 offset;
      if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST
	  || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (atype))) != INTEGER_CST)
	return true;
      if (! get_addr_base_and_unit_offset (ref_to_array, &offset))
	return true;

      /* If at least one extra element fits it is a flexarray.  */
      if (known_le ((wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (atype)))
		     - wi::to_offset (TYPE_MIN_VALUE (TYPE_DOMAIN (atype)))
		     + 2)
		    * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (atype))),
		    wi::to_offset (DECL_SIZE_UNIT (ref)) - offset))
	return true;

      return false;
    }

  return true;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);
  location_t loc = EXPR_LOCATION (exp);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
			     size_int (DECL_OFFSET_ALIGN (field)
				       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
/* Given the initializer INIT, return the initializer for the field
   DECL if it exists, otherwise null.  Used to obtain the initializer
   for a flexible array member and determine its size.  */

static tree
get_initializer_for (tree init, tree decl)
{
  STRIP_NOPS (init);

  tree fld, fld_init;
  unsigned HOST_WIDE_INT i;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), i, fld, fld_init)
    {
      if (decl == fld)
	return fld_init;

      if (TREE_CODE (fld) == CONSTRUCTOR)
	{
	  fld_init = get_initializer_for (fld_init, decl);
	  if (fld_init)
	    return fld_init;
	}
    }

  return NULL_TREE;
}
/* Determines the size of the member referenced by the COMPONENT_REF
   REF, using its initializer expression if necessary in order to
   determine the size of an initialized flexible array member.
   If non-null, set *SAM when REF refers to an interior zero-length
   array or a trailing one-element array.
   Returns the size as sizetype (which might be zero for an object
   with an uninitialized flexible array member) or null if the size
   cannot be determined.  */

tree
component_ref_size (tree ref, special_array_member *sam /* = NULL */)
{
  gcc_assert (TREE_CODE (ref) == COMPONENT_REF);

  special_array_member sambuf;
  if (!sam)
    sam = &sambuf;
  *sam = special_array_member::none;

  /* The object/argument referenced by the COMPONENT_REF and its type.  */
  tree arg = TREE_OPERAND (ref, 0);
  tree argtype = TREE_TYPE (arg);
  /* The referenced member.  */
  tree member = TREE_OPERAND (ref, 1);

  tree memsize = DECL_SIZE_UNIT (member);
  if (memsize)
    {
      tree memtype = TREE_TYPE (member);
      if (TREE_CODE (memtype) != ARRAY_TYPE)
	/* DECL_SIZE may be less than TYPE_SIZE in C++ when referring
	   to the type of a class with a virtual base which doesn't
	   reflect the size of the virtual's members (see pr97595).
	   If that's the case fail for now and implement something
	   more robust in the future.  */
	return (tree_int_cst_equal (memsize, TYPE_SIZE_UNIT (memtype))
		? memsize : NULL_TREE);

      bool trailing = array_at_struct_end_p (ref);
      bool zero_length = integer_zerop (memsize);
      if (!trailing && !zero_length)
	/* MEMBER is either an interior array or is an array with
	   more than one element.  */
	return memsize;

      if (zero_length)
	{
	  if (trailing)
	    *sam = special_array_member::trail_0;
	  else
	    {
	      *sam = special_array_member::int_0;
	      memsize = NULL_TREE;
	    }
	}

      if (!zero_length)
	if (tree dom = TYPE_DOMAIN (memtype))
	  if (tree min = TYPE_MIN_VALUE (dom))
	    if (tree max = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (min) == INTEGER_CST
		  && TREE_CODE (max) == INTEGER_CST)
		{
		  offset_int minidx = wi::to_offset (min);
		  offset_int maxidx = wi::to_offset (max);
		  offset_int neltsm1 = maxidx - minidx;
		  if (neltsm1 > 0)
		    /* MEMBER is an array with more than one element.  */
		    return memsize;

		  if (neltsm1 == 0)
		    *sam = special_array_member::trail_1;
		}

      /* For a reference to a zero- or one-element array member of a union
	 use the size of the union instead of the size of the member.  */
      if (TREE_CODE (argtype) == UNION_TYPE)
	memsize = TYPE_SIZE_UNIT (argtype);
    }

  /* MEMBER is either a bona fide flexible array member, or a zero-length
     array member, or an array of length one treated as such.  */

  /* If the reference is to a declared object and the member a true
     flexible array, try to determine its size from its initializer.  */
  poly_int64 baseoff = 0;
  tree base = get_addr_base_and_unit_offset (ref, &baseoff);
  if (!base || !VAR_P (base))
    {
      if (*sam != special_array_member::int_0)
	return NULL_TREE;

      if (TREE_CODE (arg) != COMPONENT_REF)
	return NULL_TREE;

      base = arg;
      while (TREE_CODE (base) == COMPONENT_REF)
	base = TREE_OPERAND (base, 0);
      baseoff = tree_to_poly_int64 (byte_position (TREE_OPERAND (ref, 1)));
    }

  /* BASE is the declared object of which MEMBER is either a member
     or that is cast to ARGTYPE (e.g., a char buffer used to store
     an ARGTYPE object).  */
  tree basetype = TREE_TYPE (base);

  /* Determine the base type of the referenced object.  If it's
     the same as ARGTYPE and MEMBER has a known size, return it.  */
  tree bt = basetype;
  if (*sam != special_array_member::int_0)
    while (TREE_CODE (bt) == ARRAY_TYPE)
      bt = TREE_TYPE (bt);
  bool typematch = useless_type_conversion_p (argtype, bt);
  if (memsize && typematch)
    return memsize;

  memsize = NULL_TREE;

  if (typematch)
    /* MEMBER is a true flexible array member.  Compute its size from
       the initializer of the BASE object if it has one.  */
    if (tree init = DECL_P (base) ? DECL_INITIAL (base) : NULL_TREE)
      if (init != error_mark_node)
	{
	  init = get_initializer_for (init, member);
	  if (init)
	    {
	      memsize = TYPE_SIZE_UNIT (TREE_TYPE (init));
	      if (tree refsize = TYPE_SIZE_UNIT (argtype))
		{
		  /* Use the larger of the initializer size and the tail
		     padding in the enclosing struct.  */
		  poly_int64 rsz = tree_to_poly_int64 (refsize);
		  rsz -= baseoff;
		  if (known_lt (tree_to_poly_int64 (memsize), rsz))
		    memsize = wide_int_to_tree (TREE_TYPE (memsize), rsz);
		}

	      baseoff = 0;
	    }
	}

  if (!memsize)
    {
      if (typematch)
	{
	  if (DECL_P (base)
	      && DECL_EXTERNAL (base)
	      && bt == basetype
	      && *sam != special_array_member::int_0)
	    /* The size of a flexible array member of an extern struct
	       with no initializer cannot be determined (it's defined
	       in another translation unit and can have an initializer
	       with an arbitrary number of elements).  */
	    return NULL_TREE;

	  /* Use the size of the base struct or, for interior zero-length
	     arrays, the size of the enclosing type.  */
	  memsize = TYPE_SIZE_UNIT (bt);
	}
      else if (DECL_P (base))
	/* Use the size of the BASE object (possibly an array of some
	   other type such as char used to store the struct).  */
	memsize = DECL_SIZE_UNIT (base);
      else
	return NULL_TREE;
    }

  /* If the flexible array member has a known size use the greater
     of it and the tail padding in the enclosing struct.
     Otherwise, when the size of the flexible array member is unknown
     and the referenced object is not a struct, use the size of its
     type when known.  This detects sizes of array buffers when cast
     to struct types with flexible array members.  */
  if (memsize)
    {
      poly_int64 memsz64 = memsize ? tree_to_poly_int64 (memsize) : 0;
      if (known_lt (baseoff, memsz64))
	{
	  memsz64 -= baseoff;
	  return wide_int_to_tree (TREE_TYPE (memsize), memsz64);
	}
      return size_zero_node;
    }

  /* Return "don't know" for an external non-array object since its
     flexible array member can be initialized to have any number of
     elements.  Otherwise, return zero because the flexible array
     member has no elements.  */
  return (DECL_P (base)
	  && DECL_EXTERNAL (base)
	  && (!typematch
	      || TREE_CODE (basetype) != ARRAY_TYPE)
	  ? NULL_TREE : size_zero_node);
}
/* Return the machine mode of T.  For vectors, returns the mode of the
   inner type.  The main use case is to feed the result to HONOR_NANS,
   avoiding the BLKmode that a direct TYPE_MODE (T) might return.  */

machine_mode
element_mode (const_tree t)
{
  if (!TYPE_P (t))
    t = TREE_TYPE (t);
  if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    t = TREE_TYPE (t);
  return TYPE_MODE (t);
}
/* Vector types need to re-check the target flags each time we report
   the machine mode.  We need to do this because attribute target can
   change the result of vector_mode_supported_p and have_regs_of_mode
   on a per-function basis.  Thus the TYPE_MODE of a VECTOR_TYPE can
   change on a per-function basis.  */
/* ??? Possibly a better solution is to run through all the types
   referenced by a function and re-compute the TYPE_MODE once, rather
   than make the TYPE_MODE macro call a function.  */

machine_mode
vector_type_mode (const_tree t)
{
  machine_mode mode;

  gcc_assert (TREE_CODE (t) == VECTOR_TYPE);

  mode = t->type_common.mode;
  if (VECTOR_MODE_P (mode)
      && (!targetm.vector_mode_supported_p (mode)
	  || !have_regs_of_mode[mode]))
    {
      scalar_int_mode innermode;

      /* For integers, try mapping it to a same-sized scalar mode.  */
      if (is_int_mode (TREE_TYPE (t)->type_common.mode, &innermode))
	{
	  poly_int64 size = (TYPE_VECTOR_SUBPARTS (t)
			     * GET_MODE_BITSIZE (innermode));
	  scalar_int_mode mode;
	  if (int_mode_for_size (size, 0).exists (&mode)
	      && have_regs_of_mode[mode])
	    return mode;
	}

      return BLKmode;
    }

  return mode;
}
/* Return the size in bits of each element of vector type TYPE.  */

unsigned int
vector_element_bits (const_tree type)
{
  gcc_checking_assert (VECTOR_TYPE_P (type));
  if (VECTOR_BOOLEAN_TYPE_P (type))
    return TYPE_PRECISION (TREE_TYPE (type));
  return tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
}
/* Calculate the size in bits of each element of vector type TYPE
   and return the result as a tree of type bitsizetype.  */

tree
vector_element_bits_tree (const_tree type)
{
  gcc_checking_assert (VECTOR_TYPE_P (type));
  if (VECTOR_BOOLEAN_TYPE_P (type))
    return bitsize_int (vector_element_bits (type));
  return TYPE_SIZE (TREE_TYPE (type));
}
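
/* Illustrative sketch (not part of the original sources): the total bit
   size of a vector type is its subpart count times the per-element size
   reported above.  The helper name is hypothetical.  */

static poly_uint64
example_vector_total_bits (const_tree type)
{
  return TYPE_VECTOR_SUBPARTS (type) * vector_element_bits (type);
}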
/* Verify that basic properties of T match TV and thus T can be a variant of
   TV.  TV should be the more specified variant (i.e. the main variant).  */

static bool
verify_type_variant (const_tree t, tree tv)
{
  /* Type variant can differ by:

     - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
       ENCODE_QUAL_ADDR_SPACE.
     - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
       in this case some values may not be set in the variant types
       (see TYPE_COMPLETE_P checks).
     - it is possible to have TYPE_ARTIFICIAL variant of non-artificial type
     - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
     - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
     - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
     - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
       this is necessary to make it possible to merge types from different TUs
     - arrays, pointers and references may have TREE_TYPE that is a variant
       of TREE_TYPE of their main variants.
     - aggregates may have new TYPE_FIELDS list that list variants of
       the main variant TYPE_FIELDS.
     - vector types may differ by TYPE_VECTOR_OPAQUE
   */

  /* Convenience macro for matching individual fields.  */
#define verify_variant_match(flag)					\
  do {									\
    if (flag (tv) != flag (t))						\
      {									\
	error ("type variant differs by %s", #flag);			\
	debug_tree (tv);						\
	debug_tree (t);							\
	return false;							\
      }									\
  } while (false)

  /* tree_base checks.  */

  verify_variant_match (TREE_CODE);
  /* FIXME: Ada builds non-artificial variants of artificial types.  */
#if 0
  if (TYPE_ARTIFICIAL (tv))
    verify_variant_match (TYPE_ARTIFICIAL);
#endif
  if (POINTER_TYPE_P (tv))
    verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
  /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada build.  */
  verify_variant_match (TYPE_UNSIGNED);
  verify_variant_match (TYPE_PACKED);
  if (TREE_CODE (t) == REFERENCE_TYPE)
    verify_variant_match (TYPE_REF_IS_RVALUE);
  if (AGGREGATE_TYPE_P (t))
    verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
  else
    verify_variant_match (TYPE_SATURATING);
  /* FIXME: This check triggers during libstdc++ build.  */
#if 0
  if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t))
    verify_variant_match (TYPE_FINAL_P);
#endif

  /* tree_type_common checks.  */

  if (COMPLETE_TYPE_P (t))
    {
      verify_variant_match (TYPE_MODE);
      if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
	verify_variant_match (TYPE_SIZE);
      if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
	  && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
	  && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
	{
	  gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
					TYPE_SIZE_UNIT (tv), 0));
	  error ("type variant has different %<TYPE_SIZE_UNIT%>");
	  debug_tree (tv);
	  error ("type variant%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (tv));
	  error ("type%'s %<TYPE_SIZE_UNIT%>");
	  debug_tree (TYPE_SIZE_UNIT (t));
	  return false;
	}
      verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
    }
  verify_variant_match (TYPE_PRECISION);
  if (RECORD_OR_UNION_TYPE_P (t))
    verify_variant_match (TYPE_TRANSPARENT_AGGR);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_NONALIASED_COMPONENT);
  /* During LTO we merge variant lists from different translation units
     that may differ BY TYPE_CONTEXT that in turn may point
     to TRANSLATION_UNIT_DECL.
     Ada also builds variants of types with different TYPE_CONTEXT.  */
#if 0
  if (!in_lto_p || !TYPE_FILE_SCOPE_P (t))
    verify_variant_match (TYPE_CONTEXT);
#endif
  if (TREE_CODE (t) == ARRAY_TYPE || TREE_CODE (t) == INTEGER_TYPE)
    verify_variant_match (TYPE_STRING_FLAG);
  if (TREE_CODE (t) == RECORD_TYPE || TREE_CODE (t) == UNION_TYPE)
    verify_variant_match (TYPE_CXX_ODR_P);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    {
      error ("type variant with %<TYPE_ALIAS_SET_KNOWN_P%>");
      debug_tree (tv);
      return false;
    }

  /* tree_type_non_common checks.  */

  /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
     and dangle the pointer from time to time.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
      && (in_lto_p || !TYPE_VFIELD (tv)
	  || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
    {
      error ("type variant has different %<TYPE_VFIELD%>");
      debug_tree (tv);
      return false;
    }
  if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
      || TREE_CODE (t) == INTEGER_TYPE
      || TREE_CODE (t) == BOOLEAN_TYPE
      || TREE_CODE (t) == REAL_TYPE
      || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      verify_variant_match (TYPE_MAX_VALUE);
      verify_variant_match (TYPE_MIN_VALUE);
    }
  if (TREE_CODE (t) == METHOD_TYPE)
    verify_variant_match (TYPE_METHOD_BASETYPE);
  if (TREE_CODE (t) == OFFSET_TYPE)
    verify_variant_match (TYPE_OFFSET_BASETYPE);
  if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_ARRAY_MAX_SIZE);
  /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
     or even type's main variant.  This is needed to make bootstrap pass
     and the bug seems new in GCC 5.
     C++ FE should be updated to make this consistent and we should check
     that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
     is a match with main variant.

     Also disable the check for Java for now because of parser hack that builds
     first a dummy BINFO and then sometimes replaces it by real BINFO in some
     of the copies.  */
  if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
      && TYPE_BINFO (t) != TYPE_BINFO (tv)
      /* FIXME: Java sometimes keeps dump TYPE_BINFOs on variant types.
	 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
	 at LTO time only.  */
      && (in_lto_p && odr_type_p (t)))
    {
      error ("type variant has different %<TYPE_BINFO%>");
      debug_tree (tv);
      error ("type variant%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (tv));
      error ("type%'s %<TYPE_BINFO%>");
      debug_tree (TYPE_BINFO (t));
      return false;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE
      && TYPE_VALUES (t))
    verify_variant_match (TYPE_VALUES);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_DOMAIN);
  /* Permit incomplete variants of complete type.  While FEs may complete
     all variants, this does not happen for C++ templates in all cases.  */
  else if (RECORD_OR_UNION_TYPE_P (t)
	   && COMPLETE_TYPE_P (t)
	   && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
    {
      tree f1, f2;

      /* Fortran builds qualified variants as new records with items of
	 qualified type.  Verify that they look the same.  */
      for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
	   f1 && f2;
	   f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
	    || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
		 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
		/* FIXME: gfc_nonrestricted_type builds all types as variants
		   with exception of pointer types.  It deeply copies the type
		   which means that we may end up with a variant type
		   referring non-variant pointer.  We may change it to
		   produce types as variants, too, like
		   objc_get_protocol_qualified_type does.  */
		&& !POINTER_TYPE_P (TREE_TYPE (f1)))
	    || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
	    || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
	  break;
      if (f1 || f2)
	{
	  error ("type variant has different %<TYPE_FIELDS%>");
	  debug_tree (tv);
	  error ("first mismatch is field");
	  debug_tree (f1);
	  error ("and field");
	  debug_tree (f2);
	  return false;
	}
    }
  else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
    verify_variant_match (TYPE_ARG_TYPES);
  /* For C++ the qualified variant of array type is really an array type
     of qualified TREE_TYPE.
     objc builds variants of pointer where pointer to type is a variant, too
     in objc_get_protocol_qualified_type.  */
  if (TREE_TYPE (t) != TREE_TYPE (tv)
      && ((TREE_CODE (t) != ARRAY_TYPE
	   && !POINTER_TYPE_P (t))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (t))
	     != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
    {
      error ("type variant has different %<TREE_TYPE%>");
      debug_tree (tv);
      error ("type variant%'s %<TREE_TYPE%>");
      debug_tree (TREE_TYPE (tv));
      error ("type%'s %<TREE_TYPE%>");
13134 debug_tree (TREE_TYPE (t
));
13137 if (type_with_alias_set_p (t
)
13138 && !gimple_canonical_types_compatible_p (t
, tv
, false))
13140 error ("type is not compatible with its variant");
13142 error ("type variant%'s %<TREE_TYPE%>");
13143 debug_tree (TREE_TYPE (tv
));
13144 error ("type%'s %<TREE_TYPE%>");
13145 debug_tree (TREE_TYPE (t
));
13149 #undef verify_variant_match
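
/* Illustrative example (added for exposition; not part of the original
   source): a qualified variant shares almost everything with its main
   variant.  For a declaration such as

     const int ci;

   TYPE_MAIN_VARIANT of the type of CI is integer_type_node, and the checks
   above require the variant "const int" to agree with "int" on TREE_CODE,
   TYPE_MODE, TYPE_SIZE, TYPE_PRECISION and so on, differing only in
   qualifiers and variant-list bookkeeping.  */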
/* The TYPE_CANONICAL merging machinery.  It should closely resemble
   the middle-end types_compatible_p function.  It needs to avoid
   claiming types are different for types that should be treated
   the same with respect to TBAA.  Canonical types are also used
   for IL consistency checks via the useless_type_conversion_p
   predicate which does not handle all type kinds itself but falls
   back to pointer-comparison of TYPE_CANONICAL for aggregates
   for example.  */

/* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
   type calculation because we need to allow inter-operability between signed
   and unsigned variants.  */

bool
type_with_interoperable_signedness (const_tree type)
{
  /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with
     both signed char and unsigned char.  Similarly the Fortran FE builds
     C_SIZE_T as a signed type, while C defines it as unsigned.  */

  return tree_code_for_canonical_type_merging (TREE_CODE (type))
	   == INTEGER_TYPE
	 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
	     || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
}
/* Return true iff T1 and T2 are structurally identical for what
   TBAA is concerned.
   This function is used both by lto.c canonical type merging and by the
   verifier.  If TRUST_TYPE_CANONICAL we do not look into structure of types
   that have TYPE_CANONICAL defined and assume them equivalent.  This is useful
   only for LTO because only in these cases TYPE_CANONICAL equivalence
   corresponds to the one defined by gimple_canonical_types_compatible_p.  */

bool
gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
				     bool trust_type_canonical)
{
  /* Type variants should be same as the main variant.  When not doing sanity
     checking to verify this fact, go to main variants and save some work.  */
  if (trust_type_canonical)
    {
      t1 = TYPE_MAIN_VARIANT (t1);
      t2 = TYPE_MAIN_VARIANT (t2);
    }

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* We consider complete types always compatible with incomplete type.
     This does not make sense for canonical type calculation and thus we
     need to ensure that we are never called on it.

     FIXME: For more correctness the function probably should have three modes
	1) mode assuming that types are complete matching their structure
	2) mode allowing incomplete types but producing equivalence classes
	   and thus ignoring all info from complete types
	3) mode allowing incomplete types to match complete but checking
	   compatibility between complete types.

     1 and 2 can be used for canonical type calculation.  3 is the real
     definition of type compatibility that can be used e.g. for warnings
     during declaration merging.  */

  gcc_assert (!trust_type_canonical
	      || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));

  /* If the types have been previously registered and found equal
     they still are.  */

  if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
      && trust_type_canonical)
    {
      /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
	 they are always NULL, but they are set to non-NULL for types
	 constructed by build_pointer_type and variants.  In this case the
	 TYPE_CANONICAL is more fine-grained than the equivalence we test
	 (where all pointers are considered equal).  Be sure to not return
	 false negatives.  */
      gcc_checking_assert (canonical_type_used_p (t1)
			   && canonical_type_used_p (t2));
      return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
    }

  /* For types where we do ODR based TBAA the canonical type is always
     set correctly, so we know that types are different if their
     canonical types do not match.  */
  if (trust_type_canonical
      && (odr_type_p (t1) && odr_based_tbaa_p (t1))
	  != (odr_type_p (t2) && odr_based_tbaa_p (t2)))
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
  if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different mode.  */
  if (TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
	return false;

      /* In some cases the signed and unsigned types are required to be
	 inter-operable.  */
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
	  && !type_with_interoperable_signedness (t1))
	return false;

      /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
	 interoperable with "signed char".  Unless all frontends are revisited
	 to agree on these types, we must ignore the flag completely.  */

      /* The Fortran standard defines the C_PTR type to be compatible with
	 every C pointer.  For this reason we need to glob all pointers into
	 one.  Still, pointers in different address spaces are not
	 compatible.  */
      if (POINTER_TYPE_P (t1))
	{
	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    return false;
	}

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
	  || TREE_CODE (t1) == COMPLEX_TYPE)
	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						    TREE_TYPE (t2),
						    trust_type_canonical);

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
						trust_type_canonical)
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	return false;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    return true;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    return false;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		return true;
	      else
		return false;
	    }
	}

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
						trust_type_canonical))
	return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gimple_canonical_types_compatible_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2),
		      trust_type_canonical))
		return false;
	    }

	  if (parms1 || parms2)
	    return false;

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* Don't try to compare variants of an incomplete type, before
	   TYPE_FIELDS has been copied around.  */
	if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
	  return true;

	if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
	  return false;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 || f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Skip non-fields and zero-sized fields.  */
	    while (f1 && (TREE_CODE (f1) != FIELD_DECL
			  || (DECL_SIZE (f1)
			      && integer_zerop (DECL_SIZE (f1)))))
	      f1 = TREE_CHAIN (f1);
	    while (f2 && (TREE_CODE (f2) != FIELD_DECL
			  || (DECL_SIZE (f2)
			      && integer_zerop (DECL_SIZE (f2)))))
	      f2 = TREE_CHAIN (f2);
	    if (!f1 || !f2)
	      break;
	    /* The fields must have the same name, offset and type.  */
	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		|| !gimple_compare_field_offset (f1, f2)
		|| !gimple_canonical_types_compatible_p
		      (TREE_TYPE (f1), TREE_TYPE (f2),
		       trust_type_canonical))
	      return false;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  return false;

	return true;
      }

    default:
      /* Consider all types with language specific trees in them mutually
	 compatible.  This is executed only from verify_type and false
	 positives can be tolerated.  */
      gcc_assert (!in_lto_p);
      return true;
    }
}
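
/* Illustrative example (added for exposition; not part of the original
   source): under this predicate two struct types coming from different
   translation units, such as

     struct A { int i; float f; };    // TU 1
     struct B { int x; float y; };    // TU 2

   are treated as compatible for TBAA purposes: they have the same tree
   code and mode, and the field walk above compares offsets and field types
   but not field names.  "struct { int i; }" and "struct { long i; }" would
   not be merged, since the field types (and usually the modes) differ.  */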
13443 /* Verify type T. */
13446 verify_type (const_tree t
)
13448 bool error_found
= false;
13449 tree mv
= TYPE_MAIN_VARIANT (t
);
13452 error ("main variant is not defined");
13453 error_found
= true;
13455 else if (mv
!= TYPE_MAIN_VARIANT (mv
))
13457 error ("%<TYPE_MAIN_VARIANT%> has different %<TYPE_MAIN_VARIANT%>");
13459 error_found
= true;
13461 else if (t
!= mv
&& !verify_type_variant (t
, mv
))
13462 error_found
= true;
13464 tree ct
= TYPE_CANONICAL (t
);
13467 else if (TYPE_CANONICAL (t
) != ct
)
13469 error ("%<TYPE_CANONICAL%> has different %<TYPE_CANONICAL%>");
13471 error_found
= true;
13473 /* Method and function types cannot be used to address memory and thus
13474 TYPE_CANONICAL really matters only for determining useless conversions.
13476 FIXME: C++ FE produce declarations of builtin functions that are not
13477 compatible with main variants. */
13478 else if (TREE_CODE (t
) == FUNCTION_TYPE
)
13481 /* FIXME: gimple_canonical_types_compatible_p cannot compare types
13482 with variably sized arrays because their sizes possibly
13483 gimplified to different variables. */
13484 && !variably_modified_type_p (ct
, NULL
)
13485 && !gimple_canonical_types_compatible_p (t
, ct
, false)
13486 && COMPLETE_TYPE_P (t
))
13488 error ("%<TYPE_CANONICAL%> is not compatible");
13490 error_found
= true;
13493 if (COMPLETE_TYPE_P (t
) && TYPE_CANONICAL (t
)
13494 && TYPE_MODE (t
) != TYPE_MODE (TYPE_CANONICAL (t
)))
13496 error ("%<TYPE_MODE%> of %<TYPE_CANONICAL%> is not compatible");
13498 error_found
= true;
13500 if (TYPE_MAIN_VARIANT (t
) == t
&& ct
&& TYPE_MAIN_VARIANT (ct
) != ct
)
13502 error ("%<TYPE_CANONICAL%> of main variant is not main variant");
13504 debug_tree (TYPE_MAIN_VARIANT (ct
));
13505 error_found
= true;
13509 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13510 if (RECORD_OR_UNION_TYPE_P (t
))
13512 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13513 and danagle the pointer from time to time. */
13514 if (TYPE_VFIELD (t
)
13515 && TREE_CODE (TYPE_VFIELD (t
)) != FIELD_DECL
13516 && TREE_CODE (TYPE_VFIELD (t
)) != TREE_LIST
)
13518 error ("%<TYPE_VFIELD%> is not %<FIELD_DECL%> nor %<TREE_LIST%>");
13519 debug_tree (TYPE_VFIELD (t
));
13520 error_found
= true;
13523 else if (TREE_CODE (t
) == POINTER_TYPE
)
13525 if (TYPE_NEXT_PTR_TO (t
)
13526 && TREE_CODE (TYPE_NEXT_PTR_TO (t
)) != POINTER_TYPE
)
13528 error ("%<TYPE_NEXT_PTR_TO%> is not %<POINTER_TYPE%>");
13529 debug_tree (TYPE_NEXT_PTR_TO (t
));
13530 error_found
= true;
13533 else if (TREE_CODE (t
) == REFERENCE_TYPE
)
13535 if (TYPE_NEXT_REF_TO (t
)
13536 && TREE_CODE (TYPE_NEXT_REF_TO (t
)) != REFERENCE_TYPE
)
13538 error ("%<TYPE_NEXT_REF_TO%> is not %<REFERENCE_TYPE%>");
13539 debug_tree (TYPE_NEXT_REF_TO (t
));
13540 error_found
= true;
13543 else if (INTEGRAL_TYPE_P (t
) || TREE_CODE (t
) == REAL_TYPE
13544 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13546 /* FIXME: The following check should pass:
13547 useless_type_conversion_p (const_cast <tree> (t),
13548 TREE_TYPE (TYPE_MIN_VALUE (t))
13549 but does not for C sizetypes in LTO. */
13552 /* Check various uses of TYPE_MAXVAL_RAW. */
13553 if (RECORD_OR_UNION_TYPE_P (t
))
13555 if (!TYPE_BINFO (t
))
13557 else if (TREE_CODE (TYPE_BINFO (t
)) != TREE_BINFO
)
13559 error ("%<TYPE_BINFO%> is not %<TREE_BINFO%>");
13560 debug_tree (TYPE_BINFO (t
));
13561 error_found
= true;
13563 else if (TREE_TYPE (TYPE_BINFO (t
)) != TYPE_MAIN_VARIANT (t
))
13565 error ("%<TYPE_BINFO%> type is not %<TYPE_MAIN_VARIANT%>");
13566 debug_tree (TREE_TYPE (TYPE_BINFO (t
)));
13567 error_found
= true;
13570 else if (TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
)
13572 if (TYPE_METHOD_BASETYPE (t
)
13573 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != RECORD_TYPE
13574 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != UNION_TYPE
)
13576 error ("%<TYPE_METHOD_BASETYPE%> is not record nor union");
13577 debug_tree (TYPE_METHOD_BASETYPE (t
));
13578 error_found
= true;
13581 else if (TREE_CODE (t
) == OFFSET_TYPE
)
13583 if (TYPE_OFFSET_BASETYPE (t
)
13584 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != RECORD_TYPE
13585 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != UNION_TYPE
)
13587 error ("%<TYPE_OFFSET_BASETYPE%> is not record nor union");
13588 debug_tree (TYPE_OFFSET_BASETYPE (t
));
13589 error_found
= true;
13592 else if (INTEGRAL_TYPE_P (t
) || TREE_CODE (t
) == REAL_TYPE
13593 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13595 /* FIXME: The following check should pass:
13596 useless_type_conversion_p (const_cast <tree> (t),
13597 TREE_TYPE (TYPE_MAX_VALUE (t))
13598 but does not for C sizetypes in LTO. */
13600 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13602 if (TYPE_ARRAY_MAX_SIZE (t
)
13603 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t
)) != INTEGER_CST
)
13605 error ("%<TYPE_ARRAY_MAX_SIZE%> not %<INTEGER_CST%>");
13606 debug_tree (TYPE_ARRAY_MAX_SIZE (t
));
13607 error_found
= true;
13610 else if (TYPE_MAX_VALUE_RAW (t
))
13612 error ("%<TYPE_MAX_VALUE_RAW%> non-NULL");
13613 debug_tree (TYPE_MAX_VALUE_RAW (t
));
13614 error_found
= true;
13617 if (TYPE_LANG_SLOT_1 (t
) && in_lto_p
)
13619 error ("%<TYPE_LANG_SLOT_1 (binfo)%> field is non-NULL");
13620 debug_tree (TYPE_LANG_SLOT_1 (t
));
13621 error_found
= true;
13624 /* Check various uses of TYPE_VALUES_RAW. */
13625 if (TREE_CODE (t
) == ENUMERAL_TYPE
)
13626 for (tree l
= TYPE_VALUES (t
); l
; l
= TREE_CHAIN (l
))
13628 tree value
= TREE_VALUE (l
);
13629 tree name
= TREE_PURPOSE (l
);
13631 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13632 CONST_DECL of ENUMERAL TYPE. */
13633 if (TREE_CODE (value
) != INTEGER_CST
&& TREE_CODE (value
) != CONST_DECL
)
13635 error ("enum value is not %<CONST_DECL%> or %<INTEGER_CST%>");
13636 debug_tree (value
);
13638 error_found
= true;
13640 if (TREE_CODE (TREE_TYPE (value
)) != INTEGER_TYPE
13641 && !useless_type_conversion_p (const_cast <tree
> (t
), TREE_TYPE (value
)))
13643 error ("enum value type is not %<INTEGER_TYPE%> nor convertible "
13645 debug_tree (value
);
13647 error_found
= true;
13649 if (TREE_CODE (name
) != IDENTIFIER_NODE
)
13651 error ("enum value name is not %<IDENTIFIER_NODE%>");
13652 debug_tree (value
);
13654 error_found
= true;
13657 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13659 if (TYPE_DOMAIN (t
) && TREE_CODE (TYPE_DOMAIN (t
)) != INTEGER_TYPE
)
13661 error ("array %<TYPE_DOMAIN%> is not integer type");
13662 debug_tree (TYPE_DOMAIN (t
));
13663 error_found
= true;
13666 else if (RECORD_OR_UNION_TYPE_P (t
))
13668 if (TYPE_FIELDS (t
) && !COMPLETE_TYPE_P (t
) && in_lto_p
)
13670 error ("%<TYPE_FIELDS%> defined in incomplete type");
13671 error_found
= true;
13673 for (tree fld
= TYPE_FIELDS (t
); fld
; fld
= TREE_CHAIN (fld
))
13675 /* TODO: verify properties of decls. */
13676 if (TREE_CODE (fld
) == FIELD_DECL
)
13678 else if (TREE_CODE (fld
) == TYPE_DECL
)
13680 else if (TREE_CODE (fld
) == CONST_DECL
)
13682 else if (VAR_P (fld
))
13684 else if (TREE_CODE (fld
) == TEMPLATE_DECL
)
13686 else if (TREE_CODE (fld
) == USING_DECL
)
13688 else if (TREE_CODE (fld
) == FUNCTION_DECL
)
13692 error ("wrong tree in %<TYPE_FIELDS%> list");
13694 error_found
= true;
13698 else if (TREE_CODE (t
) == INTEGER_TYPE
13699 || TREE_CODE (t
) == BOOLEAN_TYPE
13700 || TREE_CODE (t
) == OFFSET_TYPE
13701 || TREE_CODE (t
) == REFERENCE_TYPE
13702 || TREE_CODE (t
) == NULLPTR_TYPE
13703 || TREE_CODE (t
) == POINTER_TYPE
)
13705 if (TYPE_CACHED_VALUES_P (t
) != (TYPE_CACHED_VALUES (t
) != NULL
))
13707 error ("%<TYPE_CACHED_VALUES_P%> is %i while %<TYPE_CACHED_VALUES%> "
13709 TYPE_CACHED_VALUES_P (t
), (void *)TYPE_CACHED_VALUES (t
));
13710 error_found
= true;
13712 else if (TYPE_CACHED_VALUES_P (t
) && TREE_CODE (TYPE_CACHED_VALUES (t
)) != TREE_VEC
)
13714 error ("%<TYPE_CACHED_VALUES%> is not %<TREE_VEC%>");
13715 debug_tree (TYPE_CACHED_VALUES (t
));
13716 error_found
= true;
13718 /* Verify just enough of cache to ensure that no one copied it to new type.
13719 All copying should go by copy_node that should clear it. */
13720 else if (TYPE_CACHED_VALUES_P (t
))
13723 for (i
= 0; i
< TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t
)); i
++)
13724 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)
13725 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)) != t
)
13727 error ("wrong %<TYPE_CACHED_VALUES%> entry");
13728 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
));
13729 error_found
= true;
13734 else if (TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
)
13735 for (tree l
= TYPE_ARG_TYPES (t
); l
; l
= TREE_CHAIN (l
))
13737 /* C++ FE uses TREE_PURPOSE to store initial values. */
13738 if (TREE_PURPOSE (l
) && in_lto_p
)
13740 error ("%<TREE_PURPOSE%> is non-NULL in %<TYPE_ARG_TYPES%> list");
13742 error_found
= true;
13744 if (!TYPE_P (TREE_VALUE (l
)))
13746 error ("wrong entry in %<TYPE_ARG_TYPES%> list");
13748 error_found
= true;
13751 else if (!is_lang_specific (t
) && TYPE_VALUES_RAW (t
))
13753 error ("%<TYPE_VALUES_RAW%> field is non-NULL");
13754 debug_tree (TYPE_VALUES_RAW (t
));
13755 error_found
= true;
13757 if (TREE_CODE (t
) != INTEGER_TYPE
13758 && TREE_CODE (t
) != BOOLEAN_TYPE
13759 && TREE_CODE (t
) != OFFSET_TYPE
13760 && TREE_CODE (t
) != REFERENCE_TYPE
13761 && TREE_CODE (t
) != NULLPTR_TYPE
13762 && TREE_CODE (t
) != POINTER_TYPE
13763 && TYPE_CACHED_VALUES_P (t
))
13765 error ("%<TYPE_CACHED_VALUES_P%> is set while it should not be");
13766 error_found
= true;
13769 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13770 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
13772 if (TREE_CODE (t
) == METHOD_TYPE
13773 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t
)) != TYPE_METHOD_BASETYPE (t
))
13775 error ("%<TYPE_METHOD_BASETYPE%> is not main variant");
13776 error_found
= true;
13781 debug_tree (const_cast <tree
> (t
));
13782 internal_error ("%qs failed", __func__
);
/* Return 1 if ARG interpreted as signed in its precision is known to be
   always positive or 2 if ARG is known to be always negative, or 3 if
   ARG may be positive or negative.  */

int
get_range_pos_neg (tree arg)
{
  if (arg == error_mark_node)
    return 3;

  int prec = TYPE_PRECISION (TREE_TYPE (arg));
  int cnt = 0;
  if (TREE_CODE (arg) == INTEGER_CST)
    {
      wide_int w = wi::sext (wi::to_wide (arg), prec);
      if (wi::neg_p (w))
	return 2;
      else
	return 1;
    }
  while (CONVERT_EXPR_P (arg)
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
    {
      arg = TREE_OPERAND (arg, 0);
      /* Narrower value zero extended into wider type
	 will always result in positive values.  */
      if (TYPE_UNSIGNED (TREE_TYPE (arg))
	  && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
	return 1;
      prec = TYPE_PRECISION (TREE_TYPE (arg));
      if (++cnt > 30)
	return 3;
    }

  if (TREE_CODE (arg) != SSA_NAME)
    return 3;
  value_range r;
  while (!get_global_range_query ()->range_of_expr (r, arg)
	 || r.kind () != VR_RANGE)
    {
      gimple *g = SSA_NAME_DEF_STMT (arg);
      if (is_gimple_assign (g)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
	{
	  tree t = gimple_assign_rhs1 (g);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (t))
		  && TYPE_PRECISION (TREE_TYPE (t)) < prec)
		return 1;
	      prec = TYPE_PRECISION (TREE_TYPE (t));
	      arg = t;
	      if (++cnt > 30)
		return 3;
	      continue;
	    }
	}
      return 3;
    }
  if (TYPE_UNSIGNED (TREE_TYPE (arg)))
    {
      /* For unsigned values, the "positive" range comes
	 below the "negative" range.  */
      if (!wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
	return 2;
    }
  else
    {
      if (!wi::neg_p (wi::sext (r.lower_bound (), prec), SIGNED))
	return 1;
      if (wi::neg_p (wi::sext (r.upper_bound (), prec), SIGNED))
	return 2;
    }
  return 3;
}
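
/* Illustrative example (added for exposition; not part of the original
   source): for

     unsigned char c = ...;
     int i = c;     // zero extension into a wider type

   get_range_pos_neg on the converted value returns 1 (known non-negative),
   because a narrower unsigned value zero-extended into a wider type cannot
   be negative, whereas a plain "int" SSA name with no useful recorded range
   yields 3 (may be positive or negative).  */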
/* Return true if ARG is marked with the nonnull attribute in the
   current function signature.  */

bool
nonnull_arg_p (const_tree arg)
{
  tree t, attrs, fntype;
  unsigned HOST_WIDE_INT arg_num;

  gcc_assert (TREE_CODE (arg) == PARM_DECL
	      && (POINTER_TYPE_P (TREE_TYPE (arg))
		  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));

  /* The static chain decl is always non null.  */
  if (arg == cfun->static_chain_decl)
    return true;

  /* THIS argument of method is always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
      && arg == DECL_ARGUMENTS (cfun->decl)
      && flag_delete_null_pointer_checks)
    return true;

  /* Values passed by reference are always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
      && flag_delete_null_pointer_checks)
    return true;

  fntype = TREE_TYPE (cfun->decl);
  for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);

      /* If "nonnull" wasn't specified, we know nothing about the argument.  */
      if (attrs == NULL_TREE)
	return false;

      /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
      if (TREE_VALUE (attrs) == NULL_TREE)
	return true;

      /* Get the position number for ARG in the function signature.  */
      for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
	   t;
	   t = DECL_CHAIN (t), arg_num++)
	{
	  if (t == arg)
	    break;
	}

      gcc_assert (t == arg);

      /* Now see if ARG_NUM is mentioned in the nonnull list.  */
      for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
	{
	  if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
	    return true;
	}
    }

  return false;
}
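
/* Illustrative example (added for exposition; not part of the original
   source): given

     void f (char *a, char *b, char *c) __attribute__ ((nonnull (1, 3)));

   nonnull_arg_p returns true for the PARM_DECLs of A and C (positions 1 and
   3 appear in the attribute's argument list) and false for B, while a bare
   __attribute__ ((nonnull)) with no arguments would make it return true for
   every pointer parameter.  */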
/* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
   in LOC.  */

location_t
set_block (location_t loc, tree block)
{
  location_t pure_loc = get_pure_location (loc);
  source_range src_range = get_range_from_loc (line_table, loc);
  return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
}

location_t
set_source_range (tree expr, location_t start, location_t finish)
{
  source_range src_range;
  src_range.m_start = start;
  src_range.m_finish = finish;
  return set_source_range (expr, src_range);
}

location_t
set_source_range (tree expr, source_range src_range)
{
  if (!EXPR_P (expr))
    return UNKNOWN_LOCATION;

  location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
  location_t adhoc = COMBINE_LOCATION_DATA (line_table,
					    pure_loc,
					    src_range,
					    NULL);
  SET_EXPR_LOCATION (expr, adhoc);
  return adhoc;
}
/* Return EXPR, potentially wrapped with a node expression LOC,
   if !CAN_HAVE_LOCATION_P (expr).

   NON_LVALUE_EXPR is used for wrapping constants, apart from STRING_CST.
   VIEW_CONVERT_EXPR is used for wrapping non-constants and STRING_CST.

   Wrapper nodes can be identified using location_wrapper_p.  */

tree
maybe_wrap_with_location (tree expr, location_t loc)
{
  if (expr == NULL)
    return NULL;
  if (loc == UNKNOWN_LOCATION)
    return expr;
  if (CAN_HAVE_LOCATION_P (expr))
    return expr;
  /* We should only be adding wrappers for constants and for decls,
     or for some exceptional tree nodes (e.g. BASELINK in the C++ FE).  */
  gcc_assert (CONSTANT_CLASS_P (expr)
	      || DECL_P (expr)
	      || EXCEPTIONAL_CLASS_P (expr));

  /* For now, don't add wrappers to exceptional tree nodes, to minimize
     any impact of the wrapper nodes.  */
  if (EXCEPTIONAL_CLASS_P (expr))
    return expr;

  /* Compiler-generated temporary variables don't need a wrapper.  */
  if (DECL_P (expr) && DECL_ARTIFICIAL (expr) && DECL_IGNORED_P (expr))
    return expr;

  /* If any auto_suppress_location_wrappers are active, don't create
     wrappers.  */
  if (suppress_location_wrappers > 0)
    return expr;

  tree_code code
    = (((CONSTANT_CLASS_P (expr) && TREE_CODE (expr) != STRING_CST)
	|| (TREE_CODE (expr) == CONST_DECL && !TREE_STATIC (expr)))
       ? NON_LVALUE_EXPR : VIEW_CONVERT_EXPR);
  tree wrapper = build1_loc (loc, code, TREE_TYPE (expr), expr);
  /* Mark this node as being a wrapper.  */
  EXPR_LOCATION_WRAPPER_P (wrapper) = 1;
  return wrapper;
}

int suppress_location_wrappers;
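
/* Illustrative example (added for exposition; not part of the original
   source): wrapping the constant 42 at some location LOC

     tree cst = build_int_cst (integer_type_node, 42);
     tree wrapped = maybe_wrap_with_location (cst, loc);

   yields a NON_LVALUE_EXPR with EXPR_LOCATION_WRAPPER_P set whose operand is
   CST, so diagnostics can point at LOC while the shared INTEGER_CST node
   itself stays location-free.  STRING_CSTs and decls get a VIEW_CONVERT_EXPR
   wrapper instead.  */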
/* Return the name of combined function FN, for debugging purposes.  */

const char *
combined_fn_name (combined_fn fn)
{
  if (builtin_fn_p (fn))
    {
      tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
      return IDENTIFIER_POINTER (DECL_NAME (fndecl));
    }
  else
    return internal_fn_name (as_internal_fn (fn));
}
/* Return a bitmap with a bit set corresponding to each argument in
   a function call type FNTYPE declared with attribute nonnull,
   or null if none of the function's arguments are nonnull.  The caller
   must free the bitmap.  */

bitmap
get_nonnull_args (const_tree fntype)
{
  if (fntype == NULL_TREE)
    return NULL;

  bitmap argmap = NULL;
  if (TREE_CODE (fntype) == METHOD_TYPE)
    {
      /* The this pointer in C++ non-static member functions is
	 implicitly nonnull whether or not it's declared as such.  */
      argmap = BITMAP_ALLOC (NULL);
      bitmap_set_bit (argmap, 0);
    }

  tree attrs = TYPE_ATTRIBUTES (fntype);
  if (!attrs)
    return argmap;

  /* A function declaration can specify multiple attribute nonnull,
     each with zero or more arguments.  The loop below creates a bitmap
     representing a union of all the arguments.  An empty (but non-null)
     bitmap means that all arguments have been declared nonnull.  */
  for ( ; attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);
      if (!attrs)
	break;

      if (!argmap)
	argmap = BITMAP_ALLOC (NULL);

      if (!TREE_VALUE (attrs))
	{
	  /* Clear the bitmap in case a previous attribute nonnull
	     set it and this one overrides it for all arguments.  */
	  bitmap_clear (argmap);
	  return argmap;
	}

      /* Iterate over the indices of the format arguments declared nonnull
	 and set a bit for each.  */
      for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
	{
	  unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
	  bitmap_set_bit (argmap, val);
	}
    }

  return argmap;
}
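
/* Illustrative example (added for exposition; not part of the original
   source): for a declaration such as

     void g (void *p, void *q) __attribute__ ((nonnull (2)));

   get_nonnull_args returns a bitmap with only bit 1 set (the attribute uses
   1-based positions, the bitmap is 0-based), while an argument-less
   __attribute__ ((nonnull)) yields an empty but non-null bitmap meaning
   "all pointer arguments".  The caller is expected to free the bitmap.  */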
/* Returns true if TYPE is a type where it and all of its subobjects
   (recursively) are of structure, union, or array type.  */

bool
is_empty_type (const_tree type)
{
  if (RECORD_OR_UNION_TYPE_P (type))
    {
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL
	    && !DECL_PADDING_P (field)
	    && !is_empty_type (TREE_TYPE (field)))
	  return false;
      return true;
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    return (integer_minus_onep (array_type_nelts (type))
	    || TYPE_DOMAIN (type) == NULL_TREE
	    || is_empty_type (TREE_TYPE (type)));
  return false;
}
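
/* Illustrative example (added for exposition; not part of the original
   source):

     struct E {};                   // empty
     struct F { struct E e[4]; };   // still empty: only empty subobjects
     struct G { int i; };           // not empty

   is_empty_type returns true for E and F and false for G; padding-only
   FIELD_DECLs (DECL_PADDING_P) are ignored by the walk above.  */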
/* Implement TARGET_EMPTY_RECORD_P.  Return true if TYPE is an empty type
   that shouldn't be passed via stack.  */

bool
default_is_empty_record (const_tree type)
{
  if (!abi_version_at_least (12))
    return false;

  if (type == error_mark_node)
    return false;

  if (TREE_ADDRESSABLE (type))
    return false;

  return is_empty_type (TYPE_MAIN_VARIANT (type));
}
/* Determine whether TYPE is a structure with a flexible array member,
   or a union containing such a structure (possibly recursively).  */

bool
flexible_array_type_p (const_tree type)
{
  tree x, last;
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      last = NULL_TREE;
      for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
	if (TREE_CODE (x) == FIELD_DECL)
	  last = x;
      if (last == NULL_TREE)
	return false;
      if (TREE_CODE (TREE_TYPE (last)) == ARRAY_TYPE
	  && TYPE_SIZE (TREE_TYPE (last)) == NULL_TREE
	  && TYPE_DOMAIN (TREE_TYPE (last)) != NULL_TREE
	  && TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (last))) == NULL_TREE)
	return true;
      return false;
    case UNION_TYPE:
      for (x = TYPE_FIELDS (type); x != NULL_TREE; x = DECL_CHAIN (x))
	{
	  if (TREE_CODE (x) == FIELD_DECL
	      && flexible_array_type_p (TREE_TYPE (x)))
	    return true;
	}
      return false;
    default:
      return false;
    }
}
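
/* Illustrative example (added for exposition; not part of the original
   source):

     struct S { int n; char data[]; };   // flexible array member
     union U { struct S s; int i; };     // contains such a struct

   flexible_array_type_p returns true for S and U, but false for
   "struct T { int n; char data[1]; }" because its last member has a
   complete (sized) array type.  */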
/* Like int_size_in_bytes, but handle empty records specially.  */

HOST_WIDE_INT
arg_int_size_in_bytes (const_tree type)
{
  return TYPE_EMPTY_P (type) ? 0 : int_size_in_bytes (type);
}

/* Like size_in_bytes, but handle empty records specially.  */

tree
arg_size_in_bytes (const_tree type)
{
  return TYPE_EMPTY_P (type) ? size_zero_node : size_in_bytes (type);
}
/* Return true if an expression with CODE has to have the same result type as
   its first operand.  */

bool
expr_type_first_operand_type_p (tree_code code)
{
  switch (code)
    {
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case CONJ_EXPR:

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      return false;
    }
}
/* Return a typenode for the "standard" C type with a given name.  */
tree
get_typenode_from_name (const char *name)
{
  if (name == NULL || *name == '\0')
    return NULL_TREE;

  if (strcmp (name, "char") == 0)
    return char_type_node;
  if (strcmp (name, "unsigned char") == 0)
    return unsigned_char_type_node;
  if (strcmp (name, "signed char") == 0)
    return signed_char_type_node;

  if (strcmp (name, "short int") == 0)
    return short_integer_type_node;
  if (strcmp (name, "short unsigned int") == 0)
    return short_unsigned_type_node;

  if (strcmp (name, "int") == 0)
    return integer_type_node;
  if (strcmp (name, "unsigned int") == 0)
    return unsigned_type_node;

  if (strcmp (name, "long int") == 0)
    return long_integer_type_node;
  if (strcmp (name, "long unsigned int") == 0)
    return long_unsigned_type_node;

  if (strcmp (name, "long long int") == 0)
    return long_long_integer_type_node;
  if (strcmp (name, "long long unsigned int") == 0)
    return long_long_unsigned_type_node;

  gcc_unreachable ();
}
/* List of pointer types used to declare builtins before we have seen their
   real declaration.

   Keep the size up to date in tree.h !  */
const builtin_structptr_type builtin_structptr_types[6] =
{
  { fileptr_type_node, ptr_type_node, "FILE" },
  { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
  { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
  { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
  { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
  { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
};
/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}

/* A wrapper around TARGET_VERIFY_TYPE_CONTEXT that makes the silent_p
   parameter default to false and that weeds out error_mark_node.  */

bool
verify_type_context (location_t loc, type_context_kind context,
		     const_tree type, bool silent_p)
{
  if (type == error_mark_node)
    return true;

  gcc_assert (TYPE_P (type));
  return (!targetm.verify_type_context
	  || targetm.verify_type_context (loc, context, type, silent_p));
}
/* Return true if NEW_ASM and DELETE_ASM name a valid pair of new and
   delete operators.  Return false if they may or may not name such
   a pair and, when nonnull, set *PCERTAIN to true if they certainly
   do not.  */

bool
valid_new_delete_pair_p (tree new_asm, tree delete_asm,
			 bool *pcertain /* = NULL */)
{
  bool certain;
  if (!pcertain)
    pcertain = &certain;

  const char *new_name = IDENTIFIER_POINTER (new_asm);
  const char *delete_name = IDENTIFIER_POINTER (delete_asm);
  unsigned int new_len = IDENTIFIER_LENGTH (new_asm);
  unsigned int delete_len = IDENTIFIER_LENGTH (delete_asm);

  /* The following failures are due to invalid names so they're not
     considered certain mismatches.  */
  *pcertain = false;

  if (new_len < 5 || delete_len < 6)
    return false;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (new_name[0] == '_')
    ++new_name, --new_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (delete_name[0] == '_')
    ++delete_name, --delete_len;
  if (new_len < 4 || delete_len < 5)
    return false;

  /* The following failures are due to names of user-defined operators
     so they're also not considered certain mismatches.  */

  /* *_len is now just the length after initial underscores.  */
  if (new_name[0] != 'Z' || new_name[1] != 'n')
    return false;
  if (delete_name[0] != 'Z' || delete_name[1] != 'd')
    return false;

  /* The following failures are certain mismatches.  */
  *pcertain = true;

  /* _Znw must match _Zdl, _Zna must match _Zda.  */
  if ((new_name[2] != 'w' || delete_name[2] != 'l')
      && (new_name[2] != 'a' || delete_name[2] != 'a'))
    return false;
  /* 'j', 'm' and 'y' correspond to size_t.  */
  if (new_name[3] != 'j' && new_name[3] != 'm' && new_name[3] != 'y')
    return false;
  if (delete_name[3] != 'P' || delete_name[4] != 'v')
    return false;
  if (new_len == 4
      || (new_len == 18 && !memcmp (new_name + 4, "RKSt9nothrow_t", 14)))
    {
      /* _ZnXY or _ZnXYRKSt9nothrow_t matches
	 _ZdXPv, _ZdXPvY and _ZdXPvRKSt9nothrow_t.  */
      if (delete_len == 5)
	return true;
      if (delete_len == 6 && delete_name[5] == new_name[3])
	return true;
      if (delete_len == 19 && !memcmp (delete_name + 5, "RKSt9nothrow_t", 14))
	return true;
    }
  else if ((new_len == 19 && !memcmp (new_name + 4, "St11align_val_t", 15))
	   || (new_len == 33
	       && !memcmp (new_name + 4, "St11align_val_tRKSt9nothrow_t", 29)))
    {
      /* _ZnXYSt11align_val_t or _ZnXYSt11align_val_tRKSt9nothrow_t matches
	 _ZdXPvSt11align_val_t or _ZdXPvYSt11align_val_t or
	 _ZdXPvSt11align_val_tRKSt9nothrow_t.  */
      if (delete_len == 20 && !memcmp (delete_name + 5, "St11align_val_t", 15))
	return true;
      if (delete_len == 21
	  && delete_name[5] == new_name[3]
	  && !memcmp (delete_name + 6, "St11align_val_t", 15))
	return true;
      if (delete_len == 34
	  && !memcmp (delete_name + 5, "St11align_val_tRKSt9nothrow_t", 29))
	return true;
    }

  /* The negative result is conservative.  */
  *pcertain = false;
  return false;
}
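
/* Illustrative example (added for exposition; not part of the original
   source): with Itanium C++ mangling on an LP64 target,

     operator new (size_t)      -> _Znwm   pairs with  _ZdlPv (and _ZdlPvm)
     operator new[] (size_t)    -> _Znam   pairs with  _ZdaPv (and _ZdaPvm)
     aligned new/delete         -> _ZnwmSt11align_val_t / _ZdlPvSt11align_val_t

   so a call pairing _Znwm with _ZdaPv is rejected with *PCERTAIN set, while
   names that do not even start with _Zn/_Zd (user-defined operators) merely
   yield false without certainty.  */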
/* Return the zero-based number corresponding to the argument being
   deallocated if FNDECL is a deallocation function or an out-of-bounds
   value if it isn't.  */

unsigned
fndecl_dealloc_argno (tree fndecl)
{
  /* A call to operator delete isn't recognized as one to a built-in.  */
  if (DECL_IS_OPERATOR_DELETE_P (fndecl))
    {
      if (DECL_IS_REPLACEABLE_OPERATOR (fndecl))
	return 0;

      /* Avoid placement delete that's not been inlined.  */
      tree fname = DECL_ASSEMBLER_NAME (fndecl);
      if (id_equal (fname, "_ZdlPvS_")	    // ordinary form
	  || id_equal (fname, "_ZdaPvS_"))  // array form
	return UINT_MAX;
      return 0;
    }

  /* TODO: Handle user-defined functions with attribute malloc?  Handle
     known non-built-ins like fopen?  */
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_FREE:
	case BUILT_IN_REALLOC:
	  return 0;
	default:
	  break;
	}
      return UINT_MAX;
    }

  tree attrs = DECL_ATTRIBUTES (fndecl);
  if (!attrs)
    return UINT_MAX;

  for (tree atfree = attrs;
       (atfree = lookup_attribute ("*dealloc", atfree));
       atfree = TREE_CHAIN (atfree))
    {
      tree alloc = TREE_VALUE (atfree);
      if (!alloc)
	continue;

      tree pos = TREE_CHAIN (alloc);
      if (!pos)
	return 0;

      pos = TREE_VALUE (pos);
      return TREE_INT_CST_LOW (pos) - 1;
    }

  return UINT_MAX;
}
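
/* Illustrative example (added for exposition; not part of the original
   source): free and realloc deallocate their first argument, so the
   function returns 0 for them.  For a user-declared pair such as

     void my_free (void *);
     __attribute__ ((malloc (my_free, 1))) void *my_alloc (size_t);

   the front ends record, roughly speaking, an internal "*dealloc" attribute
   on my_free with the 1-based position, and fndecl_dealloc_argno (my_free)
   would then return 0 (its first argument); functions that are not known
   deallocators yield UINT_MAX.  */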
/* If EXPR refers to a character array or pointer declared attribute
   nonstring, return a decl for that array or pointer and set *REF
   to the referenced enclosing object or pointer.  Otherwise return
   null.  */

tree
get_attr_nonstring_decl (tree expr, tree *ref)
{
  tree decl = expr;
  tree var = NULL_TREE;
  if (TREE_CODE (decl) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (decl);

      if (is_gimple_assign (def))
	{
	  tree_code code = gimple_assign_rhs_code (def);
	  if (code == ADDR_EXPR
	      || code == COMPONENT_REF
	      || code == VAR_DECL)
	    decl = gimple_assign_rhs1 (def);
	}
      else
	var = SSA_NAME_VAR (decl);
    }

  if (TREE_CODE (decl) == ADDR_EXPR)
    decl = TREE_OPERAND (decl, 0);

  /* To simplify calling code, store the referenced DECL regardless of
     the attribute determined below, but avoid storing the SSA_NAME_VAR
     obtained above (it's not useful for dataflow purposes).  */
  if (ref)
    *ref = decl;

  /* Use the SSA_NAME_VAR that was determined above to see if it's
     declared nonstring.  Otherwise drill down into the referenced
     DECL.  */
  if (var)
    decl = var;
  else if (TREE_CODE (decl) == ARRAY_REF)
    decl = TREE_OPERAND (decl, 0);
  else if (TREE_CODE (decl) == COMPONENT_REF)
    decl = TREE_OPERAND (decl, 1);
  else if (TREE_CODE (decl) == MEM_REF)
    return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);

  if (DECL_P (decl)
      && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
    return decl;

  return NULL_TREE;
}
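
/* Illustrative example (added for exposition; not part of the original
   source):

     char buf[8] __attribute__ ((nonstring));
     struct P { char name[8] __attribute__ ((nonstring)); } p;

   For an access to BUF the VAR_DECL carrying the attribute is returned, and
   for an access like p.name the COMPONENT_REF is drilled down to the
   FIELD_DECL carrying the attribute; in both cases *REF is set to the
   referenced object so callers can report it in diagnostics.  */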
namespace selftest {

/* Selftests for tree.  */

/* Verify that integer constants are sane.  */

static void
test_integer_constants ()
{
  ASSERT_TRUE (integer_type_node != NULL);
  ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);

  tree type = integer_type_node;

  tree zero = build_zero_cst (type);
  ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
  ASSERT_EQ (type, TREE_TYPE (zero));

  tree one = build_int_cst (type, 1);
  ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
  ASSERT_EQ (type, TREE_TYPE (zero));
}

/* Verify identifiers.  */

static void
test_identifiers ()
{
  tree identifier = get_identifier ("foo");
  ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
  ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
}

/* Verify LABEL_DECL.  */

static void
test_labels ()
{
  tree identifier = get_identifier ("err");
  tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
				identifier, void_type_node);
  ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
  ASSERT_FALSE (FORCED_LABEL (label_decl));
}
14548 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14549 are given by VALS. */
14552 build_vector (tree type
, const vec
<tree
> &vals MEM_STAT_DECL
)
14554 gcc_assert (known_eq (vals
.length (), TYPE_VECTOR_SUBPARTS (type
)));
14555 tree_vector_builder
builder (type
, vals
.length (), 1);
14556 builder
.splice (vals
);
14557 return builder
.build ();
14560 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14563 check_vector_cst (const vec
<tree
> &expected
, tree actual
)
14565 ASSERT_KNOWN_EQ (expected
.length (),
14566 TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual
)));
14567 for (unsigned int i
= 0; i
< expected
.length (); ++i
)
14568 ASSERT_EQ (wi::to_wide (expected
[i
]),
14569 wi::to_wide (vector_cst_elt (actual
, i
)));
14572 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14573 and that its elements match EXPECTED. */
14576 check_vector_cst_duplicate (const vec
<tree
> &expected
, tree actual
,
14577 unsigned int npatterns
)
14579 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
14580 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual
));
14581 ASSERT_EQ (npatterns
, vector_cst_encoded_nelts (actual
));
14582 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual
));
14583 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual
));
14584 check_vector_cst (expected
, actual
);
14587 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14588 and NPATTERNS background elements, and that its elements match
14592 check_vector_cst_fill (const vec
<tree
> &expected
, tree actual
,
14593 unsigned int npatterns
)
14595 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
14596 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual
));
14597 ASSERT_EQ (2 * npatterns
, vector_cst_encoded_nelts (actual
));
14598 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual
));
14599 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual
));
14600 check_vector_cst (expected
, actual
);
14603 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14604 and that its elements match EXPECTED. */
14607 check_vector_cst_stepped (const vec
<tree
> &expected
, tree actual
,
14608 unsigned int npatterns
)
14610 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
14611 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual
));
14612 ASSERT_EQ (3 * npatterns
, vector_cst_encoded_nelts (actual
));
14613 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual
));
14614 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual
));
14615 check_vector_cst (expected
, actual
);
14618 /* Test the creation of VECTOR_CSTs. */
14621 test_vector_cst_patterns (ALONE_CXX_MEM_STAT_INFO
)
14623 auto_vec
<tree
, 8> elements (8);
14624 elements
.quick_grow (8);
14625 tree element_type
= build_nonstandard_integer_type (16, true);
14626 tree vector_type
= build_vector_type (element_type
, 8);
14628 /* Test a simple linear series with a base of 0 and a step of 1:
14629 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
14630 for (unsigned int i
= 0; i
< 8; ++i
)
14631 elements
[i
] = build_int_cst (element_type
, i
);
14632 tree vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14633 check_vector_cst_stepped (elements
, vector
, 1);
14635 /* Try the same with the first element replaced by 100:
14636 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
14637 elements
[0] = build_int_cst (element_type
, 100);
14638 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14639 check_vector_cst_stepped (elements
, vector
, 1);
14641 /* Try a series that wraps around.
14642 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
14643 for (unsigned int i
= 1; i
< 8; ++i
)
14644 elements
[i
] = build_int_cst (element_type
, (65530 + i
) & 0xffff);
14645 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14646 check_vector_cst_stepped (elements
, vector
, 1);
14648 /* Try a downward series:
14649 { 100, 79, 78, 77, 76, 75, 75, 73 }. */
14650 for (unsigned int i
= 1; i
< 8; ++i
)
14651 elements
[i
] = build_int_cst (element_type
, 80 - i
);
14652 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14653 check_vector_cst_stepped (elements
, vector
, 1);
14655 /* Try two interleaved series with different bases and steps:
14656 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
14657 elements
[1] = build_int_cst (element_type
, 53);
14658 for (unsigned int i
= 2; i
< 8; i
+= 2)
14660 elements
[i
] = build_int_cst (element_type
, 70 - i
* 2);
14661 elements
[i
+ 1] = build_int_cst (element_type
, 200 + i
* 3);
14663 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14664 check_vector_cst_stepped (elements
, vector
, 2);
14666 /* Try a duplicated value:
14667 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
14668 for (unsigned int i
= 1; i
< 8; ++i
)
14669 elements
[i
] = elements
[0];
14670 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14671 check_vector_cst_duplicate (elements
, vector
, 1);
14673 /* Try an interleaved duplicated value:
14674 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
14675 elements
[1] = build_int_cst (element_type
, 55);
14676 for (unsigned int i
= 2; i
< 8; ++i
)
14677 elements
[i
] = elements
[i
- 2];
14678 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14679 check_vector_cst_duplicate (elements
, vector
, 2);
14681 /* Try a duplicated value with 2 exceptions
14682 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
14683 elements
[0] = build_int_cst (element_type
, 41);
14684 elements
[1] = build_int_cst (element_type
, 97);
14685 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14686 check_vector_cst_fill (elements
, vector
, 2);
14688 /* Try with and without a step
14689 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
14690 for (unsigned int i
= 3; i
< 8; i
+= 2)
14691 elements
[i
] = build_int_cst (element_type
, i
* 7);
14692 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14693 check_vector_cst_stepped (elements
, vector
, 2);
14695 /* Try a fully-general constant:
14696 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
14697 elements
[5] = build_int_cst (element_type
, 9990);
14698 vector
= build_vector (vector_type
, elements PASS_MEM_STAT
);
14699 check_vector_cst_fill (elements
, vector
, 4);
14702 /* Verify that STRIP_NOPS (NODE) is EXPECTED.
14703 Helper function for test_location_wrappers, to deal with STRIP_NOPS
14704 modifying its argument in-place. */
14707 check_strip_nops (tree node
, tree expected
)
14710 ASSERT_EQ (expected
, node
);
14713 /* Verify location wrappers. */
14716 test_location_wrappers ()
14718 location_t loc
= BUILTINS_LOCATION
;
14720 ASSERT_EQ (NULL_TREE
, maybe_wrap_with_location (NULL_TREE
, loc
));
14722 /* Wrapping a constant. */
14723 tree int_cst
= build_int_cst (integer_type_node
, 42);
14724 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_cst
));
14725 ASSERT_FALSE (location_wrapper_p (int_cst
));
14727 tree wrapped_int_cst
= maybe_wrap_with_location (int_cst
, loc
);
14728 ASSERT_TRUE (location_wrapper_p (wrapped_int_cst
));
14729 ASSERT_EQ (loc
, EXPR_LOCATION (wrapped_int_cst
));
14730 ASSERT_EQ (int_cst
, tree_strip_any_location_wrapper (wrapped_int_cst
));
14732 /* We shouldn't add wrapper nodes for UNKNOWN_LOCATION. */
14733 ASSERT_EQ (int_cst
, maybe_wrap_with_location (int_cst
, UNKNOWN_LOCATION
));
14735 /* We shouldn't add wrapper nodes for nodes that CAN_HAVE_LOCATION_P. */
14736 tree cast
= build1 (NOP_EXPR
, char_type_node
, int_cst
);
14737 ASSERT_TRUE (CAN_HAVE_LOCATION_P (cast
));
14738 ASSERT_EQ (cast
, maybe_wrap_with_location (cast
, loc
));
14740 /* Wrapping a STRING_CST. */
14741 tree string_cst
= build_string (4, "foo");
14742 ASSERT_FALSE (CAN_HAVE_LOCATION_P (string_cst
));
14743 ASSERT_FALSE (location_wrapper_p (string_cst
));
14745 tree wrapped_string_cst
= maybe_wrap_with_location (string_cst
, loc
);
14746 ASSERT_TRUE (location_wrapper_p (wrapped_string_cst
));
14747 ASSERT_EQ (VIEW_CONVERT_EXPR
, TREE_CODE (wrapped_string_cst
));
14748 ASSERT_EQ (loc
, EXPR_LOCATION (wrapped_string_cst
));
14749 ASSERT_EQ (string_cst
, tree_strip_any_location_wrapper (wrapped_string_cst
));
14752 /* Wrapping a variable. */
14753 tree int_var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
,
14754 get_identifier ("some_int_var"),
14755 integer_type_node
);
14756 ASSERT_FALSE (CAN_HAVE_LOCATION_P (int_var
));
14757 ASSERT_FALSE (location_wrapper_p (int_var
));
14759 tree wrapped_int_var
= maybe_wrap_with_location (int_var
, loc
);
14760 ASSERT_TRUE (location_wrapper_p (wrapped_int_var
));
14761 ASSERT_EQ (loc
, EXPR_LOCATION (wrapped_int_var
));
14762 ASSERT_EQ (int_var
, tree_strip_any_location_wrapper (wrapped_int_var
));
14764 /* Verify that "reinterpret_cast<int>(some_int_var)" is not a location
14766 tree r_cast
= build1 (NON_LVALUE_EXPR
, integer_type_node
, int_var
);
14767 ASSERT_FALSE (location_wrapper_p (r_cast
));
14768 ASSERT_EQ (r_cast
, tree_strip_any_location_wrapper (r_cast
));
14770 /* Verify that STRIP_NOPS removes wrappers. */
14771 check_strip_nops (wrapped_int_cst
, int_cst
);
14772 check_strip_nops (wrapped_string_cst
, string_cst
);
14773 check_strip_nops (wrapped_int_var
, int_var
);
14776 /* Test various tree predicates. Verify that location wrappers don't
14777 affect the results. */
14782 /* Build various constants and wrappers around them. */
14784 location_t loc
= BUILTINS_LOCATION
;
14786 tree i_0
= build_int_cst (integer_type_node
, 0);
14787 tree wr_i_0
= maybe_wrap_with_location (i_0
, loc
);
14789 tree i_1
= build_int_cst (integer_type_node
, 1);
14790 tree wr_i_1
= maybe_wrap_with_location (i_1
, loc
);
14792 tree i_m1
= build_int_cst (integer_type_node
, -1);
14793 tree wr_i_m1
= maybe_wrap_with_location (i_m1
, loc
);
14795 tree f_0
= build_real_from_int_cst (float_type_node
, i_0
);
14796 tree wr_f_0
= maybe_wrap_with_location (f_0
, loc
);
14797 tree f_1
= build_real_from_int_cst (float_type_node
, i_1
);
14798 tree wr_f_1
= maybe_wrap_with_location (f_1
, loc
);
14799 tree f_m1
= build_real_from_int_cst (float_type_node
, i_m1
);
14800 tree wr_f_m1
= maybe_wrap_with_location (f_m1
, loc
);
14802 tree c_i_0
= build_complex (NULL_TREE
, i_0
, i_0
);
14803 tree c_i_1
= build_complex (NULL_TREE
, i_1
, i_0
);
14804 tree c_i_m1
= build_complex (NULL_TREE
, i_m1
, i_0
);
14806 tree c_f_0
= build_complex (NULL_TREE
, f_0
, f_0
);
14807 tree c_f_1
= build_complex (NULL_TREE
, f_1
, f_0
);
14808 tree c_f_m1
= build_complex (NULL_TREE
, f_m1
, f_0
);
14810 /* TODO: vector constants. */
14812 /* Test integer_onep. */
14813 ASSERT_FALSE (integer_onep (i_0
));
14814 ASSERT_FALSE (integer_onep (wr_i_0
));
14815 ASSERT_TRUE (integer_onep (i_1
));
14816 ASSERT_TRUE (integer_onep (wr_i_1
));
14817 ASSERT_FALSE (integer_onep (i_m1
));
14818 ASSERT_FALSE (integer_onep (wr_i_m1
));
14819 ASSERT_FALSE (integer_onep (f_0
));
14820 ASSERT_FALSE (integer_onep (wr_f_0
));
14821 ASSERT_FALSE (integer_onep (f_1
));
14822 ASSERT_FALSE (integer_onep (wr_f_1
));
14823 ASSERT_FALSE (integer_onep (f_m1
));
14824 ASSERT_FALSE (integer_onep (wr_f_m1
));
14825 ASSERT_FALSE (integer_onep (c_i_0
));
14826 ASSERT_TRUE (integer_onep (c_i_1
));
14827 ASSERT_FALSE (integer_onep (c_i_m1
));
14828 ASSERT_FALSE (integer_onep (c_f_0
));
14829 ASSERT_FALSE (integer_onep (c_f_1
));
14830 ASSERT_FALSE (integer_onep (c_f_m1
));
14832 /* Test integer_zerop. */
14833 ASSERT_TRUE (integer_zerop (i_0
));
14834 ASSERT_TRUE (integer_zerop (wr_i_0
));
14835 ASSERT_FALSE (integer_zerop (i_1
));
14836 ASSERT_FALSE (integer_zerop (wr_i_1
));
14837 ASSERT_FALSE (integer_zerop (i_m1
));
14838 ASSERT_FALSE (integer_zerop (wr_i_m1
));
14839 ASSERT_FALSE (integer_zerop (f_0
));
14840 ASSERT_FALSE (integer_zerop (wr_f_0
));
14841 ASSERT_FALSE (integer_zerop (f_1
));
14842 ASSERT_FALSE (integer_zerop (wr_f_1
));
14843 ASSERT_FALSE (integer_zerop (f_m1
));
14844 ASSERT_FALSE (integer_zerop (wr_f_m1
));
14845 ASSERT_TRUE (integer_zerop (c_i_0
));
14846 ASSERT_FALSE (integer_zerop (c_i_1
));
14847 ASSERT_FALSE (integer_zerop (c_i_m1
));
14848 ASSERT_FALSE (integer_zerop (c_f_0
));
14849 ASSERT_FALSE (integer_zerop (c_f_1
));
14850 ASSERT_FALSE (integer_zerop (c_f_m1
));
14852 /* Test integer_all_onesp. */
14853 ASSERT_FALSE (integer_all_onesp (i_0
));
14854 ASSERT_FALSE (integer_all_onesp (wr_i_0
));
14855 ASSERT_FALSE (integer_all_onesp (i_1
));
14856 ASSERT_FALSE (integer_all_onesp (wr_i_1
));
14857 ASSERT_TRUE (integer_all_onesp (i_m1
));
14858 ASSERT_TRUE (integer_all_onesp (wr_i_m1
));
14859 ASSERT_FALSE (integer_all_onesp (f_0
));
14860 ASSERT_FALSE (integer_all_onesp (wr_f_0
));
14861 ASSERT_FALSE (integer_all_onesp (f_1
));
14862 ASSERT_FALSE (integer_all_onesp (wr_f_1
));
14863 ASSERT_FALSE (integer_all_onesp (f_m1
));
14864 ASSERT_FALSE (integer_all_onesp (wr_f_m1
));
14865 ASSERT_FALSE (integer_all_onesp (c_i_0
));
14866 ASSERT_FALSE (integer_all_onesp (c_i_1
));
14867 ASSERT_FALSE (integer_all_onesp (c_i_m1
));
14868 ASSERT_FALSE (integer_all_onesp (c_f_0
));
14869 ASSERT_FALSE (integer_all_onesp (c_f_1
));
14870 ASSERT_FALSE (integer_all_onesp (c_f_m1
));
14872 /* Test integer_minus_onep. */
14873 ASSERT_FALSE (integer_minus_onep (i_0
));
14874 ASSERT_FALSE (integer_minus_onep (wr_i_0
));
14875 ASSERT_FALSE (integer_minus_onep (i_1
));
14876 ASSERT_FALSE (integer_minus_onep (wr_i_1
));
14877 ASSERT_TRUE (integer_minus_onep (i_m1
));
14878 ASSERT_TRUE (integer_minus_onep (wr_i_m1
));
14879 ASSERT_FALSE (integer_minus_onep (f_0
));
14880 ASSERT_FALSE (integer_minus_onep (wr_f_0
));
14881 ASSERT_FALSE (integer_minus_onep (f_1
));
14882 ASSERT_FALSE (integer_minus_onep (wr_f_1
));
14883 ASSERT_FALSE (integer_minus_onep (f_m1
));
14884 ASSERT_FALSE (integer_minus_onep (wr_f_m1
));
14885 ASSERT_FALSE (integer_minus_onep (c_i_0
));
14886 ASSERT_FALSE (integer_minus_onep (c_i_1
));
14887 ASSERT_TRUE (integer_minus_onep (c_i_m1
));
14888 ASSERT_FALSE (integer_minus_onep (c_f_0
));
14889 ASSERT_FALSE (integer_minus_onep (c_f_1
));
14890 ASSERT_FALSE (integer_minus_onep (c_f_m1
));
  /* Test integer_each_onep. */
  ASSERT_FALSE (integer_each_onep (i_0));
  ASSERT_FALSE (integer_each_onep (wr_i_0));
  ASSERT_TRUE (integer_each_onep (i_1));
  ASSERT_TRUE (integer_each_onep (wr_i_1));
  ASSERT_FALSE (integer_each_onep (i_m1));
  ASSERT_FALSE (integer_each_onep (wr_i_m1));
  ASSERT_FALSE (integer_each_onep (f_0));
  ASSERT_FALSE (integer_each_onep (wr_f_0));
  ASSERT_FALSE (integer_each_onep (f_1));
  ASSERT_FALSE (integer_each_onep (wr_f_1));
  ASSERT_FALSE (integer_each_onep (f_m1));
  ASSERT_FALSE (integer_each_onep (wr_f_m1));
  ASSERT_FALSE (integer_each_onep (c_i_0));
  ASSERT_FALSE (integer_each_onep (c_i_1));
  ASSERT_FALSE (integer_each_onep (c_i_m1));
  ASSERT_FALSE (integer_each_onep (c_f_0));
  ASSERT_FALSE (integer_each_onep (c_f_1));
  ASSERT_FALSE (integer_each_onep (c_f_m1));
  /* Test integer_truep. */
  ASSERT_FALSE (integer_truep (i_0));
  ASSERT_FALSE (integer_truep (wr_i_0));
  ASSERT_TRUE (integer_truep (i_1));
  ASSERT_TRUE (integer_truep (wr_i_1));
  ASSERT_FALSE (integer_truep (i_m1));
  ASSERT_FALSE (integer_truep (wr_i_m1));
  ASSERT_FALSE (integer_truep (f_0));
  ASSERT_FALSE (integer_truep (wr_f_0));
  ASSERT_FALSE (integer_truep (f_1));
  ASSERT_FALSE (integer_truep (wr_f_1));
  ASSERT_FALSE (integer_truep (f_m1));
  ASSERT_FALSE (integer_truep (wr_f_m1));
  ASSERT_FALSE (integer_truep (c_i_0));
  ASSERT_TRUE (integer_truep (c_i_1));
  ASSERT_FALSE (integer_truep (c_i_m1));
  ASSERT_FALSE (integer_truep (c_f_0));
  ASSERT_FALSE (integer_truep (c_f_1));
  ASSERT_FALSE (integer_truep (c_f_m1));
  /* Test integer_nonzerop. */
  ASSERT_FALSE (integer_nonzerop (i_0));
  ASSERT_FALSE (integer_nonzerop (wr_i_0));
  ASSERT_TRUE (integer_nonzerop (i_1));
  ASSERT_TRUE (integer_nonzerop (wr_i_1));
  ASSERT_TRUE (integer_nonzerop (i_m1));
  ASSERT_TRUE (integer_nonzerop (wr_i_m1));
  ASSERT_FALSE (integer_nonzerop (f_0));
  ASSERT_FALSE (integer_nonzerop (wr_f_0));
  ASSERT_FALSE (integer_nonzerop (f_1));
  ASSERT_FALSE (integer_nonzerop (wr_f_1));
  ASSERT_FALSE (integer_nonzerop (f_m1));
  ASSERT_FALSE (integer_nonzerop (wr_f_m1));
  ASSERT_FALSE (integer_nonzerop (c_i_0));
  ASSERT_TRUE (integer_nonzerop (c_i_1));
  ASSERT_TRUE (integer_nonzerop (c_i_m1));
  ASSERT_FALSE (integer_nonzerop (c_f_0));
  ASSERT_FALSE (integer_nonzerop (c_f_1));
  ASSERT_FALSE (integer_nonzerop (c_f_m1));
  /* Test real_zerop. */
  ASSERT_FALSE (real_zerop (i_0));
  ASSERT_FALSE (real_zerop (wr_i_0));
  ASSERT_FALSE (real_zerop (i_1));
  ASSERT_FALSE (real_zerop (wr_i_1));
  ASSERT_FALSE (real_zerop (i_m1));
  ASSERT_FALSE (real_zerop (wr_i_m1));
  ASSERT_TRUE (real_zerop (f_0));
  ASSERT_TRUE (real_zerop (wr_f_0));
  ASSERT_FALSE (real_zerop (f_1));
  ASSERT_FALSE (real_zerop (wr_f_1));
  ASSERT_FALSE (real_zerop (f_m1));
  ASSERT_FALSE (real_zerop (wr_f_m1));
  ASSERT_FALSE (real_zerop (c_i_0));
  ASSERT_FALSE (real_zerop (c_i_1));
  ASSERT_FALSE (real_zerop (c_i_m1));
  ASSERT_TRUE (real_zerop (c_f_0));
  ASSERT_FALSE (real_zerop (c_f_1));
  ASSERT_FALSE (real_zerop (c_f_m1));
  /* Test real_onep. */
  ASSERT_FALSE (real_onep (i_0));
  ASSERT_FALSE (real_onep (wr_i_0));
  ASSERT_FALSE (real_onep (i_1));
  ASSERT_FALSE (real_onep (wr_i_1));
  ASSERT_FALSE (real_onep (i_m1));
  ASSERT_FALSE (real_onep (wr_i_m1));
  ASSERT_FALSE (real_onep (f_0));
  ASSERT_FALSE (real_onep (wr_f_0));
  ASSERT_TRUE (real_onep (f_1));
  ASSERT_TRUE (real_onep (wr_f_1));
  ASSERT_FALSE (real_onep (f_m1));
  ASSERT_FALSE (real_onep (wr_f_m1));
  ASSERT_FALSE (real_onep (c_i_0));
  ASSERT_FALSE (real_onep (c_i_1));
  ASSERT_FALSE (real_onep (c_i_m1));
  ASSERT_FALSE (real_onep (c_f_0));
  ASSERT_TRUE (real_onep (c_f_1));
  ASSERT_FALSE (real_onep (c_f_m1));
  /* Test real_minus_onep. */
  ASSERT_FALSE (real_minus_onep (i_0));
  ASSERT_FALSE (real_minus_onep (wr_i_0));
  ASSERT_FALSE (real_minus_onep (i_1));
  ASSERT_FALSE (real_minus_onep (wr_i_1));
  ASSERT_FALSE (real_minus_onep (i_m1));
  ASSERT_FALSE (real_minus_onep (wr_i_m1));
  ASSERT_FALSE (real_minus_onep (f_0));
  ASSERT_FALSE (real_minus_onep (wr_f_0));
  ASSERT_FALSE (real_minus_onep (f_1));
  ASSERT_FALSE (real_minus_onep (wr_f_1));
  ASSERT_TRUE (real_minus_onep (f_m1));
  ASSERT_TRUE (real_minus_onep (wr_f_m1));
  ASSERT_FALSE (real_minus_onep (c_i_0));
  ASSERT_FALSE (real_minus_onep (c_i_1));
  ASSERT_FALSE (real_minus_onep (c_i_m1));
  ASSERT_FALSE (real_minus_onep (c_f_0));
  ASSERT_FALSE (real_minus_onep (c_f_1));
  ASSERT_TRUE (real_minus_onep (c_f_m1));
  /* Test zerop. */
  ASSERT_TRUE (zerop (i_0));
  ASSERT_TRUE (zerop (wr_i_0));
  ASSERT_FALSE (zerop (i_1));
  ASSERT_FALSE (zerop (wr_i_1));
  ASSERT_FALSE (zerop (i_m1));
  ASSERT_FALSE (zerop (wr_i_m1));
  ASSERT_TRUE (zerop (f_0));
  ASSERT_TRUE (zerop (wr_f_0));
  ASSERT_FALSE (zerop (f_1));
  ASSERT_FALSE (zerop (wr_f_1));
  ASSERT_FALSE (zerop (f_m1));
  ASSERT_FALSE (zerop (wr_f_m1));
  ASSERT_TRUE (zerop (c_i_0));
  ASSERT_FALSE (zerop (c_i_1));
  ASSERT_FALSE (zerop (c_i_m1));
  ASSERT_TRUE (zerop (c_f_0));
  ASSERT_FALSE (zerop (c_f_1));
  ASSERT_FALSE (zerop (c_f_m1));
  /* Test tree_expr_nonnegative_p. */
  ASSERT_TRUE (tree_expr_nonnegative_p (i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (i_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_i_m1));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_0));
  ASSERT_TRUE (tree_expr_nonnegative_p (f_1));
  ASSERT_TRUE (tree_expr_nonnegative_p (wr_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (wr_f_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_i_m1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_0));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_1));
  ASSERT_FALSE (tree_expr_nonnegative_p (c_f_m1));
  /* Test tree_expr_nonzero_p. */
  ASSERT_FALSE (tree_expr_nonzero_p (i_0));
  ASSERT_FALSE (tree_expr_nonzero_p (wr_i_0));
  ASSERT_TRUE (tree_expr_nonzero_p (i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_1));
  ASSERT_TRUE (tree_expr_nonzero_p (i_m1));
  ASSERT_TRUE (tree_expr_nonzero_p (wr_i_m1));
  /* Test integer_valued_real_p. */
  ASSERT_FALSE (integer_valued_real_p (i_0));
  ASSERT_TRUE (integer_valued_real_p (f_0));
  ASSERT_TRUE (integer_valued_real_p (wr_f_0));
  ASSERT_TRUE (integer_valued_real_p (f_1));
  ASSERT_TRUE (integer_valued_real_p (wr_f_1));
  /* Test integer_pow2p. */
  ASSERT_FALSE (integer_pow2p (i_0));
  ASSERT_TRUE (integer_pow2p (i_1));
  ASSERT_TRUE (integer_pow2p (wr_i_1));
  /* Test uniform_integer_cst_p. */
  ASSERT_TRUE (uniform_integer_cst_p (i_0));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_0));
  ASSERT_TRUE (uniform_integer_cst_p (i_1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_1));
  ASSERT_TRUE (uniform_integer_cst_p (i_m1));
  ASSERT_TRUE (uniform_integer_cst_p (wr_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (f_0));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (f_1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (wr_f_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_i_m1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_0));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_1));
  ASSERT_FALSE (uniform_integer_cst_p (c_f_m1));
}
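/* For illustration only (not part of the selftests): a caller elsewhere in
   the compiler would typically build a constant with the usual tree.h
   constructors and then query one of the predicates tested above, e.g.

     tree two = build_int_cst (integer_type_node, 2);
     if (integer_pow2p (two) && !integer_zerop (two))
       ...turn a multiplication by TWO into a shift...

   "two" is just a local name for this sketch; build_int_cst, integer_pow2p
   and integer_zerop are the entry points exercised by test_predicates.  */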
/* Check that string escaping works correctly. */

static void
test_escaped_strings (void)
{
  int saved_cutoff;
  escaped_string msg;

  msg.escape (NULL);
  /* ASSERT_STREQ does not accept NULL as a valid test
     result, so we have to use ASSERT_EQ instead. */
  ASSERT_EQ (NULL, (const char *) msg);

  msg.escape ("");
  ASSERT_STREQ ("", (const char *) msg);

  msg.escape ("foobar");
  ASSERT_STREQ ("foobar", (const char *) msg);

  /* Ensure that we have -fmessage-length set to 0. */
  saved_cutoff = pp_line_cutoff (global_dc->printer);
  pp_line_cutoff (global_dc->printer) = 0;

  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\\n\\r\\t\\v", (const char *) msg);

  /* Now repeat the tests with -fmessage-length set to 5. */
  pp_line_cutoff (global_dc->printer) = 5;

  /* Note that the newline is not translated into an escape. */
  msg.escape ("foo\nbar");
  ASSERT_STREQ ("foo\nbar", (const char *) msg);

  msg.escape ("\a\b\f\n\r\t\v");
  ASSERT_STREQ ("\\a\\b\\f\n\\r\\t\\v", (const char *) msg);

  /* Restore the original message length setting. */
  pp_line_cutoff (global_dc->printer) = saved_cutoff;
}
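/* For illustration only: outside of the selftests, escaped_string is used
   as a short-lived buffer whose conversion to const char * feeds a
   diagnostic, along the lines of

     escaped_string m;
     m.escape (some_message);
     warning (OPT_Wdeprecated_declarations, "%qs is deprecated: %s",
	      name, (const char *) m);

   The identifiers some_message and name above are placeholders for this
   sketch; escaped_string::escape and the const char * conversion are the
   interfaces exercised by test_escaped_strings.  */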
/* Run all of the selftests within this file. */

void
tree_c_tests ()
{
  test_integer_constants ();
  test_identifiers ();
  test_vector_cst_patterns ();
  test_location_wrappers ();
  test_predicates ();
  test_escaped_strings ();
}

} // namespace selftest

#endif /* CHECKING_P */

#include "gt-tree.h"