1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
27 It is intended to be language-independent but can occasionally
28 call language-dependent routines. */
32 #include "coretypes.h"
37 #include "tree-pass.h"
40 #include "diagnostic.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
47 #include "toplev.h" /* get_random_seed */
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
65 #include "stringpool.h"
69 #include "tree-vector-builder.h"
71 /* Tree code classes. */
73 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
74 #define END_OF_BASE_TREE_CODES tcc_exceptional,
76 const enum tree_code_class tree_code_type
[] = {
77 #include "all-tree.def"
81 #undef END_OF_BASE_TREE_CODES
83 /* Table indexed by tree code giving number of expression
84 operands beyond the fixed part of the node structure.
85 Not used for types or decls. */
87 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
88 #define END_OF_BASE_TREE_CODES 0,
90 const unsigned char tree_code_length
[] = {
91 #include "all-tree.def"
95 #undef END_OF_BASE_TREE_CODES
97 /* Names of tree components.
98 Used for printing out the tree and error messages. */
99 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
100 #define END_OF_BASE_TREE_CODES "@dummy",
102 static const char *const tree_code_name
[] = {
103 #include "all-tree.def"
107 #undef END_OF_BASE_TREE_CODES
109 /* Each tree code class has an associated string representation.
110 These must correspond to the tree_code_class entries. */
112 const char *const tree_code_class_strings
[] =
127 /* obstack.[ch] explicitly declined to prototype this. */
128 extern int _obstack_allocated_p (struct obstack
*h
, void *obj
);
130 /* Statistics-gathering stuff. */
132 static int tree_code_counts
[MAX_TREE_CODES
];
133 int tree_node_counts
[(int) all_kinds
];
134 int tree_node_sizes
[(int) all_kinds
];
136 /* Keep in sync with tree.h:enum tree_node_kind. */
137 static const char * const tree_node_kind_names
[] = {
156 /* Unique id for next decl created. */
157 static GTY(()) int next_decl_uid
;
158 /* Unique id for next type created. */
159 static GTY(()) unsigned next_type_uid
= 1;
160 /* Unique id for next debug decl created. Use negative numbers,
161 to catch erroneous uses. */
162 static GTY(()) int next_debug_decl_uid
;
164 /* Since we cannot rehash a type after it is in the table, we have to
165 keep the hash code. */
167 struct GTY((for_user
)) type_hash
{
172 /* Initial size of the hash table (rounded to next prime). */
173 #define TYPE_HASH_INITIAL_SIZE 1000
175 struct type_cache_hasher
: ggc_cache_ptr_hash
<type_hash
>
177 static hashval_t
hash (type_hash
*t
) { return t
->hash
; }
178 static bool equal (type_hash
*a
, type_hash
*b
);
181 keep_cache_entry (type_hash
*&t
)
183 return ggc_marked_p (t
->type
);
187 /* Now here is the hash table. When recording a type, it is added to
188 the slot whose index is the hash code. Note that the hash table is
189 used for several kinds of types (function types, array types and
190 array index range types, for now). While all these live in the
191 same table, they are completely independent, and the hash code is
192 computed differently for each of these. */
194 static GTY ((cache
)) hash_table
<type_cache_hasher
> *type_hash_table
;
196 /* Hash table and temporary node for larger integer const values. */
197 static GTY (()) tree int_cst_node
;
199 struct int_cst_hasher
: ggc_cache_ptr_hash
<tree_node
>
201 static hashval_t
hash (tree t
);
202 static bool equal (tree x
, tree y
);
205 static GTY ((cache
)) hash_table
<int_cst_hasher
> *int_cst_hash_table
;
207 /* Hash table for optimization flags and target option flags. Use the same
208 hash table for both sets of options. Nodes for building the current
209 optimization and target option nodes. The assumption is most of the time
210 the options created will already be in the hash table, so we avoid
211 allocating and freeing up a node repeatedly. */
212 static GTY (()) tree cl_optimization_node
;
213 static GTY (()) tree cl_target_option_node
;
215 struct cl_option_hasher
: ggc_cache_ptr_hash
<tree_node
>
217 static hashval_t
hash (tree t
);
218 static bool equal (tree x
, tree y
);
221 static GTY ((cache
)) hash_table
<cl_option_hasher
> *cl_option_hash_table
;
223 /* General tree->tree mapping structure for use in hash tables. */
227 hash_table
<tree_decl_map_cache_hasher
> *debug_expr_for_decl
;
230 hash_table
<tree_decl_map_cache_hasher
> *value_expr_for_decl
;
232 struct tree_vec_map_cache_hasher
: ggc_cache_ptr_hash
<tree_vec_map
>
234 static hashval_t
hash (tree_vec_map
*m
) { return DECL_UID (m
->base
.from
); }
237 equal (tree_vec_map
*a
, tree_vec_map
*b
)
239 return a
->base
.from
== b
->base
.from
;
243 keep_cache_entry (tree_vec_map
*&m
)
245 return ggc_marked_p (m
->base
.from
);
250 hash_table
<tree_vec_map_cache_hasher
> *debug_args_for_decl
;
252 static void set_type_quals (tree
, int);
253 static void print_type_hash_statistics (void);
254 static void print_debug_expr_statistics (void);
255 static void print_value_expr_statistics (void);
257 tree global_trees
[TI_MAX
];
258 tree integer_types
[itk_none
];
260 bool int_n_enabled_p
[NUM_INT_N_ENTS
];
261 struct int_n_trees_t int_n_trees
[NUM_INT_N_ENTS
];
263 bool tree_contains_struct
[MAX_TREE_CODES
][64];
265 /* Number of operands for each OpenMP clause. */
266 unsigned const char omp_clause_num_ops
[] =
268 0, /* OMP_CLAUSE_ERROR */
269 1, /* OMP_CLAUSE_PRIVATE */
270 1, /* OMP_CLAUSE_SHARED */
271 1, /* OMP_CLAUSE_FIRSTPRIVATE */
272 2, /* OMP_CLAUSE_LASTPRIVATE */
273 5, /* OMP_CLAUSE_REDUCTION */
274 1, /* OMP_CLAUSE_COPYIN */
275 1, /* OMP_CLAUSE_COPYPRIVATE */
276 3, /* OMP_CLAUSE_LINEAR */
277 2, /* OMP_CLAUSE_ALIGNED */
278 1, /* OMP_CLAUSE_DEPEND */
279 1, /* OMP_CLAUSE_UNIFORM */
280 1, /* OMP_CLAUSE_TO_DECLARE */
281 1, /* OMP_CLAUSE_LINK */
282 2, /* OMP_CLAUSE_FROM */
283 2, /* OMP_CLAUSE_TO */
284 2, /* OMP_CLAUSE_MAP */
285 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
286 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
287 2, /* OMP_CLAUSE__CACHE_ */
288 2, /* OMP_CLAUSE_GANG */
289 1, /* OMP_CLAUSE_ASYNC */
290 1, /* OMP_CLAUSE_WAIT */
291 0, /* OMP_CLAUSE_AUTO */
292 0, /* OMP_CLAUSE_SEQ */
293 1, /* OMP_CLAUSE__LOOPTEMP_ */
294 1, /* OMP_CLAUSE_IF */
295 1, /* OMP_CLAUSE_NUM_THREADS */
296 1, /* OMP_CLAUSE_SCHEDULE */
297 0, /* OMP_CLAUSE_NOWAIT */
298 1, /* OMP_CLAUSE_ORDERED */
299 0, /* OMP_CLAUSE_DEFAULT */
300 3, /* OMP_CLAUSE_COLLAPSE */
301 0, /* OMP_CLAUSE_UNTIED */
302 1, /* OMP_CLAUSE_FINAL */
303 0, /* OMP_CLAUSE_MERGEABLE */
304 1, /* OMP_CLAUSE_DEVICE */
305 1, /* OMP_CLAUSE_DIST_SCHEDULE */
306 0, /* OMP_CLAUSE_INBRANCH */
307 0, /* OMP_CLAUSE_NOTINBRANCH */
308 1, /* OMP_CLAUSE_NUM_TEAMS */
309 1, /* OMP_CLAUSE_THREAD_LIMIT */
310 0, /* OMP_CLAUSE_PROC_BIND */
311 1, /* OMP_CLAUSE_SAFELEN */
312 1, /* OMP_CLAUSE_SIMDLEN */
313 0, /* OMP_CLAUSE_FOR */
314 0, /* OMP_CLAUSE_PARALLEL */
315 0, /* OMP_CLAUSE_SECTIONS */
316 0, /* OMP_CLAUSE_TASKGROUP */
317 1, /* OMP_CLAUSE_PRIORITY */
318 1, /* OMP_CLAUSE_GRAINSIZE */
319 1, /* OMP_CLAUSE_NUM_TASKS */
320 0, /* OMP_CLAUSE_NOGROUP */
321 0, /* OMP_CLAUSE_THREADS */
322 0, /* OMP_CLAUSE_SIMD */
323 1, /* OMP_CLAUSE_HINT */
324 0, /* OMP_CLAUSE_DEFALTMAP */
325 1, /* OMP_CLAUSE__SIMDUID_ */
326 0, /* OMP_CLAUSE__SIMT_ */
327 0, /* OMP_CLAUSE_INDEPENDENT */
328 1, /* OMP_CLAUSE_WORKER */
329 1, /* OMP_CLAUSE_VECTOR */
330 1, /* OMP_CLAUSE_NUM_GANGS */
331 1, /* OMP_CLAUSE_NUM_WORKERS */
332 1, /* OMP_CLAUSE_VECTOR_LENGTH */
333 3, /* OMP_CLAUSE_TILE */
334 2, /* OMP_CLAUSE__GRIDDIM_ */
337 const char * const omp_clause_code_name
[] =
409 /* Return the tree node structure used by tree code CODE. */
411 static inline enum tree_node_structure_enum
412 tree_node_structure_for_code (enum tree_code code
)
414 switch (TREE_CODE_CLASS (code
))
416 case tcc_declaration
:
421 return TS_FIELD_DECL
;
427 return TS_LABEL_DECL
;
429 return TS_RESULT_DECL
;
430 case DEBUG_EXPR_DECL
:
433 return TS_CONST_DECL
;
437 return TS_FUNCTION_DECL
;
438 case TRANSLATION_UNIT_DECL
:
439 return TS_TRANSLATION_UNIT_DECL
;
441 return TS_DECL_NON_COMMON
;
445 return TS_TYPE_NON_COMMON
;
454 default: /* tcc_constant and tcc_exceptional */
459 /* tcc_constant cases. */
460 case VOID_CST
: return TS_TYPED
;
461 case INTEGER_CST
: return TS_INT_CST
;
462 case REAL_CST
: return TS_REAL_CST
;
463 case FIXED_CST
: return TS_FIXED_CST
;
464 case COMPLEX_CST
: return TS_COMPLEX
;
465 case VECTOR_CST
: return TS_VECTOR
;
466 case STRING_CST
: return TS_STRING
;
467 /* tcc_exceptional cases. */
468 case ERROR_MARK
: return TS_COMMON
;
469 case IDENTIFIER_NODE
: return TS_IDENTIFIER
;
470 case TREE_LIST
: return TS_LIST
;
471 case TREE_VEC
: return TS_VEC
;
472 case SSA_NAME
: return TS_SSA_NAME
;
473 case PLACEHOLDER_EXPR
: return TS_COMMON
;
474 case STATEMENT_LIST
: return TS_STATEMENT_LIST
;
475 case BLOCK
: return TS_BLOCK
;
476 case CONSTRUCTOR
: return TS_CONSTRUCTOR
;
477 case TREE_BINFO
: return TS_BINFO
;
478 case OMP_CLAUSE
: return TS_OMP_CLAUSE
;
479 case OPTIMIZATION_NODE
: return TS_OPTIMIZATION
;
480 case TARGET_OPTION_NODE
: return TS_TARGET_OPTION
;
488 /* Initialize tree_contains_struct to describe the hierarchy of tree
492 initialize_tree_contains_struct (void)
496 for (i
= ERROR_MARK
; i
< LAST_AND_UNUSED_TREE_CODE
; i
++)
499 enum tree_node_structure_enum ts_code
;
501 code
= (enum tree_code
) i
;
502 ts_code
= tree_node_structure_for_code (code
);
504 /* Mark the TS structure itself. */
505 tree_contains_struct
[code
][ts_code
] = 1;
507 /* Mark all the structures that TS is derived from. */
512 case TS_OPTIMIZATION
:
513 case TS_TARGET_OPTION
:
527 case TS_STATEMENT_LIST
:
528 MARK_TS_TYPED (code
);
532 case TS_DECL_MINIMAL
:
538 MARK_TS_COMMON (code
);
541 case TS_TYPE_WITH_LANG_SPECIFIC
:
542 MARK_TS_TYPE_COMMON (code
);
545 case TS_TYPE_NON_COMMON
:
546 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code
);
550 MARK_TS_DECL_MINIMAL (code
);
555 MARK_TS_DECL_COMMON (code
);
558 case TS_DECL_NON_COMMON
:
559 MARK_TS_DECL_WITH_VIS (code
);
562 case TS_DECL_WITH_VIS
:
566 MARK_TS_DECL_WRTL (code
);
570 MARK_TS_DECL_COMMON (code
);
574 MARK_TS_DECL_WITH_VIS (code
);
578 case TS_FUNCTION_DECL
:
579 MARK_TS_DECL_NON_COMMON (code
);
582 case TS_TRANSLATION_UNIT_DECL
:
583 MARK_TS_DECL_COMMON (code
);
591 /* Basic consistency checks for attributes used in fold. */
592 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_NON_COMMON
]);
593 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_NON_COMMON
]);
594 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_COMMON
]);
595 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_COMMON
]);
596 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_COMMON
]);
597 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_COMMON
]);
598 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_COMMON
]);
599 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_COMMON
]);
600 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_COMMON
]);
601 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_COMMON
]);
602 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_COMMON
]);
603 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WRTL
]);
604 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_WRTL
]);
605 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_WRTL
]);
606 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WRTL
]);
607 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_WRTL
]);
608 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_MINIMAL
]);
609 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_MINIMAL
]);
610 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_MINIMAL
]);
611 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_MINIMAL
]);
612 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_MINIMAL
]);
613 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_MINIMAL
]);
614 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_MINIMAL
]);
615 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_MINIMAL
]);
616 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_MINIMAL
]);
617 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WITH_VIS
]);
618 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WITH_VIS
]);
619 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_WITH_VIS
]);
620 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_VAR_DECL
]);
621 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_FIELD_DECL
]);
622 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_PARM_DECL
]);
623 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_LABEL_DECL
]);
624 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_RESULT_DECL
]);
625 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_CONST_DECL
]);
626 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_TYPE_DECL
]);
627 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_FUNCTION_DECL
]);
628 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_MINIMAL
]);
629 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_COMMON
]);
630 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_MINIMAL
]);
631 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_COMMON
]);
640 /* Initialize the hash table of types. */
642 = hash_table
<type_cache_hasher
>::create_ggc (TYPE_HASH_INITIAL_SIZE
);
645 = hash_table
<tree_decl_map_cache_hasher
>::create_ggc (512);
648 = hash_table
<tree_decl_map_cache_hasher
>::create_ggc (512);
650 int_cst_hash_table
= hash_table
<int_cst_hasher
>::create_ggc (1024);
652 int_cst_node
= make_int_cst (1, 1);
654 cl_option_hash_table
= hash_table
<cl_option_hasher
>::create_ggc (64);
656 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
657 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
659 /* Initialize the tree_contains_struct array. */
660 initialize_tree_contains_struct ();
661 lang_hooks
.init_ts ();
665 /* The name of the object as the assembler will see it (but before any
666 translations made by ASM_OUTPUT_LABELREF). Often this is the same
667 as DECL_NAME. It is an IDENTIFIER_NODE. */
669 decl_assembler_name (tree decl
)
671 if (!DECL_ASSEMBLER_NAME_SET_P (decl
))
672 lang_hooks
.set_decl_assembler_name (decl
);
673 return DECL_ASSEMBLER_NAME_RAW (decl
);
676 /* The DECL_ASSEMBLER_NAME_RAW of DECL is being explicitly set to NAME
677 (either of which may be NULL). Inform the FE, if this changes the
681 overwrite_decl_assembler_name (tree decl
, tree name
)
683 if (DECL_ASSEMBLER_NAME_RAW (decl
) != name
)
684 lang_hooks
.overwrite_decl_assembler_name (decl
, name
);
687 /* When the target supports COMDAT groups, this indicates which group the
688 DECL is associated with. This can be either an IDENTIFIER_NODE or a
689 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
691 decl_comdat_group (const_tree node
)
693 struct symtab_node
*snode
= symtab_node::get (node
);
696 return snode
->get_comdat_group ();
699 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
701 decl_comdat_group_id (const_tree node
)
703 struct symtab_node
*snode
= symtab_node::get (node
);
706 return snode
->get_comdat_group_id ();
709 /* When the target supports named section, return its name as IDENTIFIER_NODE
710 or NULL if it is in no section. */
712 decl_section_name (const_tree node
)
714 struct symtab_node
*snode
= symtab_node::get (node
);
717 return snode
->get_section ();
720 /* Set section name of NODE to VALUE (that is expected to be
723 set_decl_section_name (tree node
, const char *value
)
725 struct symtab_node
*snode
;
729 snode
= symtab_node::get (node
);
733 else if (VAR_P (node
))
734 snode
= varpool_node::get_create (node
);
736 snode
= cgraph_node::get_create (node
);
737 snode
->set_section (value
);
740 /* Return TLS model of a variable NODE. */
742 decl_tls_model (const_tree node
)
744 struct varpool_node
*snode
= varpool_node::get (node
);
746 return TLS_MODEL_NONE
;
747 return snode
->tls_model
;
750 /* Set TLS model of variable NODE to MODEL. */
752 set_decl_tls_model (tree node
, enum tls_model model
)
754 struct varpool_node
*vnode
;
756 if (model
== TLS_MODEL_NONE
)
758 vnode
= varpool_node::get (node
);
763 vnode
= varpool_node::get_create (node
);
764 vnode
->tls_model
= model
;
767 /* Compute the number of bytes occupied by a tree with code CODE.
768 This function cannot be used for nodes that have variable sizes,
769 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
771 tree_code_size (enum tree_code code
)
773 switch (TREE_CODE_CLASS (code
))
775 case tcc_declaration
: /* A decl node */
778 case FIELD_DECL
: return sizeof (tree_field_decl
);
779 case PARM_DECL
: return sizeof (tree_parm_decl
);
780 case VAR_DECL
: return sizeof (tree_var_decl
);
781 case LABEL_DECL
: return sizeof (tree_label_decl
);
782 case RESULT_DECL
: return sizeof (tree_result_decl
);
783 case CONST_DECL
: return sizeof (tree_const_decl
);
784 case TYPE_DECL
: return sizeof (tree_type_decl
);
785 case FUNCTION_DECL
: return sizeof (tree_function_decl
);
786 case DEBUG_EXPR_DECL
: return sizeof (tree_decl_with_rtl
);
787 case TRANSLATION_UNIT_DECL
: return sizeof (tree_translation_unit_decl
);
790 case NAMELIST_DECL
: return sizeof (tree_decl_non_common
);
792 gcc_checking_assert (code
>= NUM_TREE_CODES
);
793 return lang_hooks
.tree_size (code
);
796 case tcc_type
: /* a type node */
807 case FIXED_POINT_TYPE
:
813 case QUAL_UNION_TYPE
:
815 case POINTER_BOUNDS_TYPE
:
818 case LANG_TYPE
: return sizeof (tree_type_non_common
);
820 gcc_checking_assert (code
>= NUM_TREE_CODES
);
821 return lang_hooks
.tree_size (code
);
824 case tcc_reference
: /* a reference */
825 case tcc_expression
: /* an expression */
826 case tcc_statement
: /* an expression with side effects */
827 case tcc_comparison
: /* a comparison expression */
828 case tcc_unary
: /* a unary arithmetic expression */
829 case tcc_binary
: /* a binary arithmetic expression */
830 return (sizeof (struct tree_exp
)
831 + (TREE_CODE_LENGTH (code
) - 1) * sizeof (tree
));
833 case tcc_constant
: /* a constant */
836 case VOID_CST
: return sizeof (tree_typed
);
837 case INTEGER_CST
: gcc_unreachable ();
838 case REAL_CST
: return sizeof (tree_real_cst
);
839 case FIXED_CST
: return sizeof (tree_fixed_cst
);
840 case COMPLEX_CST
: return sizeof (tree_complex
);
841 case VECTOR_CST
: gcc_unreachable ();
842 case STRING_CST
: gcc_unreachable ();
844 gcc_checking_assert (code
>= NUM_TREE_CODES
);
845 return lang_hooks
.tree_size (code
);
848 case tcc_exceptional
: /* something random, like an identifier. */
851 case IDENTIFIER_NODE
: return lang_hooks
.identifier_size
;
852 case TREE_LIST
: return sizeof (tree_list
);
855 case PLACEHOLDER_EXPR
: return sizeof (tree_common
);
857 case TREE_VEC
: gcc_unreachable ();
858 case OMP_CLAUSE
: gcc_unreachable ();
860 case SSA_NAME
: return sizeof (tree_ssa_name
);
862 case STATEMENT_LIST
: return sizeof (tree_statement_list
);
863 case BLOCK
: return sizeof (struct tree_block
);
864 case CONSTRUCTOR
: return sizeof (tree_constructor
);
865 case OPTIMIZATION_NODE
: return sizeof (tree_optimization_option
);
866 case TARGET_OPTION_NODE
: return sizeof (tree_target_option
);
869 gcc_checking_assert (code
>= NUM_TREE_CODES
);
870 return lang_hooks
.tree_size (code
);
878 /* Compute the number of bytes occupied by NODE. This routine only
879 looks at TREE_CODE, except for those nodes that have variable sizes. */
881 tree_size (const_tree node
)
883 const enum tree_code code
= TREE_CODE (node
);
887 return (sizeof (struct tree_int_cst
)
888 + (TREE_INT_CST_EXT_NUNITS (node
) - 1) * sizeof (HOST_WIDE_INT
));
891 return (offsetof (struct tree_binfo
, base_binfos
)
893 ::embedded_size (BINFO_N_BASE_BINFOS (node
)));
896 return (sizeof (struct tree_vec
)
897 + (TREE_VEC_LENGTH (node
) - 1) * sizeof (tree
));
900 return (sizeof (struct tree_vector
)
901 + (vector_cst_encoded_nelts (node
) - 1) * sizeof (tree
));
904 return TREE_STRING_LENGTH (node
) + offsetof (struct tree_string
, str
) + 1;
907 return (sizeof (struct tree_omp_clause
)
908 + (omp_clause_num_ops
[OMP_CLAUSE_CODE (node
)] - 1)
912 if (TREE_CODE_CLASS (code
) == tcc_vl_exp
)
913 return (sizeof (struct tree_exp
)
914 + (VL_EXP_OPERAND_LENGTH (node
) - 1) * sizeof (tree
));
916 return tree_code_size (code
);
920 /* Record interesting allocation statistics for a tree node with CODE
924 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED
,
925 size_t length ATTRIBUTE_UNUSED
)
927 enum tree_code_class type
= TREE_CODE_CLASS (code
);
930 if (!GATHER_STATISTICS
)
935 case tcc_declaration
: /* A decl node */
939 case tcc_type
: /* a type node */
943 case tcc_statement
: /* an expression with side effects */
947 case tcc_reference
: /* a reference */
951 case tcc_expression
: /* an expression */
952 case tcc_comparison
: /* a comparison expression */
953 case tcc_unary
: /* a unary arithmetic expression */
954 case tcc_binary
: /* a binary arithmetic expression */
958 case tcc_constant
: /* a constant */
962 case tcc_exceptional
: /* something random, like an identifier. */
965 case IDENTIFIER_NODE
:
978 kind
= ssa_name_kind
;
990 kind
= omp_clause_kind
;
1007 tree_code_counts
[(int) code
]++;
1008 tree_node_counts
[(int) kind
]++;
1009 tree_node_sizes
[(int) kind
] += length
;
1012 /* Allocate and return a new UID from the DECL_UID namespace. */
1015 allocate_decl_uid (void)
1017 return next_decl_uid
++;
1020 /* Return a newly allocated node of code CODE. For decl and type
1021 nodes, some other fields are initialized. The rest of the node is
1022 initialized to zero. This function cannot be used for TREE_VEC,
1023 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1026 Achoo! I got a code in the node. */
1029 make_node (enum tree_code code MEM_STAT_DECL
)
1032 enum tree_code_class type
= TREE_CODE_CLASS (code
);
1033 size_t length
= tree_code_size (code
);
1035 record_node_allocation_statistics (code
, length
);
1037 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1038 TREE_SET_CODE (t
, code
);
1043 TREE_SIDE_EFFECTS (t
) = 1;
1046 case tcc_declaration
:
1047 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_COMMON
))
1049 if (code
== FUNCTION_DECL
)
1051 SET_DECL_ALIGN (t
, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY
));
1052 SET_DECL_MODE (t
, FUNCTION_MODE
);
1055 SET_DECL_ALIGN (t
, 1);
1057 DECL_SOURCE_LOCATION (t
) = input_location
;
1058 if (TREE_CODE (t
) == DEBUG_EXPR_DECL
)
1059 DECL_UID (t
) = --next_debug_decl_uid
;
1062 DECL_UID (t
) = allocate_decl_uid ();
1063 SET_DECL_PT_UID (t
, -1);
1065 if (TREE_CODE (t
) == LABEL_DECL
)
1066 LABEL_DECL_UID (t
) = -1;
1071 TYPE_UID (t
) = next_type_uid
++;
1072 SET_TYPE_ALIGN (t
, BITS_PER_UNIT
);
1073 TYPE_USER_ALIGN (t
) = 0;
1074 TYPE_MAIN_VARIANT (t
) = t
;
1075 TYPE_CANONICAL (t
) = t
;
1077 /* Default to no attributes for type, but let target change that. */
1078 TYPE_ATTRIBUTES (t
) = NULL_TREE
;
1079 targetm
.set_default_type_attributes (t
);
1081 /* We have not yet computed the alias set for this type. */
1082 TYPE_ALIAS_SET (t
) = -1;
1086 TREE_CONSTANT (t
) = 1;
1089 case tcc_expression
:
1095 case PREDECREMENT_EXPR
:
1096 case PREINCREMENT_EXPR
:
1097 case POSTDECREMENT_EXPR
:
1098 case POSTINCREMENT_EXPR
:
1099 /* All of these have side-effects, no matter what their
1101 TREE_SIDE_EFFECTS (t
) = 1;
1109 case tcc_exceptional
:
1112 case TARGET_OPTION_NODE
:
1113 TREE_TARGET_OPTION(t
)
1114 = ggc_cleared_alloc
<struct cl_target_option
> ();
1117 case OPTIMIZATION_NODE
:
1118 TREE_OPTIMIZATION (t
)
1119 = ggc_cleared_alloc
<struct cl_optimization
> ();
1128 /* Other classes need no special treatment. */
1135 /* Free tree node. */
1138 free_node (tree node
)
1140 enum tree_code code
= TREE_CODE (node
);
1141 if (GATHER_STATISTICS
)
1143 tree_code_counts
[(int) TREE_CODE (node
)]--;
1144 tree_node_counts
[(int) t_kind
]--;
1145 tree_node_sizes
[(int) t_kind
] -= tree_size (node
);
1147 if (CODE_CONTAINS_STRUCT (code
, TS_CONSTRUCTOR
))
1148 vec_free (CONSTRUCTOR_ELTS (node
));
1149 else if (code
== BLOCK
)
1150 vec_free (BLOCK_NONLOCALIZED_VARS (node
));
1151 else if (code
== TREE_BINFO
)
1152 vec_free (BINFO_BASE_ACCESSES (node
));
1156 /* Return a new node with the same contents as NODE except that its
1157 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1160 copy_node (tree node MEM_STAT_DECL
)
1163 enum tree_code code
= TREE_CODE (node
);
1166 gcc_assert (code
!= STATEMENT_LIST
);
1168 length
= tree_size (node
);
1169 record_node_allocation_statistics (code
, length
);
1170 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
1171 memcpy (t
, node
, length
);
1173 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
1175 TREE_ASM_WRITTEN (t
) = 0;
1176 TREE_VISITED (t
) = 0;
1178 if (TREE_CODE_CLASS (code
) == tcc_declaration
)
1180 if (code
== DEBUG_EXPR_DECL
)
1181 DECL_UID (t
) = --next_debug_decl_uid
;
1184 DECL_UID (t
) = allocate_decl_uid ();
1185 if (DECL_PT_UID_SET_P (node
))
1186 SET_DECL_PT_UID (t
, DECL_PT_UID (node
));
1188 if ((TREE_CODE (node
) == PARM_DECL
|| VAR_P (node
))
1189 && DECL_HAS_VALUE_EXPR_P (node
))
1191 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (node
));
1192 DECL_HAS_VALUE_EXPR_P (t
) = 1;
1194 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1197 DECL_HAS_DEBUG_EXPR_P (t
) = 0;
1198 t
->decl_with_vis
.symtab_node
= NULL
;
1200 if (VAR_P (node
) && DECL_HAS_INIT_PRIORITY_P (node
))
1202 SET_DECL_INIT_PRIORITY (t
, DECL_INIT_PRIORITY (node
));
1203 DECL_HAS_INIT_PRIORITY_P (t
) = 1;
1205 if (TREE_CODE (node
) == FUNCTION_DECL
)
1207 DECL_STRUCT_FUNCTION (t
) = NULL
;
1208 t
->decl_with_vis
.symtab_node
= NULL
;
1211 else if (TREE_CODE_CLASS (code
) == tcc_type
)
1213 TYPE_UID (t
) = next_type_uid
++;
1214 /* The following is so that the debug code for
1215 the copy is different from the original type.
1216 The two statements usually duplicate each other
1217 (because they clear fields of the same union),
1218 but the optimizer should catch that. */
1219 TYPE_SYMTAB_ADDRESS (t
) = 0;
1220 TYPE_SYMTAB_DIE (t
) = 0;
1222 /* Do not copy the values cache. */
1223 if (TYPE_CACHED_VALUES_P (t
))
1225 TYPE_CACHED_VALUES_P (t
) = 0;
1226 TYPE_CACHED_VALUES (t
) = NULL_TREE
;
1229 else if (code
== TARGET_OPTION_NODE
)
1231 TREE_TARGET_OPTION (t
) = ggc_alloc
<struct cl_target_option
>();
1232 memcpy (TREE_TARGET_OPTION (t
), TREE_TARGET_OPTION (node
),
1233 sizeof (struct cl_target_option
));
1235 else if (code
== OPTIMIZATION_NODE
)
1237 TREE_OPTIMIZATION (t
) = ggc_alloc
<struct cl_optimization
>();
1238 memcpy (TREE_OPTIMIZATION (t
), TREE_OPTIMIZATION (node
),
1239 sizeof (struct cl_optimization
));
1245 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1246 For example, this can copy a list made of TREE_LIST nodes. */
1249 copy_list (tree list
)
1257 head
= prev
= copy_node (list
);
1258 next
= TREE_CHAIN (list
);
1261 TREE_CHAIN (prev
) = copy_node (next
);
1262 prev
= TREE_CHAIN (prev
);
1263 next
= TREE_CHAIN (next
);
1269 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1270 INTEGER_CST with value CST and type TYPE. */
1273 get_int_cst_ext_nunits (tree type
, const wide_int
&cst
)
1275 gcc_checking_assert (cst
.get_precision () == TYPE_PRECISION (type
));
1276 /* We need extra HWIs if CST is an unsigned integer with its
1278 if (TYPE_UNSIGNED (type
) && wi::neg_p (cst
))
1279 return cst
.get_precision () / HOST_BITS_PER_WIDE_INT
+ 1;
1280 return cst
.get_len ();
1283 /* Return a new INTEGER_CST with value CST and type TYPE. */
1286 build_new_int_cst (tree type
, const wide_int
&cst
)
1288 unsigned int len
= cst
.get_len ();
1289 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1290 tree nt
= make_int_cst (len
, ext_len
);
1295 TREE_INT_CST_ELT (nt
, ext_len
)
1296 = zext_hwi (-1, cst
.get_precision () % HOST_BITS_PER_WIDE_INT
);
1297 for (unsigned int i
= len
; i
< ext_len
; ++i
)
1298 TREE_INT_CST_ELT (nt
, i
) = -1;
1300 else if (TYPE_UNSIGNED (type
)
1301 && cst
.get_precision () < len
* HOST_BITS_PER_WIDE_INT
)
1304 TREE_INT_CST_ELT (nt
, len
)
1305 = zext_hwi (cst
.elt (len
),
1306 cst
.get_precision () % HOST_BITS_PER_WIDE_INT
);
1309 for (unsigned int i
= 0; i
< len
; i
++)
1310 TREE_INT_CST_ELT (nt
, i
) = cst
.elt (i
);
1311 TREE_TYPE (nt
) = type
;
1315 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1318 build_int_cst (tree type
, HOST_WIDE_INT low
)
1320 /* Support legacy code. */
1322 type
= integer_type_node
;
1324 return wide_int_to_tree (type
, wi::shwi (low
, TYPE_PRECISION (type
)));
1328 build_int_cstu (tree type
, unsigned HOST_WIDE_INT cst
)
1330 return wide_int_to_tree (type
, wi::uhwi (cst
, TYPE_PRECISION (type
)));
1333 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1336 build_int_cst_type (tree type
, HOST_WIDE_INT low
)
1339 return wide_int_to_tree (type
, wi::shwi (low
, TYPE_PRECISION (type
)));
1342 /* Constructs tree in type TYPE from with value given by CST. Signedness
1343 of CST is assumed to be the same as the signedness of TYPE. */
1346 double_int_to_tree (tree type
, double_int cst
)
1348 return wide_int_to_tree (type
, widest_int::from (cst
, TYPE_SIGN (type
)));
1351 /* We force the wide_int CST to the range of the type TYPE by sign or
1352 zero extending it. OVERFLOWABLE indicates if we are interested in
1353 overflow of the value, when >0 we are only interested in signed
1354 overflow, for <0 we are interested in any overflow. OVERFLOWED
1355 indicates whether overflow has already occurred. CONST_OVERFLOWED
1356 indicates whether constant overflow has already occurred. We force
1357 T's value to be within range of T's type (by setting to 0 or 1 all
1358 the bits outside the type's range). We set TREE_OVERFLOWED if,
1359 OVERFLOWED is nonzero,
1360 or OVERFLOWABLE is >0 and signed overflow occurs
1361 or OVERFLOWABLE is <0 and any overflow occurs
1362 We return a new tree node for the extended wide_int. The node
1363 is shared if no overflow flags are set. */
1367 force_fit_type (tree type
, const wide_int_ref
&cst
,
1368 int overflowable
, bool overflowed
)
1370 signop sign
= TYPE_SIGN (type
);
1372 /* If we need to set overflow flags, return a new unshared node. */
1373 if (overflowed
|| !wi::fits_to_tree_p (cst
, type
))
1377 || (overflowable
> 0 && sign
== SIGNED
))
1379 wide_int tmp
= wide_int::from (cst
, TYPE_PRECISION (type
), sign
);
1380 tree t
= build_new_int_cst (type
, tmp
);
1381 TREE_OVERFLOW (t
) = 1;
1386 /* Else build a shared node. */
1387 return wide_int_to_tree (type
, cst
);
1390 /* These are the hash table functions for the hash table of INTEGER_CST
1391 nodes of a sizetype. */
1393 /* Return the hash code X, an INTEGER_CST. */
1396 int_cst_hasher::hash (tree x
)
1398 const_tree
const t
= x
;
1399 hashval_t code
= TYPE_UID (TREE_TYPE (t
));
1402 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
1403 code
= iterative_hash_host_wide_int (TREE_INT_CST_ELT(t
, i
), code
);
1408 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1409 is the same as that given by *Y, which is the same. */
1412 int_cst_hasher::equal (tree x
, tree y
)
1414 const_tree
const xt
= x
;
1415 const_tree
const yt
= y
;
1417 if (TREE_TYPE (xt
) != TREE_TYPE (yt
)
1418 || TREE_INT_CST_NUNITS (xt
) != TREE_INT_CST_NUNITS (yt
)
1419 || TREE_INT_CST_EXT_NUNITS (xt
) != TREE_INT_CST_EXT_NUNITS (yt
))
1422 for (int i
= 0; i
< TREE_INT_CST_NUNITS (xt
); i
++)
1423 if (TREE_INT_CST_ELT (xt
, i
) != TREE_INT_CST_ELT (yt
, i
))
1429 /* Create an INT_CST node of TYPE and value CST.
1430 The returned node is always shared. For small integers we use a
1431 per-type vector cache, for larger ones we use a single hash table.
1432 The value is extended from its precision according to the sign of
1433 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1434 the upper bits and ensures that hashing and value equality based
1435 upon the underlying HOST_WIDE_INTs works without masking. */
1438 wide_int_to_tree (tree type
, const wide_int_ref
&pcst
)
1445 unsigned int prec
= TYPE_PRECISION (type
);
1446 signop sgn
= TYPE_SIGN (type
);
1448 /* Verify that everything is canonical. */
1449 int l
= pcst
.get_len ();
1452 if (pcst
.elt (l
- 1) == 0)
1453 gcc_checking_assert (pcst
.elt (l
- 2) < 0);
1454 if (pcst
.elt (l
- 1) == HOST_WIDE_INT_M1
)
1455 gcc_checking_assert (pcst
.elt (l
- 2) >= 0);
1458 wide_int cst
= wide_int::from (pcst
, prec
, sgn
);
1459 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1463 /* We just need to store a single HOST_WIDE_INT. */
1465 if (TYPE_UNSIGNED (type
))
1466 hwi
= cst
.to_uhwi ();
1468 hwi
= cst
.to_shwi ();
1470 switch (TREE_CODE (type
))
1473 gcc_assert (hwi
== 0);
1477 case REFERENCE_TYPE
:
1478 case POINTER_BOUNDS_TYPE
:
1479 /* Cache NULL pointer and zero bounds. */
1488 /* Cache false or true. */
1490 if (IN_RANGE (hwi
, 0, 1))
1496 if (TYPE_SIGN (type
) == UNSIGNED
)
1499 limit
= INTEGER_SHARE_LIMIT
;
1500 if (IN_RANGE (hwi
, 0, INTEGER_SHARE_LIMIT
- 1))
1505 /* Cache [-1, N). */
1506 limit
= INTEGER_SHARE_LIMIT
+ 1;
1507 if (IN_RANGE (hwi
, -1, INTEGER_SHARE_LIMIT
- 1))
1521 /* Look for it in the type's vector of small shared ints. */
1522 if (!TYPE_CACHED_VALUES_P (type
))
1524 TYPE_CACHED_VALUES_P (type
) = 1;
1525 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1528 t
= TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
);
1530 /* Make sure no one is clobbering the shared constant. */
1531 gcc_checking_assert (TREE_TYPE (t
) == type
1532 && TREE_INT_CST_NUNITS (t
) == 1
1533 && TREE_INT_CST_OFFSET_NUNITS (t
) == 1
1534 && TREE_INT_CST_EXT_NUNITS (t
) == 1
1535 && TREE_INT_CST_ELT (t
, 0) == hwi
);
1538 /* Create a new shared int. */
1539 t
= build_new_int_cst (type
, cst
);
1540 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1545 /* Use the cache of larger shared ints, using int_cst_node as
1548 TREE_INT_CST_ELT (int_cst_node
, 0) = hwi
;
1549 TREE_TYPE (int_cst_node
) = type
;
1551 tree
*slot
= int_cst_hash_table
->find_slot (int_cst_node
, INSERT
);
1555 /* Insert this one into the hash table. */
1558 /* Make a new node for next time round. */
1559 int_cst_node
= make_int_cst (1, 1);
1565 /* The value either hashes properly or we drop it on the floor
1566 for the gc to take care of. There will not be enough of them
1569 tree nt
= build_new_int_cst (type
, cst
);
1570 tree
*slot
= int_cst_hash_table
->find_slot (nt
, INSERT
);
1574 /* Insert this one into the hash table. */
1586 cache_integer_cst (tree t
)
1588 tree type
= TREE_TYPE (t
);
1591 int prec
= TYPE_PRECISION (type
);
1593 gcc_assert (!TREE_OVERFLOW (t
));
1595 switch (TREE_CODE (type
))
1598 gcc_assert (integer_zerop (t
));
1602 case REFERENCE_TYPE
:
1603 /* Cache NULL pointer. */
1604 if (integer_zerop (t
))
1612 /* Cache false or true. */
1614 if (wi::ltu_p (wi::to_wide (t
), 2))
1615 ix
= TREE_INT_CST_ELT (t
, 0);
1620 if (TYPE_UNSIGNED (type
))
1623 limit
= INTEGER_SHARE_LIMIT
;
1625 /* This is a little hokie, but if the prec is smaller than
1626 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1627 obvious test will not get the correct answer. */
1628 if (prec
< HOST_BITS_PER_WIDE_INT
)
1630 if (tree_to_uhwi (t
) < (unsigned HOST_WIDE_INT
) INTEGER_SHARE_LIMIT
)
1631 ix
= tree_to_uhwi (t
);
1633 else if (wi::ltu_p (wi::to_wide (t
), INTEGER_SHARE_LIMIT
))
1634 ix
= tree_to_uhwi (t
);
1639 limit
= INTEGER_SHARE_LIMIT
+ 1;
1641 if (integer_minus_onep (t
))
1643 else if (!wi::neg_p (wi::to_wide (t
)))
1645 if (prec
< HOST_BITS_PER_WIDE_INT
)
1647 if (tree_to_shwi (t
) < INTEGER_SHARE_LIMIT
)
1648 ix
= tree_to_shwi (t
) + 1;
1650 else if (wi::ltu_p (wi::to_wide (t
), INTEGER_SHARE_LIMIT
))
1651 ix
= tree_to_shwi (t
) + 1;
1665 /* Look for it in the type's vector of small shared ints. */
1666 if (!TYPE_CACHED_VALUES_P (type
))
1668 TYPE_CACHED_VALUES_P (type
) = 1;
1669 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1672 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) == NULL_TREE
);
1673 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1677 /* Use the cache of larger shared ints. */
1678 tree
*slot
= int_cst_hash_table
->find_slot (t
, INSERT
);
1679 /* If there is already an entry for the number verify it's the
1682 gcc_assert (wi::to_wide (tree (*slot
)) == wi::to_wide (t
));
1684 /* Otherwise insert this one into the hash table. */
1690 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1691 and the rest are zeros. */
1694 build_low_bits_mask (tree type
, unsigned bits
)
1696 gcc_assert (bits
<= TYPE_PRECISION (type
));
1698 return wide_int_to_tree (type
, wi::mask (bits
, false,
1699 TYPE_PRECISION (type
)));
1702 /* Checks that X is integer constant that can be expressed in (unsigned)
1703 HOST_WIDE_INT without loss of precision. */
1706 cst_and_fits_in_hwi (const_tree x
)
1708 return (TREE_CODE (x
) == INTEGER_CST
1709 && (tree_fits_shwi_p (x
) || tree_fits_uhwi_p (x
)));
1712 /* Build a newly constructed VECTOR_CST with the given values of
1713 (VECTOR_CST_)LOG2_NPATTERNS and (VECTOR_CST_)NELTS_PER_PATTERN. */
1716 make_vector (unsigned log2_npatterns
,
1717 unsigned int nelts_per_pattern MEM_STAT_DECL
)
1719 gcc_assert (IN_RANGE (nelts_per_pattern
, 1, 3));
1721 unsigned npatterns
= 1 << log2_npatterns
;
1722 unsigned encoded_nelts
= npatterns
* nelts_per_pattern
;
1723 unsigned length
= (sizeof (struct tree_vector
)
1724 + (encoded_nelts
- 1) * sizeof (tree
));
1726 record_node_allocation_statistics (VECTOR_CST
, length
);
1728 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1730 TREE_SET_CODE (t
, VECTOR_CST
);
1731 TREE_CONSTANT (t
) = 1;
1732 VECTOR_CST_LOG2_NPATTERNS (t
) = log2_npatterns
;
1733 VECTOR_CST_NELTS_PER_PATTERN (t
) = nelts_per_pattern
;
1738 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1739 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1742 build_vector_from_ctor (tree type
, vec
<constructor_elt
, va_gc
> *v
)
1744 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
);
1745 unsigned HOST_WIDE_INT idx
;
1748 tree_vector_builder
vec (type
, nelts
, 1);
1749 FOR_EACH_CONSTRUCTOR_VALUE (v
, idx
, value
)
1751 if (TREE_CODE (value
) == VECTOR_CST
)
1752 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
1753 vec
.quick_push (VECTOR_CST_ELT (value
, i
));
1755 vec
.quick_push (value
);
1757 while (vec
.length () < nelts
)
1758 vec
.quick_push (build_zero_cst (TREE_TYPE (type
)));
1760 return vec
.build ();
1763 /* Build a vector of type VECTYPE where all the elements are SCs. */
1765 build_vector_from_val (tree vectype
, tree sc
)
1767 int i
, nunits
= TYPE_VECTOR_SUBPARTS (vectype
);
1769 if (sc
== error_mark_node
)
1772 /* Verify that the vector type is suitable for SC. Note that there
1773 is some inconsistency in the type-system with respect to restrict
1774 qualifications of pointers. Vector types always have a main-variant
1775 element type and the qualification is applied to the vector-type.
1776 So TREE_TYPE (vector-type) does not return a properly qualified
1777 vector element-type. */
1778 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc
)),
1779 TREE_TYPE (vectype
)));
1781 if (CONSTANT_CLASS_P (sc
))
1783 tree_vector_builder
v (vectype
, 1, 1);
1789 vec
<constructor_elt
, va_gc
> *v
;
1790 vec_alloc (v
, nunits
);
1791 for (i
= 0; i
< nunits
; ++i
)
1792 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, sc
);
1793 return build_constructor (vectype
, v
);
1797 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1798 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1801 recompute_constructor_flags (tree c
)
1805 bool constant_p
= true;
1806 bool side_effects_p
= false;
1807 vec
<constructor_elt
, va_gc
> *vals
= CONSTRUCTOR_ELTS (c
);
1809 FOR_EACH_CONSTRUCTOR_VALUE (vals
, i
, val
)
1811 /* Mostly ctors will have elts that don't have side-effects, so
1812 the usual case is to scan all the elements. Hence a single
1813 loop for both const and side effects, rather than one loop
1814 each (with early outs). */
1815 if (!TREE_CONSTANT (val
))
1817 if (TREE_SIDE_EFFECTS (val
))
1818 side_effects_p
= true;
1821 TREE_SIDE_EFFECTS (c
) = side_effects_p
;
1822 TREE_CONSTANT (c
) = constant_p
;
1825 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1829 verify_constructor_flags (tree c
)
1833 bool constant_p
= TREE_CONSTANT (c
);
1834 bool side_effects_p
= TREE_SIDE_EFFECTS (c
);
1835 vec
<constructor_elt
, va_gc
> *vals
= CONSTRUCTOR_ELTS (c
);
1837 FOR_EACH_CONSTRUCTOR_VALUE (vals
, i
, val
)
1839 if (constant_p
&& !TREE_CONSTANT (val
))
1840 internal_error ("non-constant element in constant CONSTRUCTOR");
1841 if (!side_effects_p
&& TREE_SIDE_EFFECTS (val
))
1842 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
1846 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1847 are in the vec pointed to by VALS. */
1849 build_constructor (tree type
, vec
<constructor_elt
, va_gc
> *vals
)
1851 tree c
= make_node (CONSTRUCTOR
);
1853 TREE_TYPE (c
) = type
;
1854 CONSTRUCTOR_ELTS (c
) = vals
;
1856 recompute_constructor_flags (c
);
1861 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1864 build_constructor_single (tree type
, tree index
, tree value
)
1866 vec
<constructor_elt
, va_gc
> *v
;
1867 constructor_elt elt
= {index
, value
};
1870 v
->quick_push (elt
);
1872 return build_constructor (type
, v
);
1876 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1877 are in a list pointed to by VALS. */
1879 build_constructor_from_list (tree type
, tree vals
)
1882 vec
<constructor_elt
, va_gc
> *v
= NULL
;
1886 vec_alloc (v
, list_length (vals
));
1887 for (t
= vals
; t
; t
= TREE_CHAIN (t
))
1888 CONSTRUCTOR_APPEND_ELT (v
, TREE_PURPOSE (t
), TREE_VALUE (t
));
1891 return build_constructor (type
, v
);
1894 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1895 of elements, provided as index/value pairs. */
1898 build_constructor_va (tree type
, int nelts
, ...)
1900 vec
<constructor_elt
, va_gc
> *v
= NULL
;
1903 va_start (p
, nelts
);
1904 vec_alloc (v
, nelts
);
1907 tree index
= va_arg (p
, tree
);
1908 tree value
= va_arg (p
, tree
);
1909 CONSTRUCTOR_APPEND_ELT (v
, index
, value
);
1912 return build_constructor (type
, v
);
1915 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1918 build_fixed (tree type
, FIXED_VALUE_TYPE f
)
1921 FIXED_VALUE_TYPE
*fp
;
1923 v
= make_node (FIXED_CST
);
1924 fp
= ggc_alloc
<fixed_value
> ();
1925 memcpy (fp
, &f
, sizeof (FIXED_VALUE_TYPE
));
1927 TREE_TYPE (v
) = type
;
1928 TREE_FIXED_CST_PTR (v
) = fp
;
1932 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1935 build_real (tree type
, REAL_VALUE_TYPE d
)
1938 REAL_VALUE_TYPE
*dp
;
1941 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1942 Consider doing it via real_convert now. */
1944 v
= make_node (REAL_CST
);
1945 dp
= ggc_alloc
<real_value
> ();
1946 memcpy (dp
, &d
, sizeof (REAL_VALUE_TYPE
));
1948 TREE_TYPE (v
) = type
;
1949 TREE_REAL_CST_PTR (v
) = dp
;
1950 TREE_OVERFLOW (v
) = overflow
;
1954 /* Like build_real, but first truncate D to the type. */
1957 build_real_truncate (tree type
, REAL_VALUE_TYPE d
)
1959 return build_real (type
, real_value_truncate (TYPE_MODE (type
), d
));
1962 /* Return a new REAL_CST node whose type is TYPE
1963 and whose value is the integer value of the INTEGER_CST node I. */
1966 real_value_from_int_cst (const_tree type
, const_tree i
)
1970 /* Clear all bits of the real value type so that we can later do
1971 bitwise comparisons to see if two values are the same. */
1972 memset (&d
, 0, sizeof d
);
1974 real_from_integer (&d
, type
? TYPE_MODE (type
) : VOIDmode
, wi::to_wide (i
),
1975 TYPE_SIGN (TREE_TYPE (i
)));
1979 /* Given a tree representing an integer constant I, return a tree
1980 representing the same value as a floating-point constant of type TYPE. */
1983 build_real_from_int_cst (tree type
, const_tree i
)
1986 int overflow
= TREE_OVERFLOW (i
);
1988 v
= build_real (type
, real_value_from_int_cst (type
, i
));
1990 TREE_OVERFLOW (v
) |= overflow
;
1994 /* Return a newly constructed STRING_CST node whose value is
1995 the LEN characters at STR.
1996 Note that for a C string literal, LEN should include the trailing NUL.
1997 The TREE_TYPE is not initialized. */
2000 build_string (int len
, const char *str
)
2005 /* Do not waste bytes provided by padding of struct tree_string. */
2006 length
= len
+ offsetof (struct tree_string
, str
) + 1;
2008 record_node_allocation_statistics (STRING_CST
, length
);
2010 s
= (tree
) ggc_internal_alloc (length
);
2012 memset (s
, 0, sizeof (struct tree_typed
));
2013 TREE_SET_CODE (s
, STRING_CST
);
2014 TREE_CONSTANT (s
) = 1;
2015 TREE_STRING_LENGTH (s
) = len
;
2016 memcpy (s
->string
.str
, str
, len
);
2017 s
->string
.str
[len
] = '\0';
2022 /* Return a newly constructed COMPLEX_CST node whose value is
2023 specified by the real and imaginary parts REAL and IMAG.
2024 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2025 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2028 build_complex (tree type
, tree real
, tree imag
)
2030 tree t
= make_node (COMPLEX_CST
);
2032 TREE_REALPART (t
) = real
;
2033 TREE_IMAGPART (t
) = imag
;
2034 TREE_TYPE (t
) = type
? type
: build_complex_type (TREE_TYPE (real
));
2035 TREE_OVERFLOW (t
) = TREE_OVERFLOW (real
) | TREE_OVERFLOW (imag
);
2039 /* Build a complex (inf +- 0i), such as for the result of cproj.
2040 TYPE is the complex tree type of the result. If NEG is true, the
2041 imaginary zero is negative. */
2044 build_complex_inf (tree type
, bool neg
)
2046 REAL_VALUE_TYPE rinf
, rzero
= dconst0
;
2050 return build_complex (type
, build_real (TREE_TYPE (type
), rinf
),
2051 build_real (TREE_TYPE (type
), rzero
));
2054 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2055 element is set to 1. In particular, this is 1 + i for complex types. */
2058 build_each_one_cst (tree type
)
2060 if (TREE_CODE (type
) == COMPLEX_TYPE
)
2062 tree scalar
= build_one_cst (TREE_TYPE (type
));
2063 return build_complex (type
, scalar
, scalar
);
2066 return build_one_cst (type
);
2069 /* Return a constant of arithmetic type TYPE which is the
2070 multiplicative identity of the set TYPE. */
2073 build_one_cst (tree type
)
2075 switch (TREE_CODE (type
))
2077 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2078 case POINTER_TYPE
: case REFERENCE_TYPE
:
2080 return build_int_cst (type
, 1);
2083 return build_real (type
, dconst1
);
2085 case FIXED_POINT_TYPE
:
2086 /* We can only generate 1 for accum types. */
2087 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
2088 return build_fixed (type
, FCONST1 (TYPE_MODE (type
)));
2092 tree scalar
= build_one_cst (TREE_TYPE (type
));
2094 return build_vector_from_val (type
, scalar
);
2098 return build_complex (type
,
2099 build_one_cst (TREE_TYPE (type
)),
2100 build_zero_cst (TREE_TYPE (type
)));
2107 /* Return an integer of type TYPE containing all 1's in as much precision as
2108 it contains, or a complex or vector whose subparts are such integers. */
2111 build_all_ones_cst (tree type
)
2113 if (TREE_CODE (type
) == COMPLEX_TYPE
)
2115 tree scalar
= build_all_ones_cst (TREE_TYPE (type
));
2116 return build_complex (type
, scalar
, scalar
);
2119 return build_minus_one_cst (type
);
2122 /* Return a constant of arithmetic type TYPE which is the
2123 opposite of the multiplicative identity of the set TYPE. */
2126 build_minus_one_cst (tree type
)
2128 switch (TREE_CODE (type
))
2130 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2131 case POINTER_TYPE
: case REFERENCE_TYPE
:
2133 return build_int_cst (type
, -1);
2136 return build_real (type
, dconstm1
);
2138 case FIXED_POINT_TYPE
:
2139 /* We can only generate 1 for accum types. */
2140 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
2141 return build_fixed (type
,
2142 fixed_from_double_int (double_int_minus_one
,
2143 SCALAR_TYPE_MODE (type
)));
2147 tree scalar
= build_minus_one_cst (TREE_TYPE (type
));
2149 return build_vector_from_val (type
, scalar
);
2153 return build_complex (type
,
2154 build_minus_one_cst (TREE_TYPE (type
)),
2155 build_zero_cst (TREE_TYPE (type
)));
2162 /* Build 0 constant of type TYPE. This is used by constructor folding
2163 and thus the constant should be represented in memory by
2167 build_zero_cst (tree type
)
2169 switch (TREE_CODE (type
))
2171 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2172 case POINTER_TYPE
: case REFERENCE_TYPE
:
2173 case OFFSET_TYPE
: case NULLPTR_TYPE
:
2174 return build_int_cst (type
, 0);
2177 return build_real (type
, dconst0
);
2179 case FIXED_POINT_TYPE
:
2180 return build_fixed (type
, FCONST0 (TYPE_MODE (type
)));
2184 tree scalar
= build_zero_cst (TREE_TYPE (type
));
2186 return build_vector_from_val (type
, scalar
);
2191 tree zero
= build_zero_cst (TREE_TYPE (type
));
2193 return build_complex (type
, zero
, zero
);
2197 if (!AGGREGATE_TYPE_P (type
))
2198 return fold_convert (type
, integer_zero_node
);
2199 return build_constructor (type
, NULL
);
2204 /* Build a BINFO with LEN language slots. */
2207 make_tree_binfo (unsigned base_binfos MEM_STAT_DECL
)
2210 size_t length
= (offsetof (struct tree_binfo
, base_binfos
)
2211 + vec
<tree
, va_gc
>::embedded_size (base_binfos
));
2213 record_node_allocation_statistics (TREE_BINFO
, length
);
2215 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
2217 memset (t
, 0, offsetof (struct tree_binfo
, base_binfos
));
2219 TREE_SET_CODE (t
, TREE_BINFO
);
2221 BINFO_BASE_BINFOS (t
)->embedded_init (base_binfos
);
2226 /* Create a CASE_LABEL_EXPR tree node and return it. */
2229 build_case_label (tree low_value
, tree high_value
, tree label_decl
)
2231 tree t
= make_node (CASE_LABEL_EXPR
);
2233 TREE_TYPE (t
) = void_type_node
;
2234 SET_EXPR_LOCATION (t
, DECL_SOURCE_LOCATION (label_decl
));
2236 CASE_LOW (t
) = low_value
;
2237 CASE_HIGH (t
) = high_value
;
2238 CASE_LABEL (t
) = label_decl
;
2239 CASE_CHAIN (t
) = NULL_TREE
;
2244 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2245 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2246 The latter determines the length of the HOST_WIDE_INT vector. */
2249 make_int_cst (int len
, int ext_len MEM_STAT_DECL
)
2252 int length
= ((ext_len
- 1) * sizeof (HOST_WIDE_INT
)
2253 + sizeof (struct tree_int_cst
));
2256 record_node_allocation_statistics (INTEGER_CST
, length
);
2258 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2260 TREE_SET_CODE (t
, INTEGER_CST
);
2261 TREE_INT_CST_NUNITS (t
) = len
;
2262 TREE_INT_CST_EXT_NUNITS (t
) = ext_len
;
2263 /* to_offset can only be applied to trees that are offset_int-sized
2264 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2265 must be exactly the precision of offset_int and so LEN is correct. */
2266 if (ext_len
<= OFFSET_INT_ELTS
)
2267 TREE_INT_CST_OFFSET_NUNITS (t
) = ext_len
;
2269 TREE_INT_CST_OFFSET_NUNITS (t
) = len
;
2271 TREE_CONSTANT (t
) = 1;
2276 /* Build a newly constructed TREE_VEC node of length LEN. */
2279 make_tree_vec (int len MEM_STAT_DECL
)
2282 size_t length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2284 record_node_allocation_statistics (TREE_VEC
, length
);
2286 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2288 TREE_SET_CODE (t
, TREE_VEC
);
2289 TREE_VEC_LENGTH (t
) = len
;
2294 /* Grow a TREE_VEC node to new length LEN. */
2297 grow_tree_vec (tree v
, int len MEM_STAT_DECL
)
2299 gcc_assert (TREE_CODE (v
) == TREE_VEC
);
2301 int oldlen
= TREE_VEC_LENGTH (v
);
2302 gcc_assert (len
> oldlen
);
2304 size_t oldlength
= (oldlen
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2305 size_t length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2307 record_node_allocation_statistics (TREE_VEC
, length
- oldlength
);
2309 v
= (tree
) ggc_realloc (v
, length PASS_MEM_STAT
);
2311 TREE_VEC_LENGTH (v
) = len
;
2316 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2317 fixed, and scalar, complex or vector. */
2320 zerop (const_tree expr
)
2322 return (integer_zerop (expr
)
2323 || real_zerop (expr
)
2324 || fixed_zerop (expr
));
2327 /* Return 1 if EXPR is the integer constant zero or a complex constant
2331 integer_zerop (const_tree expr
)
2333 switch (TREE_CODE (expr
))
2336 return wi::to_wide (expr
) == 0;
2338 return (integer_zerop (TREE_REALPART (expr
))
2339 && integer_zerop (TREE_IMAGPART (expr
)));
2341 return (VECTOR_CST_NPATTERNS (expr
) == 1
2342 && VECTOR_CST_DUPLICATE_P (expr
)
2343 && integer_zerop (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2349 /* Return 1 if EXPR is the integer constant one or the corresponding
2350 complex constant. */
2353 integer_onep (const_tree expr
)
2355 switch (TREE_CODE (expr
))
2358 return wi::eq_p (wi::to_widest (expr
), 1);
2360 return (integer_onep (TREE_REALPART (expr
))
2361 && integer_zerop (TREE_IMAGPART (expr
)));
2363 return (VECTOR_CST_NPATTERNS (expr
) == 1
2364 && VECTOR_CST_DUPLICATE_P (expr
)
2365 && integer_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2371 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2372 return 1 if every piece is the integer constant one. */
2375 integer_each_onep (const_tree expr
)
2377 if (TREE_CODE (expr
) == COMPLEX_CST
)
2378 return (integer_onep (TREE_REALPART (expr
))
2379 && integer_onep (TREE_IMAGPART (expr
)));
2381 return integer_onep (expr
);
2384 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2385 it contains, or a complex or vector whose subparts are such integers. */
2388 integer_all_onesp (const_tree expr
)
2390 if (TREE_CODE (expr
) == COMPLEX_CST
2391 && integer_all_onesp (TREE_REALPART (expr
))
2392 && integer_all_onesp (TREE_IMAGPART (expr
)))
2395 else if (TREE_CODE (expr
) == VECTOR_CST
)
2396 return (VECTOR_CST_NPATTERNS (expr
) == 1
2397 && VECTOR_CST_DUPLICATE_P (expr
)
2398 && integer_all_onesp (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2400 else if (TREE_CODE (expr
) != INTEGER_CST
)
2403 return (wi::max_value (TYPE_PRECISION (TREE_TYPE (expr
)), UNSIGNED
)
2404 == wi::to_wide (expr
));
2407 /* Return 1 if EXPR is the integer constant minus one. */
2410 integer_minus_onep (const_tree expr
)
2412 if (TREE_CODE (expr
) == COMPLEX_CST
)
2413 return (integer_all_onesp (TREE_REALPART (expr
))
2414 && integer_zerop (TREE_IMAGPART (expr
)));
2416 return integer_all_onesp (expr
);
2419 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2423 integer_pow2p (const_tree expr
)
2425 if (TREE_CODE (expr
) == COMPLEX_CST
2426 && integer_pow2p (TREE_REALPART (expr
))
2427 && integer_zerop (TREE_IMAGPART (expr
)))
2430 if (TREE_CODE (expr
) != INTEGER_CST
)
2433 return wi::popcount (wi::to_wide (expr
)) == 1;
2436 /* Return 1 if EXPR is an integer constant other than zero or a
2437 complex constant other than zero. */
2440 integer_nonzerop (const_tree expr
)
2442 return ((TREE_CODE (expr
) == INTEGER_CST
2443 && wi::to_wide (expr
) != 0)
2444 || (TREE_CODE (expr
) == COMPLEX_CST
2445 && (integer_nonzerop (TREE_REALPART (expr
))
2446 || integer_nonzerop (TREE_IMAGPART (expr
)))));
2449 /* Return 1 if EXPR is the integer constant one. For vector,
2450 return 1 if every piece is the integer constant minus one
2451 (representing the value TRUE). */
2454 integer_truep (const_tree expr
)
2456 if (TREE_CODE (expr
) == VECTOR_CST
)
2457 return integer_all_onesp (expr
);
2458 return integer_onep (expr
);
2461 /* Return 1 if EXPR is the fixed-point constant zero. */
2464 fixed_zerop (const_tree expr
)
2466 return (TREE_CODE (expr
) == FIXED_CST
2467 && TREE_FIXED_CST (expr
).data
.is_zero ());
2470 /* Return the power of two represented by a tree node known to be a
2474 tree_log2 (const_tree expr
)
2476 if (TREE_CODE (expr
) == COMPLEX_CST
)
2477 return tree_log2 (TREE_REALPART (expr
));
2479 return wi::exact_log2 (wi::to_wide (expr
));
2482 /* Similar, but return the largest integer Y such that 2 ** Y is less
2483 than or equal to EXPR. */
2486 tree_floor_log2 (const_tree expr
)
2488 if (TREE_CODE (expr
) == COMPLEX_CST
)
2489 return tree_log2 (TREE_REALPART (expr
));
2491 return wi::floor_log2 (wi::to_wide (expr
));
2494 /* Return number of known trailing zero bits in EXPR, or, if the value of
2495 EXPR is known to be zero, the precision of it's type. */
2498 tree_ctz (const_tree expr
)
2500 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
2501 && !POINTER_TYPE_P (TREE_TYPE (expr
)))
2504 unsigned int ret1
, ret2
, prec
= TYPE_PRECISION (TREE_TYPE (expr
));
2505 switch (TREE_CODE (expr
))
2508 ret1
= wi::ctz (wi::to_wide (expr
));
2509 return MIN (ret1
, prec
);
2511 ret1
= wi::ctz (get_nonzero_bits (expr
));
2512 return MIN (ret1
, prec
);
2519 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2522 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2523 return MIN (ret1
, ret2
);
2524 case POINTER_PLUS_EXPR
:
2525 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2526 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2527 /* Second operand is sizetype, which could be in theory
2528 wider than pointer's precision. Make sure we never
2529 return more than prec. */
2530 ret2
= MIN (ret2
, prec
);
2531 return MIN (ret1
, ret2
);
2533 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2534 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2535 return MAX (ret1
, ret2
);
2537 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2538 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2539 return MIN (ret1
+ ret2
, prec
);
2541 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2542 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
2543 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
2545 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
2546 return MIN (ret1
+ ret2
, prec
);
2550 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
2551 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
2553 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2554 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
2559 case TRUNC_DIV_EXPR
:
2561 case FLOOR_DIV_EXPR
:
2562 case ROUND_DIV_EXPR
:
2563 case EXACT_DIV_EXPR
:
2564 if (TREE_CODE (TREE_OPERAND (expr
, 1)) == INTEGER_CST
2565 && tree_int_cst_sgn (TREE_OPERAND (expr
, 1)) == 1)
2567 int l
= tree_log2 (TREE_OPERAND (expr
, 1));
2570 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2578 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2579 if (ret1
&& ret1
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr
, 0))))
2581 return MIN (ret1
, prec
);
2583 return tree_ctz (TREE_OPERAND (expr
, 0));
2585 ret1
= tree_ctz (TREE_OPERAND (expr
, 1));
2588 ret2
= tree_ctz (TREE_OPERAND (expr
, 2));
2589 return MIN (ret1
, ret2
);
2591 return tree_ctz (TREE_OPERAND (expr
, 1));
2593 ret1
= get_pointer_alignment (CONST_CAST_TREE (expr
));
2594 if (ret1
> BITS_PER_UNIT
)
2596 ret1
= ctz_hwi (ret1
/ BITS_PER_UNIT
);
2597 return MIN (ret1
, prec
);
2605 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2606 decimal float constants, so don't return 1 for them. */
2609 real_zerop (const_tree expr
)
2611 switch (TREE_CODE (expr
))
2614 return real_equal (&TREE_REAL_CST (expr
), &dconst0
)
2615 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2617 return real_zerop (TREE_REALPART (expr
))
2618 && real_zerop (TREE_IMAGPART (expr
));
2621 /* Don't simply check for a duplicate because the predicate
2622 accepts both +0.0 and -0.0. */
2623 unsigned count
= vector_cst_encoded_nelts (expr
);
2624 for (unsigned int i
= 0; i
< count
; ++i
)
2625 if (!real_zerop (VECTOR_CST_ENCODED_ELT (expr
, i
)))
2634 /* Return 1 if EXPR is the real constant one in real or complex form.
2635 Trailing zeroes matter for decimal float constants, so don't return
2639 real_onep (const_tree expr
)
2641 switch (TREE_CODE (expr
))
2644 return real_equal (&TREE_REAL_CST (expr
), &dconst1
)
2645 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2647 return real_onep (TREE_REALPART (expr
))
2648 && real_zerop (TREE_IMAGPART (expr
));
2650 return (VECTOR_CST_NPATTERNS (expr
) == 1
2651 && VECTOR_CST_DUPLICATE_P (expr
)
2652 && real_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2658 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2659 matter for decimal float constants, so don't return 1 for them. */
2662 real_minus_onep (const_tree expr
)
2664 switch (TREE_CODE (expr
))
2667 return real_equal (&TREE_REAL_CST (expr
), &dconstm1
)
2668 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2670 return real_minus_onep (TREE_REALPART (expr
))
2671 && real_zerop (TREE_IMAGPART (expr
));
2673 return (VECTOR_CST_NPATTERNS (expr
) == 1
2674 && VECTOR_CST_DUPLICATE_P (expr
)
2675 && real_minus_onep (VECTOR_CST_ENCODED_ELT (expr
, 0)));
2681 /* Nonzero if EXP is a constant or a cast of a constant. */
2684 really_constant_p (const_tree exp
)
2686 /* This is not quite the same as STRIP_NOPS. It does more. */
2687 while (CONVERT_EXPR_P (exp
)
2688 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
2689 exp
= TREE_OPERAND (exp
, 0);
2690 return TREE_CONSTANT (exp
);
2693 /* Return first list element whose TREE_VALUE is ELEM.
2694 Return 0 if ELEM is not in LIST. */
2697 value_member (tree elem
, tree list
)
2701 if (elem
== TREE_VALUE (list
))
2703 list
= TREE_CHAIN (list
);
2708 /* Return first list element whose TREE_PURPOSE is ELEM.
2709 Return 0 if ELEM is not in LIST. */
2712 purpose_member (const_tree elem
, tree list
)
2716 if (elem
== TREE_PURPOSE (list
))
2718 list
= TREE_CHAIN (list
);
2723 /* Return true if ELEM is in V. */
2726 vec_member (const_tree elem
, vec
<tree
, va_gc
> *v
)
2730 FOR_EACH_VEC_SAFE_ELT (v
, ix
, t
)
2736 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2740 chain_index (int idx
, tree chain
)
2742 for (; chain
&& idx
> 0; --idx
)
2743 chain
= TREE_CHAIN (chain
);
2747 /* Return nonzero if ELEM is part of the chain CHAIN. */
2750 chain_member (const_tree elem
, const_tree chain
)
2756 chain
= DECL_CHAIN (chain
);
2762 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2763 We expect a null pointer to mark the end of the chain.
2764 This is the Lisp primitive `length'. */
2767 list_length (const_tree t
)
2770 #ifdef ENABLE_TREE_CHECKING
2778 #ifdef ENABLE_TREE_CHECKING
2781 gcc_assert (p
!= q
);
2789 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2790 UNION_TYPE TYPE, or NULL_TREE if none. */
2793 first_field (const_tree type
)
2795 tree t
= TYPE_FIELDS (type
);
2796 while (t
&& TREE_CODE (t
) != FIELD_DECL
)
2801 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2802 by modifying the last node in chain 1 to point to chain 2.
2803 This is the Lisp primitive `nconc'. */
2806 chainon (tree op1
, tree op2
)
2815 for (t1
= op1
; TREE_CHAIN (t1
); t1
= TREE_CHAIN (t1
))
2817 TREE_CHAIN (t1
) = op2
;
2819 #ifdef ENABLE_TREE_CHECKING
2822 for (t2
= op2
; t2
; t2
= TREE_CHAIN (t2
))
2823 gcc_assert (t2
!= t1
);
2830 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2833 tree_last (tree chain
)
2837 while ((next
= TREE_CHAIN (chain
)))
2842 /* Reverse the order of elements in the chain T,
2843 and return the new head of the chain (old last element). */
2848 tree prev
= 0, decl
, next
;
2849 for (decl
= t
; decl
; decl
= next
)
2851 /* We shouldn't be using this function to reverse BLOCK chains; we
2852 have blocks_nreverse for that. */
2853 gcc_checking_assert (TREE_CODE (decl
) != BLOCK
);
2854 next
= TREE_CHAIN (decl
);
2855 TREE_CHAIN (decl
) = prev
;
2861 /* Return a newly created TREE_LIST node whose
2862 purpose and value fields are PARM and VALUE. */
2865 build_tree_list (tree parm
, tree value MEM_STAT_DECL
)
2867 tree t
= make_node (TREE_LIST PASS_MEM_STAT
);
2868 TREE_PURPOSE (t
) = parm
;
2869 TREE_VALUE (t
) = value
;
2873 /* Build a chain of TREE_LIST nodes from a vector. */
2876 build_tree_list_vec (const vec
<tree
, va_gc
> *vec MEM_STAT_DECL
)
2878 tree ret
= NULL_TREE
;
2882 FOR_EACH_VEC_SAFE_ELT (vec
, i
, t
)
2884 *pp
= build_tree_list (NULL
, t PASS_MEM_STAT
);
2885 pp
= &TREE_CHAIN (*pp
);
2890 /* Return a newly created TREE_LIST node whose
2891 purpose and value fields are PURPOSE and VALUE
2892 and whose TREE_CHAIN is CHAIN. */
2895 tree_cons (tree purpose
, tree value
, tree chain MEM_STAT_DECL
)
2899 node
= ggc_alloc_tree_node_stat (sizeof (struct tree_list
) PASS_MEM_STAT
);
2900 memset (node
, 0, sizeof (struct tree_common
));
2902 record_node_allocation_statistics (TREE_LIST
, sizeof (struct tree_list
));
2904 TREE_SET_CODE (node
, TREE_LIST
);
2905 TREE_CHAIN (node
) = chain
;
2906 TREE_PURPOSE (node
) = purpose
;
2907 TREE_VALUE (node
) = value
;
2911 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2915 ctor_to_vec (tree ctor
)
2917 vec
<tree
, va_gc
> *vec
;
2918 vec_alloc (vec
, CONSTRUCTOR_NELTS (ctor
));
2922 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor
), ix
, val
)
2923 vec
->quick_push (val
);
2928 /* Return the size nominally occupied by an object of type TYPE
2929 when it resides in memory. The value is measured in units of bytes,
2930 and its data type is that normally used for type sizes
2931 (which is the first type created by make_signed_type or
2932 make_unsigned_type). */
2935 size_in_bytes_loc (location_t loc
, const_tree type
)
2939 if (type
== error_mark_node
)
2940 return integer_zero_node
;
2942 type
= TYPE_MAIN_VARIANT (type
);
2943 t
= TYPE_SIZE_UNIT (type
);
2947 lang_hooks
.types
.incomplete_type_error (loc
, NULL_TREE
, type
);
2948 return size_zero_node
;
2954 /* Return the size of TYPE (in bytes) as a wide integer
2955 or return -1 if the size can vary or is larger than an integer. */
2958 int_size_in_bytes (const_tree type
)
2962 if (type
== error_mark_node
)
2965 type
= TYPE_MAIN_VARIANT (type
);
2966 t
= TYPE_SIZE_UNIT (type
);
2968 if (t
&& tree_fits_uhwi_p (t
))
2969 return TREE_INT_CST_LOW (t
);
2974 /* Return the maximum size of TYPE (in bytes) as a wide integer
2975 or return -1 if the size can vary or is larger than an integer. */
2978 max_int_size_in_bytes (const_tree type
)
2980 HOST_WIDE_INT size
= -1;
2983 /* If this is an array type, check for a possible MAX_SIZE attached. */
2985 if (TREE_CODE (type
) == ARRAY_TYPE
)
2987 size_tree
= TYPE_ARRAY_MAX_SIZE (type
);
2989 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
2990 size
= tree_to_uhwi (size_tree
);
2993 /* If we still haven't been able to get a size, see if the language
2994 can compute a maximum size. */
2998 size_tree
= lang_hooks
.types
.max_size (type
);
3000 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
3001 size
= tree_to_uhwi (size_tree
);
3007 /* Return the bit position of FIELD, in bits from the start of the record.
3008 This is a tree of type bitsizetype. */
3011 bit_position (const_tree field
)
3013 return bit_from_pos (DECL_FIELD_OFFSET (field
),
3014 DECL_FIELD_BIT_OFFSET (field
));
3017 /* Return the byte position of FIELD, in bytes from the start of the record.
3018 This is a tree of type sizetype. */
3021 byte_position (const_tree field
)
3023 return byte_from_pos (DECL_FIELD_OFFSET (field
),
3024 DECL_FIELD_BIT_OFFSET (field
));
3027 /* Likewise, but return as an integer. It must be representable in
3028 that way (since it could be a signed value, we don't have the
3029 option of returning -1 like int_size_in_byte can. */
3032 int_byte_position (const_tree field
)
3034 return tree_to_shwi (byte_position (field
));
3037 /* Return the strictest alignment, in bits, that T is known to have. */
3040 expr_align (const_tree t
)
3042 unsigned int align0
, align1
;
3044 switch (TREE_CODE (t
))
3046 CASE_CONVERT
: case NON_LVALUE_EXPR
:
3047 /* If we have conversions, we know that the alignment of the
3048 object must meet each of the alignments of the types. */
3049 align0
= expr_align (TREE_OPERAND (t
, 0));
3050 align1
= TYPE_ALIGN (TREE_TYPE (t
));
3051 return MAX (align0
, align1
);
3053 case SAVE_EXPR
: case COMPOUND_EXPR
: case MODIFY_EXPR
:
3054 case INIT_EXPR
: case TARGET_EXPR
: case WITH_CLEANUP_EXPR
:
3055 case CLEANUP_POINT_EXPR
:
3056 /* These don't change the alignment of an object. */
3057 return expr_align (TREE_OPERAND (t
, 0));
3060 /* The best we can do is say that the alignment is the least aligned
3062 align0
= expr_align (TREE_OPERAND (t
, 1));
3063 align1
= expr_align (TREE_OPERAND (t
, 2));
3064 return MIN (align0
, align1
);
3066 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3067 meaningfully, it's always 1. */
3068 case LABEL_DECL
: case CONST_DECL
:
3069 case VAR_DECL
: case PARM_DECL
: case RESULT_DECL
:
3071 gcc_assert (DECL_ALIGN (t
) != 0);
3072 return DECL_ALIGN (t
);
3078 /* Otherwise take the alignment from that of the type. */
3079 return TYPE_ALIGN (TREE_TYPE (t
));
3082 /* Return, as a tree node, the number of elements for TYPE (which is an
3083 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3086 array_type_nelts (const_tree type
)
3088 tree index_type
, min
, max
;
3090 /* If they did it with unspecified bounds, then we should have already
3091 given an error about it before we got here. */
3092 if (! TYPE_DOMAIN (type
))
3093 return error_mark_node
;
3095 index_type
= TYPE_DOMAIN (type
);
3096 min
= TYPE_MIN_VALUE (index_type
);
3097 max
= TYPE_MAX_VALUE (index_type
);
3099 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3101 return error_mark_node
;
3103 return (integer_zerop (min
)
3105 : fold_build2 (MINUS_EXPR
, TREE_TYPE (max
), max
, min
));
3108 /* If arg is static -- a reference to an object in static storage -- then
3109 return the object. This is not the same as the C meaning of `static'.
3110 If arg isn't static, return NULL. */
3115 switch (TREE_CODE (arg
))
3118 /* Nested functions are static, even though taking their address will
3119 involve a trampoline as we unnest the nested function and create
3120 the trampoline on the tree level. */
3124 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3125 && ! DECL_THREAD_LOCAL_P (arg
)
3126 && ! DECL_DLLIMPORT_P (arg
)
3130 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
3134 return TREE_STATIC (arg
) ? arg
: NULL
;
3141 /* If the thing being referenced is not a field, then it is
3142 something language specific. */
3143 gcc_assert (TREE_CODE (TREE_OPERAND (arg
, 1)) == FIELD_DECL
);
3145 /* If we are referencing a bitfield, we can't evaluate an
3146 ADDR_EXPR at compile time and so it isn't a constant. */
3147 if (DECL_BIT_FIELD (TREE_OPERAND (arg
, 1)))
3150 return staticp (TREE_OPERAND (arg
, 0));
3156 return TREE_CONSTANT (TREE_OPERAND (arg
, 0)) ? arg
: NULL
;
3159 case ARRAY_RANGE_REF
:
3160 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg
))) == INTEGER_CST
3161 && TREE_CODE (TREE_OPERAND (arg
, 1)) == INTEGER_CST
)
3162 return staticp (TREE_OPERAND (arg
, 0));
3166 case COMPOUND_LITERAL_EXPR
:
3167 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg
)) ? arg
: NULL
;
3177 /* Return whether OP is a DECL whose address is function-invariant. */
3180 decl_address_invariant_p (const_tree op
)
3182 /* The conditions below are slightly less strict than the one in
3185 switch (TREE_CODE (op
))
3194 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3195 || DECL_THREAD_LOCAL_P (op
)
3196 || DECL_CONTEXT (op
) == current_function_decl
3197 || decl_function_context (op
) == current_function_decl
)
3202 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3203 || decl_function_context (op
) == current_function_decl
)
3214 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3217 decl_address_ip_invariant_p (const_tree op
)
3219 /* The conditions below are slightly less strict than the one in
3222 switch (TREE_CODE (op
))
3230 if (((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3231 && !DECL_DLLIMPORT_P (op
))
3232 || DECL_THREAD_LOCAL_P (op
))
3237 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
)))
3249 /* Return true if T is function-invariant (internal function, does
3250 not handle arithmetic; that's handled in skip_simple_arithmetic and
3251 tree_invariant_p). */
3254 tree_invariant_p_1 (tree t
)
3258 if (TREE_CONSTANT (t
)
3259 || (TREE_READONLY (t
) && !TREE_SIDE_EFFECTS (t
)))
3262 switch (TREE_CODE (t
))
3268 op
= TREE_OPERAND (t
, 0);
3269 while (handled_component_p (op
))
3271 switch (TREE_CODE (op
))
3274 case ARRAY_RANGE_REF
:
3275 if (!tree_invariant_p (TREE_OPERAND (op
, 1))
3276 || TREE_OPERAND (op
, 2) != NULL_TREE
3277 || TREE_OPERAND (op
, 3) != NULL_TREE
)
3282 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
3288 op
= TREE_OPERAND (op
, 0);
3291 return CONSTANT_CLASS_P (op
) || decl_address_invariant_p (op
);
3300 /* Return true if T is function-invariant. */
3303 tree_invariant_p (tree t
)
3305 tree inner
= skip_simple_arithmetic (t
);
3306 return tree_invariant_p_1 (inner
);
3309 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3310 Do this to any expression which may be used in more than one place,
3311 but must be evaluated only once.
3313 Normally, expand_expr would reevaluate the expression each time.
3314 Calling save_expr produces something that is evaluated and recorded
3315 the first time expand_expr is called on it. Subsequent calls to
3316 expand_expr just reuse the recorded value.
3318 The call to expand_expr that generates code that actually computes
3319 the value is the first call *at compile time*. Subsequent calls
3320 *at compile time* generate code to use the saved value.
3321 This produces correct result provided that *at run time* control
3322 always flows through the insns made by the first expand_expr
3323 before reaching the other places where the save_expr was evaluated.
3324 You, the caller of save_expr, must make sure this is so.
3326 Constants, and certain read-only nodes, are returned with no
3327 SAVE_EXPR because that is safe. Expressions containing placeholders
3328 are not touched; see tree.def for an explanation of what these
3332 save_expr (tree expr
)
3336 /* If the tree evaluates to a constant, then we don't want to hide that
3337 fact (i.e. this allows further folding, and direct checks for constants).
3338 However, a read-only object that has side effects cannot be bypassed.
3339 Since it is no problem to reevaluate literals, we just return the
3341 inner
= skip_simple_arithmetic (expr
);
3342 if (TREE_CODE (inner
) == ERROR_MARK
)
3345 if (tree_invariant_p_1 (inner
))
3348 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3349 it means that the size or offset of some field of an object depends on
3350 the value within another field.
3352 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3353 and some variable since it would then need to be both evaluated once and
3354 evaluated more than once. Front-ends must assure this case cannot
3355 happen by surrounding any such subexpressions in their own SAVE_EXPR
3356 and forcing evaluation at the proper time. */
3357 if (contains_placeholder_p (inner
))
3360 expr
= build1_loc (EXPR_LOCATION (expr
), SAVE_EXPR
, TREE_TYPE (expr
), expr
);
3362 /* This expression might be placed ahead of a jump to ensure that the
3363 value was computed on both sides of the jump. So make sure it isn't
3364 eliminated as dead. */
3365 TREE_SIDE_EFFECTS (expr
) = 1;
3369 /* Look inside EXPR into any simple arithmetic operations. Return the
3370 outermost non-arithmetic or non-invariant node. */
3373 skip_simple_arithmetic (tree expr
)
3375 /* We don't care about whether this can be used as an lvalue in this
3377 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3378 expr
= TREE_OPERAND (expr
, 0);
3380 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3381 a constant, it will be more efficient to not make another SAVE_EXPR since
3382 it will allow better simplification and GCSE will be able to merge the
3383 computations if they actually occur. */
3386 if (UNARY_CLASS_P (expr
))
3387 expr
= TREE_OPERAND (expr
, 0);
3388 else if (BINARY_CLASS_P (expr
))
3390 if (tree_invariant_p (TREE_OPERAND (expr
, 1)))
3391 expr
= TREE_OPERAND (expr
, 0);
3392 else if (tree_invariant_p (TREE_OPERAND (expr
, 0)))
3393 expr
= TREE_OPERAND (expr
, 1);
3404 /* Look inside EXPR into simple arithmetic operations involving constants.
3405 Return the outermost non-arithmetic or non-constant node. */
3408 skip_simple_constant_arithmetic (tree expr
)
3410 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3411 expr
= TREE_OPERAND (expr
, 0);
3415 if (UNARY_CLASS_P (expr
))
3416 expr
= TREE_OPERAND (expr
, 0);
3417 else if (BINARY_CLASS_P (expr
))
3419 if (TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
3420 expr
= TREE_OPERAND (expr
, 0);
3421 else if (TREE_CONSTANT (TREE_OPERAND (expr
, 0)))
3422 expr
= TREE_OPERAND (expr
, 1);
3433 /* Return which tree structure is used by T. */
3435 enum tree_node_structure_enum
3436 tree_node_structure (const_tree t
)
3438 const enum tree_code code
= TREE_CODE (t
);
3439 return tree_node_structure_for_code (code
);
3442 /* Set various status flags when building a CALL_EXPR object T. */
3445 process_call_operands (tree t
)
3447 bool side_effects
= TREE_SIDE_EFFECTS (t
);
3448 bool read_only
= false;
3449 int i
= call_expr_flags (t
);
3451 /* Calls have side-effects, except those to const or pure functions. */
3452 if ((i
& ECF_LOOPING_CONST_OR_PURE
) || !(i
& (ECF_CONST
| ECF_PURE
)))
3453 side_effects
= true;
3454 /* Propagate TREE_READONLY of arguments for const functions. */
3458 if (!side_effects
|| read_only
)
3459 for (i
= 1; i
< TREE_OPERAND_LENGTH (t
); i
++)
3461 tree op
= TREE_OPERAND (t
, i
);
3462 if (op
&& TREE_SIDE_EFFECTS (op
))
3463 side_effects
= true;
3464 if (op
&& !TREE_READONLY (op
) && !CONSTANT_CLASS_P (op
))
3468 TREE_SIDE_EFFECTS (t
) = side_effects
;
3469 TREE_READONLY (t
) = read_only
;
3472 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3473 size or offset that depends on a field within a record. */
3476 contains_placeholder_p (const_tree exp
)
3478 enum tree_code code
;
3483 code
= TREE_CODE (exp
);
3484 if (code
== PLACEHOLDER_EXPR
)
3487 switch (TREE_CODE_CLASS (code
))
3490 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3491 position computations since they will be converted into a
3492 WITH_RECORD_EXPR involving the reference, which will assume
3493 here will be valid. */
3494 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3496 case tcc_exceptional
:
3497 if (code
== TREE_LIST
)
3498 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp
))
3499 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp
)));
3504 case tcc_comparison
:
3505 case tcc_expression
:
3509 /* Ignoring the first operand isn't quite right, but works best. */
3510 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1));
3513 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
3514 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1))
3515 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 2)));
3518 /* The save_expr function never wraps anything containing
3519 a PLACEHOLDER_EXPR. */
3526 switch (TREE_CODE_LENGTH (code
))
3529 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3531 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
3532 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1)));
3543 const_call_expr_arg_iterator iter
;
3544 FOR_EACH_CONST_CALL_EXPR_ARG (arg
, iter
, exp
)
3545 if (CONTAINS_PLACEHOLDER_P (arg
))
3559 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3560 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3564 type_contains_placeholder_1 (const_tree type
)
3566 /* If the size contains a placeholder or the parent type (component type in
3567 the case of arrays) type involves a placeholder, this type does. */
3568 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type
))
3569 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type
))
3570 || (!POINTER_TYPE_P (type
)
3572 && type_contains_placeholder_p (TREE_TYPE (type
))))
3575 /* Now do type-specific checks. Note that the last part of the check above
3576 greatly limits what we have to do below. */
3577 switch (TREE_CODE (type
))
3580 case POINTER_BOUNDS_TYPE
:
3586 case REFERENCE_TYPE
:
3595 case FIXED_POINT_TYPE
:
3596 /* Here we just check the bounds. */
3597 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type
))
3598 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type
)));
3601 /* We have already checked the component type above, so just check
3602 the domain type. Flexible array members have a null domain. */
3603 return TYPE_DOMAIN (type
) ?
3604 type_contains_placeholder_p (TYPE_DOMAIN (type
)) : false;
3608 case QUAL_UNION_TYPE
:
3612 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
3613 if (TREE_CODE (field
) == FIELD_DECL
3614 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field
))
3615 || (TREE_CODE (type
) == QUAL_UNION_TYPE
3616 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field
)))
3617 || type_contains_placeholder_p (TREE_TYPE (field
))))
3628 /* Wrapper around above function used to cache its result. */
3631 type_contains_placeholder_p (tree type
)
3635 /* If the contains_placeholder_bits field has been initialized,
3636 then we know the answer. */
3637 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) > 0)
3638 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) - 1;
3640 /* Indicate that we've seen this type node, and the answer is false.
3641 This is what we want to return if we run into recursion via fields. */
3642 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = 1;
3644 /* Compute the real value. */
3645 result
= type_contains_placeholder_1 (type
);
3647 /* Store the real value. */
3648 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = result
+ 1;
3653 /* Push tree EXP onto vector QUEUE if it is not already present. */
3656 push_without_duplicates (tree exp
, vec
<tree
> *queue
)
3661 FOR_EACH_VEC_ELT (*queue
, i
, iter
)
3662 if (simple_cst_equal (iter
, exp
) == 1)
3666 queue
->safe_push (exp
);
3669 /* Given a tree EXP, find all occurrences of references to fields
3670 in a PLACEHOLDER_EXPR and place them in vector REFS without
3671 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3672 we assume here that EXP contains only arithmetic expressions
3673 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3677 find_placeholder_in_expr (tree exp
, vec
<tree
> *refs
)
3679 enum tree_code code
= TREE_CODE (exp
);
3683 /* We handle TREE_LIST and COMPONENT_REF separately. */
3684 if (code
== TREE_LIST
)
3686 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), refs
);
3687 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), refs
);
3689 else if (code
== COMPONENT_REF
)
3691 for (inner
= TREE_OPERAND (exp
, 0);
3692 REFERENCE_CLASS_P (inner
);
3693 inner
= TREE_OPERAND (inner
, 0))
3696 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3697 push_without_duplicates (exp
, refs
);
3699 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), refs
);
3702 switch (TREE_CODE_CLASS (code
))
3707 case tcc_declaration
:
3708 /* Variables allocated to static storage can stay. */
3709 if (!TREE_STATIC (exp
))
3710 push_without_duplicates (exp
, refs
);
3713 case tcc_expression
:
3714 /* This is the pattern built in ada/make_aligning_type. */
3715 if (code
== ADDR_EXPR
3716 && TREE_CODE (TREE_OPERAND (exp
, 0)) == PLACEHOLDER_EXPR
)
3718 push_without_duplicates (exp
, refs
);
3724 case tcc_exceptional
:
3727 case tcc_comparison
:
3729 for (i
= 0; i
< TREE_CODE_LENGTH (code
); i
++)
3730 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
3734 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3735 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
3743 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3744 return a tree with all occurrences of references to F in a
3745 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3746 CONST_DECLs. Note that we assume here that EXP contains only
3747 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3748 occurring only in their argument list. */
3751 substitute_in_expr (tree exp
, tree f
, tree r
)
3753 enum tree_code code
= TREE_CODE (exp
);
3754 tree op0
, op1
, op2
, op3
;
3757 /* We handle TREE_LIST and COMPONENT_REF separately. */
3758 if (code
== TREE_LIST
)
3760 op0
= SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp
), f
, r
);
3761 op1
= SUBSTITUTE_IN_EXPR (TREE_VALUE (exp
), f
, r
);
3762 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
3765 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
3767 else if (code
== COMPONENT_REF
)
3771 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3772 and it is the right field, replace it with R. */
3773 for (inner
= TREE_OPERAND (exp
, 0);
3774 REFERENCE_CLASS_P (inner
);
3775 inner
= TREE_OPERAND (inner
, 0))
3779 op1
= TREE_OPERAND (exp
, 1);
3781 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& op1
== f
)
3784 /* If this expression hasn't been completed let, leave it alone. */
3785 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& !TREE_TYPE (inner
))
3788 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3789 if (op0
== TREE_OPERAND (exp
, 0))
3793 = fold_build3 (COMPONENT_REF
, TREE_TYPE (exp
), op0
, op1
, NULL_TREE
);
3796 switch (TREE_CODE_CLASS (code
))
3801 case tcc_declaration
:
3807 case tcc_expression
:
3813 case tcc_exceptional
:
3816 case tcc_comparison
:
3818 switch (TREE_CODE_LENGTH (code
))
3824 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3825 if (op0
== TREE_OPERAND (exp
, 0))
3828 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
3832 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3833 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3835 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
3838 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
3842 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3843 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3844 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
3846 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3847 && op2
== TREE_OPERAND (exp
, 2))
3850 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
3854 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3855 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3856 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
3857 op3
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 3), f
, r
);
3859 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3860 && op2
== TREE_OPERAND (exp
, 2)
3861 && op3
== TREE_OPERAND (exp
, 3))
3865 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
3877 new_tree
= NULL_TREE
;
3879 /* If we are trying to replace F with a constant or with another
3880 instance of one of the arguments of the call, inline back
3881 functions which do nothing else than computing a value from
3882 the arguments they are passed. This makes it possible to
3883 fold partially or entirely the replacement expression. */
3884 if (code
== CALL_EXPR
)
3886 bool maybe_inline
= false;
3887 if (CONSTANT_CLASS_P (r
))
3888 maybe_inline
= true;
3890 for (i
= 3; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3891 if (operand_equal_p (TREE_OPERAND (exp
, i
), r
, 0))
3893 maybe_inline
= true;
3898 tree t
= maybe_inline_call_in_expr (exp
);
3900 return SUBSTITUTE_IN_EXPR (t
, f
, r
);
3904 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3906 tree op
= TREE_OPERAND (exp
, i
);
3907 tree new_op
= SUBSTITUTE_IN_EXPR (op
, f
, r
);
3911 new_tree
= copy_node (exp
);
3912 TREE_OPERAND (new_tree
, i
) = new_op
;
3918 new_tree
= fold (new_tree
);
3919 if (TREE_CODE (new_tree
) == CALL_EXPR
)
3920 process_call_operands (new_tree
);
3931 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
3933 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
3934 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
3939 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3940 for it within OBJ, a tree that is an object or a chain of references. */
3943 substitute_placeholder_in_expr (tree exp
, tree obj
)
3945 enum tree_code code
= TREE_CODE (exp
);
3946 tree op0
, op1
, op2
, op3
;
3949 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3950 in the chain of OBJ. */
3951 if (code
== PLACEHOLDER_EXPR
)
3953 tree need_type
= TYPE_MAIN_VARIANT (TREE_TYPE (exp
));
3956 for (elt
= obj
; elt
!= 0;
3957 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
3958 || TREE_CODE (elt
) == COND_EXPR
)
3959 ? TREE_OPERAND (elt
, 1)
3960 : (REFERENCE_CLASS_P (elt
)
3961 || UNARY_CLASS_P (elt
)
3962 || BINARY_CLASS_P (elt
)
3963 || VL_EXP_CLASS_P (elt
)
3964 || EXPRESSION_CLASS_P (elt
))
3965 ? TREE_OPERAND (elt
, 0) : 0))
3966 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
3969 for (elt
= obj
; elt
!= 0;
3970 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
3971 || TREE_CODE (elt
) == COND_EXPR
)
3972 ? TREE_OPERAND (elt
, 1)
3973 : (REFERENCE_CLASS_P (elt
)
3974 || UNARY_CLASS_P (elt
)
3975 || BINARY_CLASS_P (elt
)
3976 || VL_EXP_CLASS_P (elt
)
3977 || EXPRESSION_CLASS_P (elt
))
3978 ? TREE_OPERAND (elt
, 0) : 0))
3979 if (POINTER_TYPE_P (TREE_TYPE (elt
))
3980 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
3982 return fold_build1 (INDIRECT_REF
, need_type
, elt
);
3984 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3985 survives until RTL generation, there will be an error. */
3989 /* TREE_LIST is special because we need to look at TREE_VALUE
3990 and TREE_CHAIN, not TREE_OPERANDS. */
3991 else if (code
== TREE_LIST
)
3993 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), obj
);
3994 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), obj
);
3995 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
3998 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4001 switch (TREE_CODE_CLASS (code
))
4004 case tcc_declaration
:
4007 case tcc_exceptional
:
4010 case tcc_comparison
:
4011 case tcc_expression
:
4014 switch (TREE_CODE_LENGTH (code
))
4020 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4021 if (op0
== TREE_OPERAND (exp
, 0))
4024 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4028 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4029 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4031 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4034 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4038 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4039 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4040 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4042 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4043 && op2
== TREE_OPERAND (exp
, 2))
4046 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4050 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4051 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4052 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4053 op3
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 3), obj
);
4055 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4056 && op2
== TREE_OPERAND (exp
, 2)
4057 && op3
== TREE_OPERAND (exp
, 3))
4061 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4073 new_tree
= NULL_TREE
;
4075 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4077 tree op
= TREE_OPERAND (exp
, i
);
4078 tree new_op
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (op
, obj
);
4082 new_tree
= copy_node (exp
);
4083 TREE_OPERAND (new_tree
, i
) = new_op
;
4089 new_tree
= fold (new_tree
);
4090 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4091 process_call_operands (new_tree
);
4102 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4104 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4105 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
4111 /* Subroutine of stabilize_reference; this is called for subtrees of
4112 references. Any expression with side-effects must be put in a SAVE_EXPR
4113 to ensure that it is only evaluated once.
4115 We don't put SAVE_EXPR nodes around everything, because assigning very
4116 simple expressions to temporaries causes us to miss good opportunities
4117 for optimizations. Among other things, the opportunity to fold in the
4118 addition of a constant into an addressing mode often gets lost, e.g.
4119 "y[i+1] += x;". In general, we take the approach that we should not make
4120 an assignment unless we are forced into it - i.e., that any non-side effect
4121 operator should be allowed, and that cse should take care of coalescing
4122 multiple utterances of the same expression should that prove fruitful. */
4125 stabilize_reference_1 (tree e
)
4128 enum tree_code code
= TREE_CODE (e
);
4130 /* We cannot ignore const expressions because it might be a reference
4131 to a const array but whose index contains side-effects. But we can
4132 ignore things that are actual constant or that already have been
4133 handled by this function. */
4135 if (tree_invariant_p (e
))
4138 switch (TREE_CODE_CLASS (code
))
4140 case tcc_exceptional
:
4142 case tcc_declaration
:
4143 case tcc_comparison
:
4145 case tcc_expression
:
4148 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4149 so that it will only be evaluated once. */
4150 /* The reference (r) and comparison (<) classes could be handled as
4151 below, but it is generally faster to only evaluate them once. */
4152 if (TREE_SIDE_EFFECTS (e
))
4153 return save_expr (e
);
4157 /* Constants need no processing. In fact, we should never reach
4162 /* Division is slow and tends to be compiled with jumps,
4163 especially the division by powers of 2 that is often
4164 found inside of an array reference. So do it just once. */
4165 if (code
== TRUNC_DIV_EXPR
|| code
== TRUNC_MOD_EXPR
4166 || code
== FLOOR_DIV_EXPR
|| code
== FLOOR_MOD_EXPR
4167 || code
== CEIL_DIV_EXPR
|| code
== CEIL_MOD_EXPR
4168 || code
== ROUND_DIV_EXPR
|| code
== ROUND_MOD_EXPR
)
4169 return save_expr (e
);
4170 /* Recursively stabilize each operand. */
4171 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)),
4172 stabilize_reference_1 (TREE_OPERAND (e
, 1)));
4176 /* Recursively stabilize each operand. */
4177 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)));
4184 TREE_TYPE (result
) = TREE_TYPE (e
);
4185 TREE_READONLY (result
) = TREE_READONLY (e
);
4186 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (e
);
4187 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (e
);
4192 /* Stabilize a reference so that we can use it any number of times
4193 without causing its operands to be evaluated more than once.
4194 Returns the stabilized reference. This works by means of save_expr,
4195 so see the caveats in the comments about save_expr.
4197 Also allows conversion expressions whose operands are references.
4198 Any other kind of expression is returned unchanged. */
4201 stabilize_reference (tree ref
)
4204 enum tree_code code
= TREE_CODE (ref
);
4211 /* No action is needed in this case. */
4216 case FIX_TRUNC_EXPR
:
4217 result
= build_nt (code
, stabilize_reference (TREE_OPERAND (ref
, 0)));
4221 result
= build_nt (INDIRECT_REF
,
4222 stabilize_reference_1 (TREE_OPERAND (ref
, 0)));
4226 result
= build_nt (COMPONENT_REF
,
4227 stabilize_reference (TREE_OPERAND (ref
, 0)),
4228 TREE_OPERAND (ref
, 1), NULL_TREE
);
4232 result
= build_nt (BIT_FIELD_REF
,
4233 stabilize_reference (TREE_OPERAND (ref
, 0)),
4234 TREE_OPERAND (ref
, 1), TREE_OPERAND (ref
, 2));
4235 REF_REVERSE_STORAGE_ORDER (result
) = REF_REVERSE_STORAGE_ORDER (ref
);
4239 result
= build_nt (ARRAY_REF
,
4240 stabilize_reference (TREE_OPERAND (ref
, 0)),
4241 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4242 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4245 case ARRAY_RANGE_REF
:
4246 result
= build_nt (ARRAY_RANGE_REF
,
4247 stabilize_reference (TREE_OPERAND (ref
, 0)),
4248 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4249 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4253 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4254 it wouldn't be ignored. This matters when dealing with
4256 return stabilize_reference_1 (ref
);
4258 /* If arg isn't a kind of lvalue we recognize, make no change.
4259 Caller should recognize the error for an invalid lvalue. */
4264 return error_mark_node
;
4267 TREE_TYPE (result
) = TREE_TYPE (ref
);
4268 TREE_READONLY (result
) = TREE_READONLY (ref
);
4269 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (ref
);
4270 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (ref
);
4275 /* Low-level constructors for expressions. */
4277 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4278 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4281 recompute_tree_invariant_for_addr_expr (tree t
)
4284 bool tc
= true, se
= false;
4286 gcc_assert (TREE_CODE (t
) == ADDR_EXPR
);
4288 /* We started out assuming this address is both invariant and constant, but
4289 does not have side effects. Now go down any handled components and see if
4290 any of them involve offsets that are either non-constant or non-invariant.
4291 Also check for side-effects.
4293 ??? Note that this code makes no attempt to deal with the case where
4294 taking the address of something causes a copy due to misalignment. */
4296 #define UPDATE_FLAGS(NODE) \
4297 do { tree _node = (NODE); \
4298 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4299 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4301 for (node
= TREE_OPERAND (t
, 0); handled_component_p (node
);
4302 node
= TREE_OPERAND (node
, 0))
4304 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4305 array reference (probably made temporarily by the G++ front end),
4306 so ignore all the operands. */
4307 if ((TREE_CODE (node
) == ARRAY_REF
4308 || TREE_CODE (node
) == ARRAY_RANGE_REF
)
4309 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node
, 0))) == ARRAY_TYPE
)
4311 UPDATE_FLAGS (TREE_OPERAND (node
, 1));
4312 if (TREE_OPERAND (node
, 2))
4313 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4314 if (TREE_OPERAND (node
, 3))
4315 UPDATE_FLAGS (TREE_OPERAND (node
, 3));
4317 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4318 FIELD_DECL, apparently. The G++ front end can put something else
4319 there, at least temporarily. */
4320 else if (TREE_CODE (node
) == COMPONENT_REF
4321 && TREE_CODE (TREE_OPERAND (node
, 1)) == FIELD_DECL
)
4323 if (TREE_OPERAND (node
, 2))
4324 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4328 node
= lang_hooks
.expr_to_decl (node
, &tc
, &se
);
4330 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4331 the address, since &(*a)->b is a form of addition. If it's a constant, the
4332 address is constant too. If it's a decl, its address is constant if the
4333 decl is static. Everything else is not constant and, furthermore,
4334 taking the address of a volatile variable is not volatile. */
4335 if (TREE_CODE (node
) == INDIRECT_REF
4336 || TREE_CODE (node
) == MEM_REF
)
4337 UPDATE_FLAGS (TREE_OPERAND (node
, 0));
4338 else if (CONSTANT_CLASS_P (node
))
4340 else if (DECL_P (node
))
4341 tc
&= (staticp (node
) != NULL_TREE
);
4345 se
|= TREE_SIDE_EFFECTS (node
);
4349 TREE_CONSTANT (t
) = tc
;
4350 TREE_SIDE_EFFECTS (t
) = se
;
4354 /* Build an expression of code CODE, data type TYPE, and operands as
4355 specified. Expressions and reference nodes can be created this way.
4356 Constants, decls, types and misc nodes cannot be.
4358 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4359 enough for all extant tree codes. */
4362 build0 (enum tree_code code
, tree tt MEM_STAT_DECL
)
4366 gcc_assert (TREE_CODE_LENGTH (code
) == 0);
4368 t
= make_node (code PASS_MEM_STAT
);
4375 build1 (enum tree_code code
, tree type
, tree node MEM_STAT_DECL
)
4377 int length
= sizeof (struct tree_exp
);
4380 record_node_allocation_statistics (code
, length
);
4382 gcc_assert (TREE_CODE_LENGTH (code
) == 1);
4384 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
4386 memset (t
, 0, sizeof (struct tree_common
));
4388 TREE_SET_CODE (t
, code
);
4390 TREE_TYPE (t
) = type
;
4391 SET_EXPR_LOCATION (t
, UNKNOWN_LOCATION
);
4392 TREE_OPERAND (t
, 0) = node
;
4393 if (node
&& !TYPE_P (node
))
4395 TREE_SIDE_EFFECTS (t
) = TREE_SIDE_EFFECTS (node
);
4396 TREE_READONLY (t
) = TREE_READONLY (node
);
4399 if (TREE_CODE_CLASS (code
) == tcc_statement
)
4400 TREE_SIDE_EFFECTS (t
) = 1;
4404 /* All of these have side-effects, no matter what their
4406 TREE_SIDE_EFFECTS (t
) = 1;
4407 TREE_READONLY (t
) = 0;
4411 /* Whether a dereference is readonly has nothing to do with whether
4412 its operand is readonly. */
4413 TREE_READONLY (t
) = 0;
4418 recompute_tree_invariant_for_addr_expr (t
);
4422 if ((TREE_CODE_CLASS (code
) == tcc_unary
|| code
== VIEW_CONVERT_EXPR
)
4423 && node
&& !TYPE_P (node
)
4424 && TREE_CONSTANT (node
))
4425 TREE_CONSTANT (t
) = 1;
4426 if (TREE_CODE_CLASS (code
) == tcc_reference
4427 && node
&& TREE_THIS_VOLATILE (node
))
4428 TREE_THIS_VOLATILE (t
) = 1;
/* Fold operand N of the expression T being built into T, and update the
   local flags CONSTANT, READ_ONLY and SIDE_EFFECTS accordingly.  Used by
   build2 through build5; expects locals named t, arg<N>, side_effects,
   read_only and constant to be in scope.  */

#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
4451 build2 (enum tree_code code
, tree tt
, tree arg0
, tree arg1 MEM_STAT_DECL
)
4453 bool constant
, read_only
, side_effects
, div_by_zero
;
4456 gcc_assert (TREE_CODE_LENGTH (code
) == 2);
4458 if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
|| code
== MULT_EXPR
)
4459 && arg0
&& arg1
&& tt
&& POINTER_TYPE_P (tt
)
4460 /* When sizetype precision doesn't match that of pointers
4461 we need to be able to build explicit extensions or truncations
4462 of the offset argument. */
4463 && TYPE_PRECISION (sizetype
) == TYPE_PRECISION (tt
))
4464 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
4465 && TREE_CODE (arg1
) == INTEGER_CST
);
4467 if (code
== POINTER_PLUS_EXPR
&& arg0
&& arg1
&& tt
)
4468 gcc_assert (POINTER_TYPE_P (tt
) && POINTER_TYPE_P (TREE_TYPE (arg0
))
4469 && ptrofftype_p (TREE_TYPE (arg1
)));
4471 t
= make_node (code PASS_MEM_STAT
);
4474 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4475 result based on those same flags for the arguments. But if the
4476 arguments aren't really even `tree' expressions, we shouldn't be trying
4479 /* Expressions without side effects may be constant if their
4480 arguments are as well. */
4481 constant
= (TREE_CODE_CLASS (code
) == tcc_comparison
4482 || TREE_CODE_CLASS (code
) == tcc_binary
);
4484 side_effects
= TREE_SIDE_EFFECTS (t
);
4488 case TRUNC_DIV_EXPR
:
4490 case FLOOR_DIV_EXPR
:
4491 case ROUND_DIV_EXPR
:
4492 case EXACT_DIV_EXPR
:
4494 case FLOOR_MOD_EXPR
:
4495 case ROUND_MOD_EXPR
:
4496 case TRUNC_MOD_EXPR
:
4497 div_by_zero
= integer_zerop (arg1
);
4500 div_by_zero
= false;
4506 TREE_SIDE_EFFECTS (t
) = side_effects
;
4507 if (code
== MEM_REF
)
4509 if (arg0
&& TREE_CODE (arg0
) == ADDR_EXPR
)
4511 tree o
= TREE_OPERAND (arg0
, 0);
4512 TREE_READONLY (t
) = TREE_READONLY (o
);
4513 TREE_THIS_VOLATILE (t
) = TREE_THIS_VOLATILE (o
);
4518 TREE_READONLY (t
) = read_only
;
4519 /* Don't mark X / 0 as constant. */
4520 TREE_CONSTANT (t
) = constant
&& !div_by_zero
;
4521 TREE_THIS_VOLATILE (t
)
4522 = (TREE_CODE_CLASS (code
) == tcc_reference
4523 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4531 build3 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4532 tree arg2 MEM_STAT_DECL
)
4534 bool constant
, read_only
, side_effects
;
4537 gcc_assert (TREE_CODE_LENGTH (code
) == 3);
4538 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
4540 t
= make_node (code PASS_MEM_STAT
);
4545 /* As a special exception, if COND_EXPR has NULL branches, we
4546 assume that it is a gimple statement and always consider
4547 it to have side effects. */
4548 if (code
== COND_EXPR
4549 && tt
== void_type_node
4550 && arg1
== NULL_TREE
4551 && arg2
== NULL_TREE
)
4552 side_effects
= true;
4554 side_effects
= TREE_SIDE_EFFECTS (t
);
4560 if (code
== COND_EXPR
)
4561 TREE_READONLY (t
) = read_only
;
4563 TREE_SIDE_EFFECTS (t
) = side_effects
;
4564 TREE_THIS_VOLATILE (t
)
4565 = (TREE_CODE_CLASS (code
) == tcc_reference
4566 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4572 build4 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4573 tree arg2
, tree arg3 MEM_STAT_DECL
)
4575 bool constant
, read_only
, side_effects
;
4578 gcc_assert (TREE_CODE_LENGTH (code
) == 4);
4580 t
= make_node (code PASS_MEM_STAT
);
4583 side_effects
= TREE_SIDE_EFFECTS (t
);
4590 TREE_SIDE_EFFECTS (t
) = side_effects
;
4591 TREE_THIS_VOLATILE (t
)
4592 = (TREE_CODE_CLASS (code
) == tcc_reference
4593 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4599 build5 (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4600 tree arg2
, tree arg3
, tree arg4 MEM_STAT_DECL
)
4602 bool constant
, read_only
, side_effects
;
4605 gcc_assert (TREE_CODE_LENGTH (code
) == 5);
4607 t
= make_node (code PASS_MEM_STAT
);
4610 side_effects
= TREE_SIDE_EFFECTS (t
);
4618 TREE_SIDE_EFFECTS (t
) = side_effects
;
4619 if (code
== TARGET_MEM_REF
)
4621 if (arg0
&& TREE_CODE (arg0
) == ADDR_EXPR
)
4623 tree o
= TREE_OPERAND (arg0
, 0);
4624 TREE_READONLY (t
) = TREE_READONLY (o
);
4625 TREE_THIS_VOLATILE (t
) = TREE_THIS_VOLATILE (o
);
4629 TREE_THIS_VOLATILE (t
)
4630 = (TREE_CODE_CLASS (code
) == tcc_reference
4631 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4636 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
4637 on the pointer PTR. */
4640 build_simple_mem_ref_loc (location_t loc
, tree ptr
)
4642 HOST_WIDE_INT offset
= 0;
4643 tree ptype
= TREE_TYPE (ptr
);
4645 /* For convenience allow addresses that collapse to a simple base
4647 if (TREE_CODE (ptr
) == ADDR_EXPR
4648 && (handled_component_p (TREE_OPERAND (ptr
, 0))
4649 || TREE_CODE (TREE_OPERAND (ptr
, 0)) == MEM_REF
))
4651 ptr
= get_addr_base_and_unit_offset (TREE_OPERAND (ptr
, 0), &offset
);
4653 if (TREE_CODE (ptr
) == MEM_REF
)
4655 offset
+= mem_ref_offset (ptr
).to_short_addr ();
4656 ptr
= TREE_OPERAND (ptr
, 0);
4659 ptr
= build_fold_addr_expr (ptr
);
4660 gcc_assert (is_gimple_reg (ptr
) || is_gimple_min_invariant (ptr
));
4662 tem
= build2 (MEM_REF
, TREE_TYPE (ptype
),
4663 ptr
, build_int_cst (ptype
, offset
));
4664 SET_EXPR_LOCATION (tem
, loc
);
4668 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4671 mem_ref_offset (const_tree t
)
4673 return offset_int::from (wi::to_wide (TREE_OPERAND (t
, 1)), SIGNED
);
4676 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4677 offsetted by OFFSET units. */
4680 build_invariant_address (tree type
, tree base
, HOST_WIDE_INT offset
)
4682 tree ref
= fold_build2 (MEM_REF
, TREE_TYPE (type
),
4683 build_fold_addr_expr (base
),
4684 build_int_cst (ptr_type_node
, offset
));
4685 tree addr
= build1 (ADDR_EXPR
, type
, ref
);
4686 recompute_tree_invariant_for_addr_expr (addr
);
4690 /* Similar except don't specify the TREE_TYPE
4691 and leave the TREE_SIDE_EFFECTS as 0.
4692 It is permissible for arguments to be null,
4693 or even garbage if their values do not matter. */
4696 build_nt (enum tree_code code
, ...)
4703 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
4707 t
= make_node (code
);
4708 length
= TREE_CODE_LENGTH (code
);
4710 for (i
= 0; i
< length
; i
++)
4711 TREE_OPERAND (t
, i
) = va_arg (p
, tree
);
4717 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4721 build_nt_call_vec (tree fn
, vec
<tree
, va_gc
> *args
)
4726 ret
= build_vl_exp (CALL_EXPR
, vec_safe_length (args
) + 3);
4727 CALL_EXPR_FN (ret
) = fn
;
4728 CALL_EXPR_STATIC_CHAIN (ret
) = NULL_TREE
;
4729 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
4730 CALL_EXPR_ARG (ret
, ix
) = t
;
4734 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4735 We do NOT enter this node in any sort of symbol table.
4737 LOC is the location of the decl.
4739 layout_decl is used to set up the decl's storage layout.
4740 Other slots are initialized to 0 or null pointers. */
4743 build_decl (location_t loc
, enum tree_code code
, tree name
,
4744 tree type MEM_STAT_DECL
)
4748 t
= make_node (code PASS_MEM_STAT
);
4749 DECL_SOURCE_LOCATION (t
) = loc
;
4751 /* if (type == error_mark_node)
4752 type = integer_type_node; */
4753 /* That is not done, deliberately, so that having error_mark_node
4754 as the type can suppress useless errors in the use of this variable. */
4756 DECL_NAME (t
) = name
;
4757 TREE_TYPE (t
) = type
;
4759 if (code
== VAR_DECL
|| code
== PARM_DECL
|| code
== RESULT_DECL
)
4765 /* Builds and returns function declaration with NAME and TYPE. */
4768 build_fn_decl (const char *name
, tree type
)
4770 tree id
= get_identifier (name
);
4771 tree decl
= build_decl (input_location
, FUNCTION_DECL
, id
, type
);
4773 DECL_EXTERNAL (decl
) = 1;
4774 TREE_PUBLIC (decl
) = 1;
4775 DECL_ARTIFICIAL (decl
) = 1;
4776 TREE_NOTHROW (decl
) = 1;
4781 vec
<tree
, va_gc
> *all_translation_units
;
4783 /* Builds a new translation-unit decl with name NAME, queues it in the
4784 global list of translation-unit decls and returns it. */
4787 build_translation_unit_decl (tree name
)
4789 tree tu
= build_decl (UNKNOWN_LOCATION
, TRANSLATION_UNIT_DECL
,
4791 TRANSLATION_UNIT_LANGUAGE (tu
) = lang_hooks
.name
;
4792 vec_safe_push (all_translation_units
, tu
);
4797 /* BLOCK nodes are used to represent the structure of binding contours
4798 and declarations, once those contours have been exited and their contents
4799 compiled. This information is used for outputting debugging info. */
4802 build_block (tree vars
, tree subblocks
, tree supercontext
, tree chain
)
4804 tree block
= make_node (BLOCK
);
4806 BLOCK_VARS (block
) = vars
;
4807 BLOCK_SUBBLOCKS (block
) = subblocks
;
4808 BLOCK_SUPERCONTEXT (block
) = supercontext
;
4809 BLOCK_CHAIN (block
) = chain
;
4814 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4816 LOC is the location to use in tree T. */
4819 protected_set_expr_location (tree t
, location_t loc
)
4821 if (CAN_HAVE_LOCATION_P (t
))
4822 SET_EXPR_LOCATION (t
, loc
);
4825 /* Reset the expression *EXPR_P, a size or position.
4827 ??? We could reset all non-constant sizes or positions. But it's cheap
4828 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4830 We need to reset self-referential sizes or positions because they cannot
4831 be gimplified and thus can contain a CALL_EXPR after the gimplification
4832 is finished, which will run afoul of LTO streaming. And they need to be
4833 reset to something essentially dummy but not constant, so as to preserve
4834 the properties of the object they are attached to. */
4837 free_lang_data_in_one_sizepos (tree
*expr_p
)
4839 tree expr
= *expr_p
;
4840 if (CONTAINS_PLACEHOLDER_P (expr
))
4841 *expr_p
= build0 (PLACEHOLDER_EXPR
, TREE_TYPE (expr
));
4845 /* Reset all the fields in a binfo node BINFO. We only keep
4846 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4849 free_lang_data_in_binfo (tree binfo
)
4854 gcc_assert (TREE_CODE (binfo
) == TREE_BINFO
);
4856 BINFO_VIRTUALS (binfo
) = NULL_TREE
;
4857 BINFO_BASE_ACCESSES (binfo
) = NULL
;
4858 BINFO_INHERITANCE_CHAIN (binfo
) = NULL_TREE
;
4859 BINFO_SUBVTT_INDEX (binfo
) = NULL_TREE
;
4861 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo
), i
, t
)
4862 free_lang_data_in_binfo (t
);
4866 /* Reset all language specific information still present in TYPE. */
4869 free_lang_data_in_type (tree type
)
4871 gcc_assert (TYPE_P (type
));
4873 /* Give the FE a chance to remove its own data first. */
4874 lang_hooks
.free_lang_data (type
);
4876 TREE_LANG_FLAG_0 (type
) = 0;
4877 TREE_LANG_FLAG_1 (type
) = 0;
4878 TREE_LANG_FLAG_2 (type
) = 0;
4879 TREE_LANG_FLAG_3 (type
) = 0;
4880 TREE_LANG_FLAG_4 (type
) = 0;
4881 TREE_LANG_FLAG_5 (type
) = 0;
4882 TREE_LANG_FLAG_6 (type
) = 0;
4884 if (TREE_CODE (type
) == FUNCTION_TYPE
)
4886 /* Remove the const and volatile qualifiers from arguments. The
4887 C++ front end removes them, but the C front end does not,
4888 leading to false ODR violation errors when merging two
4889 instances of the same function signature compiled by
4890 different front ends. */
4891 for (tree p
= TYPE_ARG_TYPES (type
); p
; p
= TREE_CHAIN (p
))
4893 tree arg_type
= TREE_VALUE (p
);
4895 if (TYPE_READONLY (arg_type
) || TYPE_VOLATILE (arg_type
))
4897 int quals
= TYPE_QUALS (arg_type
)
4899 & ~TYPE_QUAL_VOLATILE
;
4900 TREE_VALUE (p
) = build_qualified_type (arg_type
, quals
);
4901 free_lang_data_in_type (TREE_VALUE (p
));
4903 /* C++ FE uses TREE_PURPOSE to store initial values. */
4904 TREE_PURPOSE (p
) = NULL
;
4907 else if (TREE_CODE (type
) == METHOD_TYPE
)
4908 for (tree p
= TYPE_ARG_TYPES (type
); p
; p
= TREE_CHAIN (p
))
4909 /* C++ FE uses TREE_PURPOSE to store initial values. */
4910 TREE_PURPOSE (p
) = NULL
;
4911 else if (RECORD_OR_UNION_TYPE_P (type
))
4913 /* Remove members that are not FIELD_DECLs (and maybe
4914 TYPE_DECLs) from the field list of an aggregate. These occur
4916 for (tree
*prev
= &TYPE_FIELDS (type
), member
; (member
= *prev
);)
4917 if (TREE_CODE (member
) == FIELD_DECL
4918 || (TREE_CODE (member
) == TYPE_DECL
4919 && !DECL_IGNORED_P (member
)
4920 && debug_info_level
> DINFO_LEVEL_TERSE
4921 && !is_redundant_typedef (member
)))
4922 prev
= &DECL_CHAIN (member
);
4924 *prev
= DECL_CHAIN (member
);
4926 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
4927 and danagle the pointer from time to time. */
4928 if (TYPE_VFIELD (type
) && TREE_CODE (TYPE_VFIELD (type
)) != FIELD_DECL
)
4929 TYPE_VFIELD (type
) = NULL_TREE
;
4931 if (TYPE_BINFO (type
))
4933 free_lang_data_in_binfo (TYPE_BINFO (type
));
4934 /* We need to preserve link to bases and virtual table for all
4935 polymorphic types to make devirtualization machinery working.
4936 Debug output cares only about bases, but output also
4937 virtual table pointers so merging of -fdevirtualize and
4938 -fno-devirtualize units is easier. */
4939 if ((!BINFO_VTABLE (TYPE_BINFO (type
))
4940 || !flag_devirtualize
)
4941 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type
))
4942 && !BINFO_VTABLE (TYPE_BINFO (type
)))
4943 || debug_info_level
!= DINFO_LEVEL_NONE
))
4944 TYPE_BINFO (type
) = NULL
;
4947 else if (INTEGRAL_TYPE_P (type
)
4948 || SCALAR_FLOAT_TYPE_P (type
)
4949 || FIXED_POINT_TYPE_P (type
))
4951 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type
));
4952 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type
));
4955 TYPE_LANG_SLOT_1 (type
) = NULL_TREE
;
4957 free_lang_data_in_one_sizepos (&TYPE_SIZE (type
));
4958 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type
));
4960 if (TYPE_CONTEXT (type
)
4961 && TREE_CODE (TYPE_CONTEXT (type
)) == BLOCK
)
4963 tree ctx
= TYPE_CONTEXT (type
);
4966 ctx
= BLOCK_SUPERCONTEXT (ctx
);
4968 while (ctx
&& TREE_CODE (ctx
) == BLOCK
);
4969 TYPE_CONTEXT (type
) = ctx
;
4974 /* Return true if DECL may need an assembler name to be set. */
4977 need_assembler_name_p (tree decl
)
4979 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
4980 Rule merging. This makes type_odr_p to return true on those types during
4981 LTO and by comparing the mangled name, we can say what types are intended
4982 to be equivalent across compilation unit.
4984 We do not store names of type_in_anonymous_namespace_p.
4986 Record, union and enumeration type have linkage that allows use
4987 to check type_in_anonymous_namespace_p. We do not mangle compound types
4988 that always can be compared structurally.
4990 Similarly for builtin types, we compare properties of their main variant.
4991 A special case are integer types where mangling do make differences
4992 between char/signed char/unsigned char etc. Storing name for these makes
4993 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
4994 See cp/mangle.c:write_builtin_type for details. */
4996 if (flag_lto_odr_type_mering
4997 && TREE_CODE (decl
) == TYPE_DECL
4999 && decl
== TYPE_NAME (TREE_TYPE (decl
))
5000 && TYPE_MAIN_VARIANT (TREE_TYPE (decl
)) == TREE_TYPE (decl
)
5001 && !TYPE_ARTIFICIAL (TREE_TYPE (decl
))
5002 && (type_with_linkage_p (TREE_TYPE (decl
))
5003 || TREE_CODE (TREE_TYPE (decl
)) == INTEGER_TYPE
)
5004 && !variably_modified_type_p (TREE_TYPE (decl
), NULL_TREE
))
5005 return !DECL_ASSEMBLER_NAME_SET_P (decl
);
5006 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5007 if (!VAR_OR_FUNCTION_DECL_P (decl
))
5010 /* If DECL already has its assembler name set, it does not need a
5012 if (!HAS_DECL_ASSEMBLER_NAME_P (decl
)
5013 || DECL_ASSEMBLER_NAME_SET_P (decl
))
5016 /* Abstract decls do not need an assembler name. */
5017 if (DECL_ABSTRACT_P (decl
))
5020 /* For VAR_DECLs, only static, public and external symbols need an
5023 && !TREE_STATIC (decl
)
5024 && !TREE_PUBLIC (decl
)
5025 && !DECL_EXTERNAL (decl
))
5028 if (TREE_CODE (decl
) == FUNCTION_DECL
)
5030 /* Do not set assembler name on builtins. Allow RTL expansion to
5031 decide whether to expand inline or via a regular call. */
5032 if (DECL_BUILT_IN (decl
)
5033 && DECL_BUILT_IN_CLASS (decl
) != BUILT_IN_FRONTEND
)
5036 /* Functions represented in the callgraph need an assembler name. */
5037 if (cgraph_node::get (decl
) != NULL
)
5040 /* Unused and not public functions don't need an assembler name. */
5041 if (!TREE_USED (decl
) && !TREE_PUBLIC (decl
))
5049 /* Reset all language specific information still present in symbol
5053 free_lang_data_in_decl (tree decl
)
5055 gcc_assert (DECL_P (decl
));
5057 /* Give the FE a chance to remove its own data first. */
5058 lang_hooks
.free_lang_data (decl
);
5060 TREE_LANG_FLAG_0 (decl
) = 0;
5061 TREE_LANG_FLAG_1 (decl
) = 0;
5062 TREE_LANG_FLAG_2 (decl
) = 0;
5063 TREE_LANG_FLAG_3 (decl
) = 0;
5064 TREE_LANG_FLAG_4 (decl
) = 0;
5065 TREE_LANG_FLAG_5 (decl
) = 0;
5066 TREE_LANG_FLAG_6 (decl
) = 0;
5068 free_lang_data_in_one_sizepos (&DECL_SIZE (decl
));
5069 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl
));
5070 if (TREE_CODE (decl
) == FIELD_DECL
)
5072 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl
));
5073 if (TREE_CODE (DECL_CONTEXT (decl
)) == QUAL_UNION_TYPE
)
5074 DECL_QUALIFIER (decl
) = NULL_TREE
;
5077 if (TREE_CODE (decl
) == FUNCTION_DECL
)
5079 struct cgraph_node
*node
;
5080 if (!(node
= cgraph_node::get (decl
))
5081 || (!node
->definition
&& !node
->clones
))
5084 node
->release_body ();
5087 release_function_body (decl
);
5088 DECL_ARGUMENTS (decl
) = NULL
;
5089 DECL_RESULT (decl
) = NULL
;
5090 DECL_INITIAL (decl
) = error_mark_node
;
5093 if (gimple_has_body_p (decl
) || (node
&& node
->thunk
.thunk_p
))
5097 /* If DECL has a gimple body, then the context for its
5098 arguments must be DECL. Otherwise, it doesn't really
5099 matter, as we will not be emitting any code for DECL. In
5100 general, there may be other instances of DECL created by
5101 the front end and since PARM_DECLs are generally shared,
5102 their DECL_CONTEXT changes as the replicas of DECL are
5103 created. The only time where DECL_CONTEXT is important
5104 is for the FUNCTION_DECLs that have a gimple body (since
5105 the PARM_DECL will be used in the function's body). */
5106 for (t
= DECL_ARGUMENTS (decl
); t
; t
= TREE_CHAIN (t
))
5107 DECL_CONTEXT (t
) = decl
;
5108 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl
))
5109 DECL_FUNCTION_SPECIFIC_TARGET (decl
)
5110 = target_option_default_node
;
5111 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
))
5112 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl
)
5113 = optimization_default_node
;
5116 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5117 At this point, it is not needed anymore. */
5118 DECL_SAVED_TREE (decl
) = NULL_TREE
;
5120 /* Clear the abstract origin if it refers to a method.
5121 Otherwise dwarf2out.c will ICE as we splice functions out of
5122 TYPE_FIELDS and thus the origin will not be output
5124 if (DECL_ABSTRACT_ORIGIN (decl
)
5125 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl
))
5126 && RECORD_OR_UNION_TYPE_P
5127 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl
))))
5128 DECL_ABSTRACT_ORIGIN (decl
) = NULL_TREE
;
5130 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5131 DECL_VINDEX referring to itself into a vtable slot number as it
5132 should. Happens with functions that are copied and then forgotten
5133 about. Just clear it, it won't matter anymore. */
5134 if (DECL_VINDEX (decl
) && !tree_fits_shwi_p (DECL_VINDEX (decl
)))
5135 DECL_VINDEX (decl
) = NULL_TREE
;
5137 else if (VAR_P (decl
))
5139 if ((DECL_EXTERNAL (decl
)
5140 && (!TREE_STATIC (decl
) || !TREE_READONLY (decl
)))
5141 || (decl_function_context (decl
) && !TREE_STATIC (decl
)))
5142 DECL_INITIAL (decl
) = NULL_TREE
;
5144 else if (TREE_CODE (decl
) == TYPE_DECL
)
5146 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5147 DECL_VISIBILITY_SPECIFIED (decl
) = 0;
5148 DECL_INITIAL (decl
) = NULL_TREE
;
5150 else if (TREE_CODE (decl
) == FIELD_DECL
)
5151 DECL_INITIAL (decl
) = NULL_TREE
;
5152 else if (TREE_CODE (decl
) == TRANSLATION_UNIT_DECL
5153 && DECL_INITIAL (decl
)
5154 && TREE_CODE (DECL_INITIAL (decl
)) == BLOCK
)
5156 /* Strip builtins from the translation-unit BLOCK. We still have targets
5157 without builtin_decl_explicit support and also builtins are shared
5158 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5159 tree
*nextp
= &BLOCK_VARS (DECL_INITIAL (decl
));
5163 if (TREE_CODE (var
) == FUNCTION_DECL
5164 && DECL_BUILT_IN (var
))
5165 *nextp
= TREE_CHAIN (var
);
5167 nextp
= &TREE_CHAIN (var
);
5173 /* Data used when collecting DECLs and TYPEs for language data removal. */
5175 struct free_lang_data_d
5177 free_lang_data_d () : decls (100), types (100) {}
5179 /* Worklist to avoid excessive recursion. */
5180 auto_vec
<tree
> worklist
;
5182 /* Set of traversed objects. Used to avoid duplicate visits. */
5183 hash_set
<tree
> pset
;
5185 /* Array of symbols to process with free_lang_data_in_decl. */
5186 auto_vec
<tree
> decls
;
5188 /* Array of types to process with free_lang_data_in_type. */
5189 auto_vec
<tree
> types
;
5193 /* Save all language fields needed to generate proper debug information
5194 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5197 save_debug_info_for_decl (tree t
)
5199 /*struct saved_debug_info_d *sdi;*/
5201 gcc_assert (debug_info_level
> DINFO_LEVEL_TERSE
&& t
&& DECL_P (t
));
5203 /* FIXME. Partial implementation for saving debug info removed. */
5207 /* Save all language fields needed to generate proper debug information
5208 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5211 save_debug_info_for_type (tree t
)
5213 /*struct saved_debug_info_d *sdi;*/
5215 gcc_assert (debug_info_level
> DINFO_LEVEL_TERSE
&& t
&& TYPE_P (t
));
5217 /* FIXME. Partial implementation for saving debug info removed. */
5221 /* Add type or decl T to one of the list of tree nodes that need their
5222 language data removed. The lists are held inside FLD. */
5225 add_tree_to_fld_list (tree t
, struct free_lang_data_d
*fld
)
5229 fld
->decls
.safe_push (t
);
5230 if (debug_info_level
> DINFO_LEVEL_TERSE
)
5231 save_debug_info_for_decl (t
);
5233 else if (TYPE_P (t
))
5235 fld
->types
.safe_push (t
);
5236 if (debug_info_level
> DINFO_LEVEL_TERSE
)
5237 save_debug_info_for_type (t
);
5243 /* Push tree node T into FLD->WORKLIST. */
5246 fld_worklist_push (tree t
, struct free_lang_data_d
*fld
)
5248 if (t
&& !is_lang_specific (t
) && !fld
->pset
.contains (t
))
5249 fld
->worklist
.safe_push ((t
));
5253 /* Operand callback helper for free_lang_data_in_node. *TP is the
5254 subtree operand being considered. */
5257 find_decls_types_r (tree
*tp
, int *ws
, void *data
)
5260 struct free_lang_data_d
*fld
= (struct free_lang_data_d
*) data
;
5262 if (TREE_CODE (t
) == TREE_LIST
)
5265 /* Language specific nodes will be removed, so there is no need
5266 to gather anything under them. */
5267 if (is_lang_specific (t
))
5275 /* Note that walk_tree does not traverse every possible field in
5276 decls, so we have to do our own traversals here. */
5277 add_tree_to_fld_list (t
, fld
);
5279 fld_worklist_push (DECL_NAME (t
), fld
);
5280 fld_worklist_push (DECL_CONTEXT (t
), fld
);
5281 fld_worklist_push (DECL_SIZE (t
), fld
);
5282 fld_worklist_push (DECL_SIZE_UNIT (t
), fld
);
5284 /* We are going to remove everything under DECL_INITIAL for
5285 TYPE_DECLs. No point walking them. */
5286 if (TREE_CODE (t
) != TYPE_DECL
)
5287 fld_worklist_push (DECL_INITIAL (t
), fld
);
5289 fld_worklist_push (DECL_ATTRIBUTES (t
), fld
);
5290 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t
), fld
);
5292 if (TREE_CODE (t
) == FUNCTION_DECL
)
5294 fld_worklist_push (DECL_ARGUMENTS (t
), fld
);
5295 fld_worklist_push (DECL_RESULT (t
), fld
);
5297 else if (TREE_CODE (t
) == TYPE_DECL
)
5299 fld_worklist_push (DECL_ORIGINAL_TYPE (t
), fld
);
5301 else if (TREE_CODE (t
) == FIELD_DECL
)
5303 fld_worklist_push (DECL_FIELD_OFFSET (t
), fld
);
5304 fld_worklist_push (DECL_BIT_FIELD_TYPE (t
), fld
);
5305 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t
), fld
);
5306 fld_worklist_push (DECL_FCONTEXT (t
), fld
);
5309 if ((VAR_P (t
) || TREE_CODE (t
) == PARM_DECL
)
5310 && DECL_HAS_VALUE_EXPR_P (t
))
5311 fld_worklist_push (DECL_VALUE_EXPR (t
), fld
);
5313 if (TREE_CODE (t
) != FIELD_DECL
5314 && TREE_CODE (t
) != TYPE_DECL
)
5315 fld_worklist_push (TREE_CHAIN (t
), fld
);
5318 else if (TYPE_P (t
))
5320 /* Note that walk_tree does not traverse every possible field in
5321 types, so we have to do our own traversals here. */
5322 add_tree_to_fld_list (t
, fld
);
5324 if (!RECORD_OR_UNION_TYPE_P (t
))
5325 fld_worklist_push (TYPE_CACHED_VALUES (t
), fld
);
5326 fld_worklist_push (TYPE_SIZE (t
), fld
);
5327 fld_worklist_push (TYPE_SIZE_UNIT (t
), fld
);
5328 fld_worklist_push (TYPE_ATTRIBUTES (t
), fld
);
5329 fld_worklist_push (TYPE_POINTER_TO (t
), fld
);
5330 fld_worklist_push (TYPE_REFERENCE_TO (t
), fld
);
5331 fld_worklist_push (TYPE_NAME (t
), fld
);
5332 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5333 them and thus do not and want not to reach unused pointer types
5335 if (!POINTER_TYPE_P (t
))
5336 fld_worklist_push (TYPE_MIN_VALUE_RAW (t
), fld
);
5337 /* TYPE_MAX_VALUE_RAW is TYPE_BINFO for record types. */
5338 if (!RECORD_OR_UNION_TYPE_P (t
))
5339 fld_worklist_push (TYPE_MAX_VALUE_RAW (t
), fld
);
5340 fld_worklist_push (TYPE_MAIN_VARIANT (t
), fld
);
5341 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5342 do not and want not to reach unused variants this way. */
5343 if (TYPE_CONTEXT (t
))
5345 tree ctx
= TYPE_CONTEXT (t
);
5346 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5347 So push that instead. */
5348 while (ctx
&& TREE_CODE (ctx
) == BLOCK
)
5349 ctx
= BLOCK_SUPERCONTEXT (ctx
);
5350 fld_worklist_push (ctx
, fld
);
5352 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5353 and want not to reach unused types this way. */
5355 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_BINFO (t
))
5359 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t
)), i
, tem
)
5360 fld_worklist_push (TREE_TYPE (tem
), fld
);
5361 fld_worklist_push (BINFO_VIRTUALS (TYPE_BINFO (t
)), fld
);
5363 if (RECORD_OR_UNION_TYPE_P (t
))
5366 /* Push all TYPE_FIELDS - there can be interleaving interesting
5367 and non-interesting things. */
5368 tem
= TYPE_FIELDS (t
);
5371 if (TREE_CODE (tem
) == FIELD_DECL
5372 || (TREE_CODE (tem
) == TYPE_DECL
5373 && !DECL_IGNORED_P (tem
)
5374 && debug_info_level
> DINFO_LEVEL_TERSE
5375 && !is_redundant_typedef (tem
)))
5376 fld_worklist_push (tem
, fld
);
5377 tem
= TREE_CHAIN (tem
);
5381 fld_worklist_push (TYPE_STUB_DECL (t
), fld
);
5384 else if (TREE_CODE (t
) == BLOCK
)
5387 for (tem
= BLOCK_VARS (t
); tem
; tem
= TREE_CHAIN (tem
))
5388 fld_worklist_push (tem
, fld
);
5389 for (tem
= BLOCK_SUBBLOCKS (t
); tem
; tem
= BLOCK_CHAIN (tem
))
5390 fld_worklist_push (tem
, fld
);
5391 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t
), fld
);
5394 if (TREE_CODE (t
) != IDENTIFIER_NODE
5395 && CODE_CONTAINS_STRUCT (TREE_CODE (t
), TS_TYPED
))
5396 fld_worklist_push (TREE_TYPE (t
), fld
);
5402 /* Find decls and types in T. */
5405 find_decls_types (tree t
, struct free_lang_data_d
*fld
)
5409 if (!fld
->pset
.contains (t
))
5410 walk_tree (&t
, find_decls_types_r
, fld
, &fld
->pset
);
5411 if (fld
->worklist
.is_empty ())
5413 t
= fld
->worklist
.pop ();
5417 /* Translate all the types in LIST with the corresponding runtime
5421 get_eh_types_for_runtime (tree list
)
5425 if (list
== NULL_TREE
)
5428 head
= build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list
)));
5430 list
= TREE_CHAIN (list
);
5433 tree n
= build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list
)));
5434 TREE_CHAIN (prev
) = n
;
5435 prev
= TREE_CHAIN (prev
);
5436 list
= TREE_CHAIN (list
);
5443 /* Find decls and types referenced in EH region R and store them in
5444 FLD->DECLS and FLD->TYPES. */
5447 find_decls_types_in_eh_region (eh_region r
, struct free_lang_data_d
*fld
)
5458 /* The types referenced in each catch must first be changed to the
5459 EH types used at runtime. This removes references to FE types
5461 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
5463 c
->type_list
= get_eh_types_for_runtime (c
->type_list
);
5464 walk_tree (&c
->type_list
, find_decls_types_r
, fld
, &fld
->pset
);
5469 case ERT_ALLOWED_EXCEPTIONS
:
5470 r
->u
.allowed
.type_list
5471 = get_eh_types_for_runtime (r
->u
.allowed
.type_list
);
5472 walk_tree (&r
->u
.allowed
.type_list
, find_decls_types_r
, fld
, &fld
->pset
);
5475 case ERT_MUST_NOT_THROW
:
5476 walk_tree (&r
->u
.must_not_throw
.failure_decl
,
5477 find_decls_types_r
, fld
, &fld
->pset
);
5483 /* Find decls and types referenced in cgraph node N and store them in
5484 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5485 look for *every* kind of DECL and TYPE node reachable from N,
5486 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5487 NAMESPACE_DECLs, etc). */
5490 find_decls_types_in_node (struct cgraph_node
*n
, struct free_lang_data_d
*fld
)
5493 struct function
*fn
;
5497 find_decls_types (n
->decl
, fld
);
5499 if (!gimple_has_body_p (n
->decl
))
5502 gcc_assert (current_function_decl
== NULL_TREE
&& cfun
== NULL
);
5504 fn
= DECL_STRUCT_FUNCTION (n
->decl
);
5506 /* Traverse locals. */
5507 FOR_EACH_LOCAL_DECL (fn
, ix
, t
)
5508 find_decls_types (t
, fld
);
5510 /* Traverse EH regions in FN. */
5513 FOR_ALL_EH_REGION_FN (r
, fn
)
5514 find_decls_types_in_eh_region (r
, fld
);
5517 /* Traverse every statement in FN. */
5518 FOR_EACH_BB_FN (bb
, fn
)
5521 gimple_stmt_iterator si
;
5524 for (psi
= gsi_start_phis (bb
); !gsi_end_p (psi
); gsi_next (&psi
))
5526 gphi
*phi
= psi
.phi ();
5528 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5530 tree
*arg_p
= gimple_phi_arg_def_ptr (phi
, i
);
5531 find_decls_types (*arg_p
, fld
);
5535 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
5537 gimple
*stmt
= gsi_stmt (si
);
5539 if (is_gimple_call (stmt
))
5540 find_decls_types (gimple_call_fntype (stmt
), fld
);
5542 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
5544 tree arg
= gimple_op (stmt
, i
);
5545 find_decls_types (arg
, fld
);
5552 /* Find decls and types referenced in varpool node N and store them in
5553 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5554 look for *every* kind of DECL and TYPE node reachable from N,
5555 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5556 NAMESPACE_DECLs, etc). */
5559 find_decls_types_in_var (varpool_node
*v
, struct free_lang_data_d
*fld
)
5561 find_decls_types (v
->decl
, fld
);
5564 /* If T needs an assembler name, have one created for it. */
5567 assign_assembler_name_if_needed (tree t
)
5569 if (need_assembler_name_p (t
))
5571 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5572 diagnostics that use input_location to show locus
5573 information. The problem here is that, at this point,
5574 input_location is generally anchored to the end of the file
5575 (since the parser is long gone), so we don't have a good
5576 position to pin it to.
5578 To alleviate this problem, this uses the location of T's
5579 declaration. Examples of this are
5580 testsuite/g++.dg/template/cond2.C and
5581 testsuite/g++.dg/template/pr35240.C. */
5582 location_t saved_location
= input_location
;
5583 input_location
= DECL_SOURCE_LOCATION (t
);
5585 decl_assembler_name (t
);
5587 input_location
= saved_location
;
5592 /* Free language specific information for every operand and expression
5593 in every node of the call graph. This process operates in three stages:
5595 1- Every callgraph node and varpool node is traversed looking for
5596 decls and types embedded in them. This is a more exhaustive
5597 search than that done by find_referenced_vars, because it will
5598 also collect individual fields, decls embedded in types, etc.
5600 2- All the decls found are sent to free_lang_data_in_decl.
5602 3- All the types found are sent to free_lang_data_in_type.
5604 The ordering between decls and types is important because
5605 free_lang_data_in_decl sets assembler names, which includes
5606 mangling. So types cannot be freed up until assembler names have
5610 free_lang_data_in_cgraph (void)
5612 struct cgraph_node
*n
;
5614 struct free_lang_data_d fld
;
5619 /* Find decls and types in the body of every function in the callgraph. */
5620 FOR_EACH_FUNCTION (n
)
5621 find_decls_types_in_node (n
, &fld
);
5623 FOR_EACH_VEC_SAFE_ELT (alias_pairs
, i
, p
)
5624 find_decls_types (p
->decl
, &fld
);
5626 /* Find decls and types in every varpool symbol. */
5627 FOR_EACH_VARIABLE (v
)
5628 find_decls_types_in_var (v
, &fld
);
5630 /* Set the assembler name on every decl found. We need to do this
5631 now because free_lang_data_in_decl will invalidate data needed
5632 for mangling. This breaks mangling on interdependent decls. */
5633 FOR_EACH_VEC_ELT (fld
.decls
, i
, t
)
5634 assign_assembler_name_if_needed (t
);
5636 /* Traverse every decl found freeing its language data. */
5637 FOR_EACH_VEC_ELT (fld
.decls
, i
, t
)
5638 free_lang_data_in_decl (t
);
5640 /* Traverse every type found freeing its language data. */
5641 FOR_EACH_VEC_ELT (fld
.types
, i
, t
)
5642 free_lang_data_in_type (t
);
5645 FOR_EACH_VEC_ELT (fld
.types
, i
, t
)
5651 /* Free resources that are used by FE but are not needed once they are done. */
5654 free_lang_data (void)
5658 /* If we are the LTO frontend we have freed lang-specific data already. */
5660 || (!flag_generate_lto
&& !flag_generate_offload
))
5663 /* Provide a dummy TRANSLATION_UNIT_DECL if the FE failed to provide one. */
5664 if (vec_safe_is_empty (all_translation_units
))
5665 build_translation_unit_decl (NULL_TREE
);
5667 /* Allocate and assign alias sets to the standard integer types
5668 while the slots are still in the way the frontends generated them. */
5669 for (i
= 0; i
< itk_none
; ++i
)
5670 if (integer_types
[i
])
5671 TYPE_ALIAS_SET (integer_types
[i
]) = get_alias_set (integer_types
[i
]);
5673 /* Traverse the IL resetting language specific information for
5674 operands, expressions, etc. */
5675 free_lang_data_in_cgraph ();
5677 /* Create gimple variants for common types. */
5678 for (unsigned i
= 0;
5679 i
< sizeof (builtin_structptr_types
) / sizeof (builtin_structptr_type
);
5681 builtin_structptr_types
[i
].node
= builtin_structptr_types
[i
].base
;
5683 /* Reset some langhooks. Do not reset types_compatible_p, it may
5684 still be used indirectly via the get_alias_set langhook. */
5685 lang_hooks
.dwarf_name
= lhd_dwarf_name
;
5686 lang_hooks
.decl_printable_name
= gimple_decl_printable_name
;
5687 lang_hooks
.gimplify_expr
= lhd_gimplify_expr
;
5689 /* We do not want the default decl_assembler_name implementation,
5690 rather if we have fixed everything we want a wrapper around it
5691 asserting that all non-local symbols already got their assembler
5692 name and only produce assembler names for local symbols. Or rather
5693 make sure we never call decl_assembler_name on local symbols and
5694 devise a separate, middle-end private scheme for it. */
5696 /* Reset diagnostic machinery. */
5697 tree_diagnostics_defaults (global_dc
);
5705 const pass_data pass_data_ipa_free_lang_data
=
5707 SIMPLE_IPA_PASS
, /* type */
5708 "*free_lang_data", /* name */
5709 OPTGROUP_NONE
, /* optinfo_flags */
5710 TV_IPA_FREE_LANG_DATA
, /* tv_id */
5711 0, /* properties_required */
5712 0, /* properties_provided */
5713 0, /* properties_destroyed */
5714 0, /* todo_flags_start */
5715 0, /* todo_flags_finish */
5718 class pass_ipa_free_lang_data
: public simple_ipa_opt_pass
5721 pass_ipa_free_lang_data (gcc::context
*ctxt
)
5722 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data
, ctxt
)
5725 /* opt_pass methods: */
5726 virtual unsigned int execute (function
*) { return free_lang_data (); }
5728 }; // class pass_ipa_free_lang_data
5732 simple_ipa_opt_pass
*
5733 make_pass_ipa_free_lang_data (gcc::context
*ctxt
)
5735 return new pass_ipa_free_lang_data (ctxt
);
5738 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
5739 of the various TYPE_QUAL values. */
5742 set_type_quals (tree type
, int type_quals
)
5744 TYPE_READONLY (type
) = (type_quals
& TYPE_QUAL_CONST
) != 0;
5745 TYPE_VOLATILE (type
) = (type_quals
& TYPE_QUAL_VOLATILE
) != 0;
5746 TYPE_RESTRICT (type
) = (type_quals
& TYPE_QUAL_RESTRICT
) != 0;
5747 TYPE_ATOMIC (type
) = (type_quals
& TYPE_QUAL_ATOMIC
) != 0;
5748 TYPE_ADDR_SPACE (type
) = DECODE_QUAL_ADDR_SPACE (type_quals
);
5751 /* Returns true iff CAND and BASE have equivalent language-specific
5755 check_lang_type (const_tree cand
, const_tree base
)
5757 if (lang_hooks
.types
.type_hash_eq
== NULL
)
5759 /* type_hash_eq currently only applies to these types. */
5760 if (TREE_CODE (cand
) != FUNCTION_TYPE
5761 && TREE_CODE (cand
) != METHOD_TYPE
)
5763 return lang_hooks
.types
.type_hash_eq (cand
, base
);
5766 /* Returns true iff unqualified CAND and BASE are equivalent. */
5769 check_base_type (const_tree cand
, const_tree base
)
5771 return (TYPE_NAME (cand
) == TYPE_NAME (base
)
5772 /* Apparently this is needed for Objective-C. */
5773 && TYPE_CONTEXT (cand
) == TYPE_CONTEXT (base
)
5774 /* Check alignment. */
5775 && TYPE_ALIGN (cand
) == TYPE_ALIGN (base
)
5776 && attribute_list_equal (TYPE_ATTRIBUTES (cand
),
5777 TYPE_ATTRIBUTES (base
)));
5780 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
5783 check_qualified_type (const_tree cand
, const_tree base
, int type_quals
)
5785 return (TYPE_QUALS (cand
) == type_quals
5786 && check_base_type (cand
, base
)
5787 && check_lang_type (cand
, base
));
5790 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
5793 check_aligned_type (const_tree cand
, const_tree base
, unsigned int align
)
5795 return (TYPE_QUALS (cand
) == TYPE_QUALS (base
)
5796 && TYPE_NAME (cand
) == TYPE_NAME (base
)
5797 /* Apparently this is needed for Objective-C. */
5798 && TYPE_CONTEXT (cand
) == TYPE_CONTEXT (base
)
5799 /* Check alignment. */
5800 && TYPE_ALIGN (cand
) == align
5801 && attribute_list_equal (TYPE_ATTRIBUTES (cand
),
5802 TYPE_ATTRIBUTES (base
))
5803 && check_lang_type (cand
, base
));
5806 /* This function checks to see if TYPE matches the size one of the built-in
5807 atomic types, and returns that core atomic type. */
5810 find_atomic_core_type (tree type
)
5812 tree base_atomic_type
;
5814 /* Only handle complete types. */
5815 if (!tree_fits_uhwi_p (TYPE_SIZE (type
)))
5818 switch (tree_to_uhwi (TYPE_SIZE (type
)))
5821 base_atomic_type
= atomicQI_type_node
;
5825 base_atomic_type
= atomicHI_type_node
;
5829 base_atomic_type
= atomicSI_type_node
;
5833 base_atomic_type
= atomicDI_type_node
;
5837 base_atomic_type
= atomicTI_type_node
;
5841 base_atomic_type
= NULL_TREE
;
5844 return base_atomic_type
;
5847 /* Return a version of the TYPE, qualified as indicated by the
5848 TYPE_QUALS, if one exists. If no qualified version exists yet,
5849 return NULL_TREE. */
5852 get_qualified_type (tree type
, int type_quals
)
5856 if (TYPE_QUALS (type
) == type_quals
)
5859 /* Search the chain of variants to see if there is already one there just
5860 like the one we need to have. If so, use that existing one. We must
5861 preserve the TYPE_NAME, since there is code that depends on this. */
5862 for (t
= TYPE_MAIN_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
5863 if (check_qualified_type (t
, type
, type_quals
))
5869 /* Like get_qualified_type, but creates the type if it does not
5870 exist. This function never returns NULL_TREE. */
5873 build_qualified_type (tree type
, int type_quals MEM_STAT_DECL
)
5877 /* See if we already have the appropriate qualified variant. */
5878 t
= get_qualified_type (type
, type_quals
);
5880 /* If not, build it. */
5883 t
= build_variant_type_copy (type PASS_MEM_STAT
);
5884 set_type_quals (t
, type_quals
);
5886 if (((type_quals
& TYPE_QUAL_ATOMIC
) == TYPE_QUAL_ATOMIC
))
5888 /* See if this object can map to a basic atomic type. */
5889 tree atomic_type
= find_atomic_core_type (type
);
5892 /* Ensure the alignment of this type is compatible with
5893 the required alignment of the atomic type. */
5894 if (TYPE_ALIGN (atomic_type
) > TYPE_ALIGN (t
))
5895 SET_TYPE_ALIGN (t
, TYPE_ALIGN (atomic_type
));
5899 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
5900 /* Propagate structural equality. */
5901 SET_TYPE_STRUCTURAL_EQUALITY (t
);
5902 else if (TYPE_CANONICAL (type
) != type
)
5903 /* Build the underlying canonical type, since it is different
5906 tree c
= build_qualified_type (TYPE_CANONICAL (type
), type_quals
);
5907 TYPE_CANONICAL (t
) = TYPE_CANONICAL (c
);
5910 /* T is its own canonical type. */
5911 TYPE_CANONICAL (t
) = t
;
5918 /* Create a variant of type T with alignment ALIGN. */
5921 build_aligned_type (tree type
, unsigned int align
)
5925 if (TYPE_PACKED (type
)
5926 || TYPE_ALIGN (type
) == align
)
5929 for (t
= TYPE_MAIN_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
5930 if (check_aligned_type (t
, type
, align
))
5933 t
= build_variant_type_copy (type
);
5934 SET_TYPE_ALIGN (t
, align
);
5935 TYPE_USER_ALIGN (t
) = 1;
5940 /* Create a new distinct copy of TYPE. The new type is made its own
5941 MAIN_VARIANT. If TYPE requires structural equality checks, the
5942 resulting type requires structural equality checks; otherwise, its
5943 TYPE_CANONICAL points to itself. */
5946 build_distinct_type_copy (tree type MEM_STAT_DECL
)
5948 tree t
= copy_node (type PASS_MEM_STAT
);
5950 TYPE_POINTER_TO (t
) = 0;
5951 TYPE_REFERENCE_TO (t
) = 0;
5953 /* Set the canonical type either to a new equivalence class, or
5954 propagate the need for structural equality checks. */
5955 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
5956 SET_TYPE_STRUCTURAL_EQUALITY (t
);
5958 TYPE_CANONICAL (t
) = t
;
5960 /* Make it its own variant. */
5961 TYPE_MAIN_VARIANT (t
) = t
;
5962 TYPE_NEXT_VARIANT (t
) = 0;
5964 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
5965 whose TREE_TYPE is not t. This can also happen in the Ada
5966 frontend when using subtypes. */
5971 /* Create a new variant of TYPE, equivalent but distinct. This is so
5972 the caller can modify it. TYPE_CANONICAL for the return type will
5973 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
5974 are considered equal by the language itself (or that both types
5975 require structural equality checks). */
5978 build_variant_type_copy (tree type MEM_STAT_DECL
)
5980 tree t
, m
= TYPE_MAIN_VARIANT (type
);
5982 t
= build_distinct_type_copy (type PASS_MEM_STAT
);
5984 /* Since we're building a variant, assume that it is a non-semantic
5985 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
5986 TYPE_CANONICAL (t
) = TYPE_CANONICAL (type
);
5987 /* Type variants have no alias set defined. */
5988 TYPE_ALIAS_SET (t
) = -1;
5990 /* Add the new type to the chain of variants of TYPE. */
5991 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (m
);
5992 TYPE_NEXT_VARIANT (m
) = t
;
5993 TYPE_MAIN_VARIANT (t
) = m
;
5998 /* Return true if the from tree in both tree maps are equal. */
6001 tree_map_base_eq (const void *va
, const void *vb
)
6003 const struct tree_map_base
*const a
= (const struct tree_map_base
*) va
,
6004 *const b
= (const struct tree_map_base
*) vb
;
6005 return (a
->from
== b
->from
);
6008 /* Hash a from tree in a tree_base_map. */
6011 tree_map_base_hash (const void *item
)
6013 return htab_hash_pointer (((const struct tree_map_base
*)item
)->from
);
6016 /* Return true if this tree map structure is marked for garbage collection
6017 purposes. We simply return true if the from tree is marked, so that this
6018 structure goes away when the from tree goes away. */
6021 tree_map_base_marked_p (const void *p
)
6023 return ggc_marked_p (((const struct tree_map_base
*) p
)->from
);
6026 /* Hash a from tree in a tree_map. */
6029 tree_map_hash (const void *item
)
6031 return (((const struct tree_map
*) item
)->hash
);
6034 /* Hash a from tree in a tree_decl_map. */
6037 tree_decl_map_hash (const void *item
)
6039 return DECL_UID (((const struct tree_decl_map
*) item
)->base
.from
);
6042 /* Return the initialization priority for DECL. */
6045 decl_init_priority_lookup (tree decl
)
6047 symtab_node
*snode
= symtab_node::get (decl
);
6050 return DEFAULT_INIT_PRIORITY
;
6052 snode
->get_init_priority ();
6055 /* Return the finalization priority for DECL. */
6058 decl_fini_priority_lookup (tree decl
)
6060 cgraph_node
*node
= cgraph_node::get (decl
);
6063 return DEFAULT_INIT_PRIORITY
;
6065 node
->get_fini_priority ();
6068 /* Set the initialization priority for DECL to PRIORITY. */
6071 decl_init_priority_insert (tree decl
, priority_type priority
)
6073 struct symtab_node
*snode
;
6075 if (priority
== DEFAULT_INIT_PRIORITY
)
6077 snode
= symtab_node::get (decl
);
6081 else if (VAR_P (decl
))
6082 snode
= varpool_node::get_create (decl
);
6084 snode
= cgraph_node::get_create (decl
);
6085 snode
->set_init_priority (priority
);
6088 /* Set the finalization priority for DECL to PRIORITY. */
6091 decl_fini_priority_insert (tree decl
, priority_type priority
)
6093 struct cgraph_node
*node
;
6095 if (priority
== DEFAULT_INIT_PRIORITY
)
6097 node
= cgraph_node::get (decl
);
6102 node
= cgraph_node::get_create (decl
);
6103 node
->set_fini_priority (priority
);
6106 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6109 print_debug_expr_statistics (void)
6111 fprintf (stderr
, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6112 (long) debug_expr_for_decl
->size (),
6113 (long) debug_expr_for_decl
->elements (),
6114 debug_expr_for_decl
->collisions ());
6117 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6120 print_value_expr_statistics (void)
6122 fprintf (stderr
, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6123 (long) value_expr_for_decl
->size (),
6124 (long) value_expr_for_decl
->elements (),
6125 value_expr_for_decl
->collisions ());
6128 /* Lookup a debug expression for FROM, and return it if we find one. */
6131 decl_debug_expr_lookup (tree from
)
6133 struct tree_decl_map
*h
, in
;
6134 in
.base
.from
= from
;
6136 h
= debug_expr_for_decl
->find_with_hash (&in
, DECL_UID (from
));
6142 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6145 decl_debug_expr_insert (tree from
, tree to
)
6147 struct tree_decl_map
*h
;
6149 h
= ggc_alloc
<tree_decl_map
> ();
6150 h
->base
.from
= from
;
6152 *debug_expr_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
) = h
;
6155 /* Lookup a value expression for FROM, and return it if we find one. */
6158 decl_value_expr_lookup (tree from
)
6160 struct tree_decl_map
*h
, in
;
6161 in
.base
.from
= from
;
6163 h
= value_expr_for_decl
->find_with_hash (&in
, DECL_UID (from
));
6169 /* Insert a mapping FROM->TO in the value expression hashtable. */
6172 decl_value_expr_insert (tree from
, tree to
)
6174 struct tree_decl_map
*h
;
6176 h
= ggc_alloc
<tree_decl_map
> ();
6177 h
->base
.from
= from
;
6179 *value_expr_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
) = h
;
6182 /* Lookup a vector of debug arguments for FROM, and return it if we
6186 decl_debug_args_lookup (tree from
)
6188 struct tree_vec_map
*h
, in
;
6190 if (!DECL_HAS_DEBUG_ARGS_P (from
))
6192 gcc_checking_assert (debug_args_for_decl
!= NULL
);
6193 in
.base
.from
= from
;
6194 h
= debug_args_for_decl
->find_with_hash (&in
, DECL_UID (from
));
6200 /* Insert a mapping FROM->empty vector of debug arguments in the value
6201 expression hashtable. */
6204 decl_debug_args_insert (tree from
)
6206 struct tree_vec_map
*h
;
6209 if (DECL_HAS_DEBUG_ARGS_P (from
))
6210 return decl_debug_args_lookup (from
);
6211 if (debug_args_for_decl
== NULL
)
6212 debug_args_for_decl
= hash_table
<tree_vec_map_cache_hasher
>::create_ggc (64);
6213 h
= ggc_alloc
<tree_vec_map
> ();
6214 h
->base
.from
= from
;
6216 loc
= debug_args_for_decl
->find_slot_with_hash (h
, DECL_UID (from
), INSERT
);
6218 DECL_HAS_DEBUG_ARGS_P (from
) = 1;
6222 /* Hashing of types so that we don't make duplicates.
6223 The entry point is `type_hash_canon'. */
6225 /* Generate the default hash code for TYPE. This is designed for
6226 speed, rather than maximum entropy. */
6229 type_hash_canon_hash (tree type
)
6231 inchash::hash hstate
;
6233 hstate
.add_int (TREE_CODE (type
));
6235 if (TREE_TYPE (type
))
6236 hstate
.add_object (TYPE_HASH (TREE_TYPE (type
)));
6238 for (tree t
= TYPE_ATTRIBUTES (type
); t
; t
= TREE_CHAIN (t
))
6239 /* Just the identifier is adequate to distinguish. */
6240 hstate
.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t
)));
6242 switch (TREE_CODE (type
))
6245 hstate
.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type
)));
6248 for (tree t
= TYPE_ARG_TYPES (type
); t
; t
= TREE_CHAIN (t
))
6249 if (TREE_VALUE (t
) != error_mark_node
)
6250 hstate
.add_object (TYPE_HASH (TREE_VALUE (t
)));
6254 hstate
.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type
)));
6259 if (TYPE_DOMAIN (type
))
6260 hstate
.add_object (TYPE_HASH (TYPE_DOMAIN (type
)));
6261 if (!AGGREGATE_TYPE_P (TREE_TYPE (type
)))
6263 unsigned typeless
= TYPE_TYPELESS_STORAGE (type
);
6264 hstate
.add_object (typeless
);
6271 tree t
= TYPE_MAX_VALUE (type
);
6273 t
= TYPE_MIN_VALUE (type
);
6274 for (int i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
6275 hstate
.add_object (TREE_INT_CST_ELT (t
, i
));
6280 case FIXED_POINT_TYPE
:
6282 unsigned prec
= TYPE_PRECISION (type
);
6283 hstate
.add_object (prec
);
6289 unsigned nunits
= TYPE_VECTOR_SUBPARTS (type
);
6290 hstate
.add_object (nunits
);
6298 return hstate
.end ();
6301 /* These are the Hashtable callback functions. */
6303 /* Returns true iff the types are equivalent. */
6306 type_cache_hasher::equal (type_hash
*a
, type_hash
*b
)
6308 /* First test the things that are the same for all types. */
6309 if (a
->hash
!= b
->hash
6310 || TREE_CODE (a
->type
) != TREE_CODE (b
->type
)
6311 || TREE_TYPE (a
->type
) != TREE_TYPE (b
->type
)
6312 || !attribute_list_equal (TYPE_ATTRIBUTES (a
->type
),
6313 TYPE_ATTRIBUTES (b
->type
))
6314 || (TREE_CODE (a
->type
) != COMPLEX_TYPE
6315 && TYPE_NAME (a
->type
) != TYPE_NAME (b
->type
)))
6318 /* Be careful about comparing arrays before and after the element type
6319 has been completed; don't compare TYPE_ALIGN unless both types are
6321 if (COMPLETE_TYPE_P (a
->type
) && COMPLETE_TYPE_P (b
->type
)
6322 && (TYPE_ALIGN (a
->type
) != TYPE_ALIGN (b
->type
)
6323 || TYPE_MODE (a
->type
) != TYPE_MODE (b
->type
)))
6326 switch (TREE_CODE (a
->type
))
6331 case REFERENCE_TYPE
:
6336 return TYPE_VECTOR_SUBPARTS (a
->type
) == TYPE_VECTOR_SUBPARTS (b
->type
);
6339 if (TYPE_VALUES (a
->type
) != TYPE_VALUES (b
->type
)
6340 && !(TYPE_VALUES (a
->type
)
6341 && TREE_CODE (TYPE_VALUES (a
->type
)) == TREE_LIST
6342 && TYPE_VALUES (b
->type
)
6343 && TREE_CODE (TYPE_VALUES (b
->type
)) == TREE_LIST
6344 && type_list_equal (TYPE_VALUES (a
->type
),
6345 TYPE_VALUES (b
->type
))))
6353 if (TYPE_PRECISION (a
->type
) != TYPE_PRECISION (b
->type
))
6355 return ((TYPE_MAX_VALUE (a
->type
) == TYPE_MAX_VALUE (b
->type
)
6356 || tree_int_cst_equal (TYPE_MAX_VALUE (a
->type
),
6357 TYPE_MAX_VALUE (b
->type
)))
6358 && (TYPE_MIN_VALUE (a
->type
) == TYPE_MIN_VALUE (b
->type
)
6359 || tree_int_cst_equal (TYPE_MIN_VALUE (a
->type
),
6360 TYPE_MIN_VALUE (b
->type
))));
6362 case FIXED_POINT_TYPE
:
6363 return TYPE_SATURATING (a
->type
) == TYPE_SATURATING (b
->type
);
6366 return TYPE_OFFSET_BASETYPE (a
->type
) == TYPE_OFFSET_BASETYPE (b
->type
);
6369 if (TYPE_METHOD_BASETYPE (a
->type
) == TYPE_METHOD_BASETYPE (b
->type
)
6370 && (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6371 || (TYPE_ARG_TYPES (a
->type
)
6372 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6373 && TYPE_ARG_TYPES (b
->type
)
6374 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6375 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6376 TYPE_ARG_TYPES (b
->type
)))))
6380 /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
6381 where the flag should be inherited from the element type
6382 and can change after ARRAY_TYPEs are created; on non-aggregates
6383 compare it and hash it, scalars will never have that flag set
6384 and we need to differentiate between arrays created by different
6385 front-ends or middle-end created arrays. */
6386 return (TYPE_DOMAIN (a
->type
) == TYPE_DOMAIN (b
->type
)
6387 && (AGGREGATE_TYPE_P (TREE_TYPE (a
->type
))
6388 || (TYPE_TYPELESS_STORAGE (a
->type
)
6389 == TYPE_TYPELESS_STORAGE (b
->type
))));
6393 case QUAL_UNION_TYPE
:
6394 return (TYPE_FIELDS (a
->type
) == TYPE_FIELDS (b
->type
)
6395 || (TYPE_FIELDS (a
->type
)
6396 && TREE_CODE (TYPE_FIELDS (a
->type
)) == TREE_LIST
6397 && TYPE_FIELDS (b
->type
)
6398 && TREE_CODE (TYPE_FIELDS (b
->type
)) == TREE_LIST
6399 && type_list_equal (TYPE_FIELDS (a
->type
),
6400 TYPE_FIELDS (b
->type
))));
6403 if (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6404 || (TYPE_ARG_TYPES (a
->type
)
6405 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6406 && TYPE_ARG_TYPES (b
->type
)
6407 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6408 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6409 TYPE_ARG_TYPES (b
->type
))))
6417 if (lang_hooks
.types
.type_hash_eq
!= NULL
)
6418 return lang_hooks
.types
.type_hash_eq (a
->type
, b
->type
);
6423 /* Given TYPE, and HASHCODE its hash code, return the canonical
6424 object for an identical type if one already exists.
6425 Otherwise, return TYPE, and record it as the canonical object.
6427 To use this function, first create a type of the sort you want.
6428 Then compute its hash code from the fields of the type that
6429 make it different from other similar types.
6430 Then call this function and use the value. */
6433 type_hash_canon (unsigned int hashcode
, tree type
)
6438 /* The hash table only contains main variants, so ensure that's what we're
6440 gcc_assert (TYPE_MAIN_VARIANT (type
) == type
);
6442 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6443 must call that routine before comparing TYPE_ALIGNs. */
6449 loc
= type_hash_table
->find_slot_with_hash (&in
, hashcode
, INSERT
);
6452 tree t1
= ((type_hash
*) *loc
)->type
;
6453 gcc_assert (TYPE_MAIN_VARIANT (t1
) == t1
);
6454 if (TYPE_UID (type
) + 1 == next_type_uid
)
6456 /* Free also min/max values and the cache for integer
6457 types. This can't be done in free_node, as LTO frees
6458 those on its own. */
6459 if (TREE_CODE (type
) == INTEGER_TYPE
)
6461 if (TYPE_MIN_VALUE (type
)
6462 && TREE_TYPE (TYPE_MIN_VALUE (type
)) == type
)
6464 /* Zero is always in TYPE_CACHED_VALUES. */
6465 if (! TYPE_UNSIGNED (type
))
6466 int_cst_hash_table
->remove_elt (TYPE_MIN_VALUE (type
));
6467 ggc_free (TYPE_MIN_VALUE (type
));
6469 if (TYPE_MAX_VALUE (type
)
6470 && TREE_TYPE (TYPE_MAX_VALUE (type
)) == type
)
6472 int_cst_hash_table
->remove_elt (TYPE_MAX_VALUE (type
));
6473 ggc_free (TYPE_MAX_VALUE (type
));
6475 if (TYPE_CACHED_VALUES_P (type
))
6476 ggc_free (TYPE_CACHED_VALUES (type
));
6483 struct type_hash
*h
;
6485 h
= ggc_alloc
<type_hash
> ();
6495 print_type_hash_statistics (void)
6497 fprintf (stderr
, "Type hash: size %ld, %ld elements, %f collisions\n",
6498 (long) type_hash_table
->size (),
6499 (long) type_hash_table
->elements (),
6500 type_hash_table
->collisions ());
6503 /* Given two lists of types
6504 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6505 return 1 if the lists contain the same types in the same order.
6506 Also, the TREE_PURPOSEs must match. */
6509 type_list_equal (const_tree l1
, const_tree l2
)
6513 for (t1
= l1
, t2
= l2
; t1
&& t2
; t1
= TREE_CHAIN (t1
), t2
= TREE_CHAIN (t2
))
6514 if (TREE_VALUE (t1
) != TREE_VALUE (t2
)
6515 || (TREE_PURPOSE (t1
) != TREE_PURPOSE (t2
)
6516 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1
), TREE_PURPOSE (t2
))
6517 && (TREE_TYPE (TREE_PURPOSE (t1
))
6518 == TREE_TYPE (TREE_PURPOSE (t2
))))))
6524 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6525 given by TYPE. If the argument list accepts variable arguments,
6526 then this function counts only the ordinary arguments. */
6529 type_num_arguments (const_tree type
)
6534 for (t
= TYPE_ARG_TYPES (type
); t
; t
= TREE_CHAIN (t
))
6535 /* If the function does not take a variable number of arguments,
6536 the last element in the list will have type `void'. */
6537 if (VOID_TYPE_P (TREE_VALUE (t
)))
6545 /* Nonzero if integer constants T1 and T2
6546 represent the same constant value. */
6549 tree_int_cst_equal (const_tree t1
, const_tree t2
)
6554 if (t1
== 0 || t2
== 0)
6557 if (TREE_CODE (t1
) == INTEGER_CST
6558 && TREE_CODE (t2
) == INTEGER_CST
6559 && wi::to_widest (t1
) == wi::to_widest (t2
))
6565 /* Return true if T is an INTEGER_CST whose numerical value (extended
6566 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6569 tree_fits_shwi_p (const_tree t
)
6571 return (t
!= NULL_TREE
6572 && TREE_CODE (t
) == INTEGER_CST
6573 && wi::fits_shwi_p (wi::to_widest (t
)));
6576 /* Return true if T is an INTEGER_CST whose numerical value (extended
6577 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6580 tree_fits_uhwi_p (const_tree t
)
6582 return (t
!= NULL_TREE
6583 && TREE_CODE (t
) == INTEGER_CST
6584 && wi::fits_uhwi_p (wi::to_widest (t
)));
6587 /* T is an INTEGER_CST whose numerical value (extended according to
6588 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6592 tree_to_shwi (const_tree t
)
6594 gcc_assert (tree_fits_shwi_p (t
));
6595 return TREE_INT_CST_LOW (t
);
6598 /* T is an INTEGER_CST whose numerical value (extended according to
6599 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6602 unsigned HOST_WIDE_INT
6603 tree_to_uhwi (const_tree t
)
6605 gcc_assert (tree_fits_uhwi_p (t
));
6606 return TREE_INT_CST_LOW (t
);
6609 /* Return the most significant (sign) bit of T. */
6612 tree_int_cst_sign_bit (const_tree t
)
6614 unsigned bitno
= TYPE_PRECISION (TREE_TYPE (t
)) - 1;
6616 return wi::extract_uhwi (wi::to_wide (t
), bitno
, 1);
6619 /* Return an indication of the sign of the integer constant T.
6620 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
6621 Note that -1 will never be returned if T's type is unsigned. */
6624 tree_int_cst_sgn (const_tree t
)
6626 if (wi::to_wide (t
) == 0)
6628 else if (TYPE_UNSIGNED (TREE_TYPE (t
)))
6630 else if (wi::neg_p (wi::to_wide (t
)))
6636 /* Return the minimum number of bits needed to represent VALUE in a
6637 signed or unsigned type, UNSIGNEDP says which. */
6640 tree_int_cst_min_precision (tree value
, signop sgn
)
6642 /* If the value is negative, compute its negative minus 1. The latter
6643 adjustment is because the absolute value of the largest negative value
6644 is one larger than the largest positive value. This is equivalent to
6645 a bit-wise negation, so use that operation instead. */
6647 if (tree_int_cst_sgn (value
) < 0)
6648 value
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (value
), value
);
6650 /* Return the number of bits needed, taking into account the fact
6651 that we need one more bit for a signed than unsigned type.
6652 If value is 0 or -1, the minimum precision is 1 no matter
6653 whether unsignedp is true or false. */
6655 if (integer_zerop (value
))
6658 return tree_floor_log2 (value
) + 1 + (sgn
== SIGNED
? 1 : 0) ;
6661 /* Return truthvalue of whether T1 is the same tree structure as T2.
6662 Return 1 if they are the same.
6663 Return 0 if they are understandably different.
6664 Return -1 if either contains tree structure not understood by
6668 simple_cst_equal (const_tree t1
, const_tree t2
)
6670 enum tree_code code1
, code2
;
6676 if (t1
== 0 || t2
== 0)
6679 code1
= TREE_CODE (t1
);
6680 code2
= TREE_CODE (t2
);
6682 if (CONVERT_EXPR_CODE_P (code1
) || code1
== NON_LVALUE_EXPR
)
6684 if (CONVERT_EXPR_CODE_P (code2
)
6685 || code2
== NON_LVALUE_EXPR
)
6686 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6688 return simple_cst_equal (TREE_OPERAND (t1
, 0), t2
);
6691 else if (CONVERT_EXPR_CODE_P (code2
)
6692 || code2
== NON_LVALUE_EXPR
)
6693 return simple_cst_equal (t1
, TREE_OPERAND (t2
, 0));
6701 return wi::to_widest (t1
) == wi::to_widest (t2
);
6704 return real_identical (&TREE_REAL_CST (t1
), &TREE_REAL_CST (t2
));
6707 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1
), TREE_FIXED_CST (t2
));
6710 return (TREE_STRING_LENGTH (t1
) == TREE_STRING_LENGTH (t2
)
6711 && ! memcmp (TREE_STRING_POINTER (t1
), TREE_STRING_POINTER (t2
),
6712 TREE_STRING_LENGTH (t1
)));
6716 unsigned HOST_WIDE_INT idx
;
6717 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (t1
);
6718 vec
<constructor_elt
, va_gc
> *v2
= CONSTRUCTOR_ELTS (t2
);
6720 if (vec_safe_length (v1
) != vec_safe_length (v2
))
6723 for (idx
= 0; idx
< vec_safe_length (v1
); ++idx
)
6724 /* ??? Should we handle also fields here? */
6725 if (!simple_cst_equal ((*v1
)[idx
].value
, (*v2
)[idx
].value
))
6731 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6734 cmp
= simple_cst_equal (CALL_EXPR_FN (t1
), CALL_EXPR_FN (t2
));
6737 if (call_expr_nargs (t1
) != call_expr_nargs (t2
))
6740 const_tree arg1
, arg2
;
6741 const_call_expr_arg_iterator iter1
, iter2
;
6742 for (arg1
= first_const_call_expr_arg (t1
, &iter1
),
6743 arg2
= first_const_call_expr_arg (t2
, &iter2
);
6745 arg1
= next_const_call_expr_arg (&iter1
),
6746 arg2
= next_const_call_expr_arg (&iter2
))
6748 cmp
= simple_cst_equal (arg1
, arg2
);
6752 return arg1
== arg2
;
6756 /* Special case: if either target is an unallocated VAR_DECL,
6757 it means that it's going to be unified with whatever the
6758 TARGET_EXPR is really supposed to initialize, so treat it
6759 as being equivalent to anything. */
6760 if ((TREE_CODE (TREE_OPERAND (t1
, 0)) == VAR_DECL
6761 && DECL_NAME (TREE_OPERAND (t1
, 0)) == NULL_TREE
6762 && !DECL_RTL_SET_P (TREE_OPERAND (t1
, 0)))
6763 || (TREE_CODE (TREE_OPERAND (t2
, 0)) == VAR_DECL
6764 && DECL_NAME (TREE_OPERAND (t2
, 0)) == NULL_TREE
6765 && !DECL_RTL_SET_P (TREE_OPERAND (t2
, 0))))
6768 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6773 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t2
, 1));
6775 case WITH_CLEANUP_EXPR
:
6776 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6780 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t1
, 1));
6783 if (TREE_OPERAND (t1
, 1) == TREE_OPERAND (t2
, 1))
6784 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
6798 /* This general rule works for most tree codes. All exceptions should be
6799 handled above. If this is a language-specific tree code, we can't
6800 trust what might be in the operand, so say we don't know
6802 if ((int) code1
>= (int) LAST_AND_UNUSED_TREE_CODE
)
6805 switch (TREE_CODE_CLASS (code1
))
6809 case tcc_comparison
:
6810 case tcc_expression
:
6814 for (i
= 0; i
< TREE_CODE_LENGTH (code1
); i
++)
6816 cmp
= simple_cst_equal (TREE_OPERAND (t1
, i
), TREE_OPERAND (t2
, i
));
6828 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
6829 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
6830 than U, respectively. */
6833 compare_tree_int (const_tree t
, unsigned HOST_WIDE_INT u
)
6835 if (tree_int_cst_sgn (t
) < 0)
6837 else if (!tree_fits_uhwi_p (t
))
6839 else if (TREE_INT_CST_LOW (t
) == u
)
6841 else if (TREE_INT_CST_LOW (t
) < u
)
6847 /* Return true if SIZE represents a constant size that is in bounds of
6848 what the middle-end and the backend accepts (covering not more than
6849 half of the address-space). */
6852 valid_constant_size_p (const_tree size
)
6854 if (! tree_fits_uhwi_p (size
)
6855 || TREE_OVERFLOW (size
)
6856 || tree_int_cst_sign_bit (size
) != 0)
6861 /* Return the precision of the type, or for a complex or vector type the
6862 precision of the type of its elements. */
6865 element_precision (const_tree type
)
6868 type
= TREE_TYPE (type
);
6869 enum tree_code code
= TREE_CODE (type
);
6870 if (code
== COMPLEX_TYPE
|| code
== VECTOR_TYPE
)
6871 type
= TREE_TYPE (type
);
6873 return TYPE_PRECISION (type
);
6876 /* Return true if CODE represents an associative tree code. Otherwise
6879 associative_tree_code (enum tree_code code
)
6898 /* Return true if CODE represents a commutative tree code. Otherwise
6901 commutative_tree_code (enum tree_code code
)
6907 case MULT_HIGHPART_EXPR
:
6915 case UNORDERED_EXPR
:
6919 case TRUTH_AND_EXPR
:
6920 case TRUTH_XOR_EXPR
:
6922 case WIDEN_MULT_EXPR
:
6923 case VEC_WIDEN_MULT_HI_EXPR
:
6924 case VEC_WIDEN_MULT_LO_EXPR
:
6925 case VEC_WIDEN_MULT_EVEN_EXPR
:
6926 case VEC_WIDEN_MULT_ODD_EXPR
:
6935 /* Return true if CODE represents a ternary tree code for which the
6936 first two operands are commutative. Otherwise return false. */
6938 commutative_ternary_tree_code (enum tree_code code
)
6942 case WIDEN_MULT_PLUS_EXPR
:
6943 case WIDEN_MULT_MINUS_EXPR
:
6954 /* Returns true if CODE can overflow. */
6957 operation_can_overflow (enum tree_code code
)
6965 /* Can overflow in various ways. */
6967 case TRUNC_DIV_EXPR
:
6968 case EXACT_DIV_EXPR
:
6969 case FLOOR_DIV_EXPR
:
6971 /* For INT_MIN / -1. */
6978 /* These operators cannot overflow. */
6983 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
6984 ftrapv doesn't generate trapping insns for CODE. */
6987 operation_no_trapping_overflow (tree type
, enum tree_code code
)
6989 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type
));
6991 /* We don't generate instructions that trap on overflow for complex or vector
6993 if (!INTEGRAL_TYPE_P (type
))
6996 if (!TYPE_OVERFLOW_TRAPS (type
))
7006 /* These operators can overflow, and -ftrapv generates trapping code for
7009 case TRUNC_DIV_EXPR
:
7010 case EXACT_DIV_EXPR
:
7011 case FLOOR_DIV_EXPR
:
7014 /* These operators can overflow, but -ftrapv does not generate trapping
7018 /* These operators cannot overflow. */
7026 /* Generate a hash value for an expression. This can be used iteratively
7027 by passing a previous result as the HSTATE argument.
7029 This function is intended to produce the same hash for expressions which
7030 would compare equal using operand_equal_p. */
7032 add_expr (const_tree t
, inchash::hash
&hstate
, unsigned int flags
)
7035 enum tree_code code
;
7036 enum tree_code_class tclass
;
7038 if (t
== NULL_TREE
|| t
== error_mark_node
)
7040 hstate
.merge_hash (0);
7044 if (!(flags
& OEP_ADDRESS_OF
))
7047 code
= TREE_CODE (t
);
7051 /* Alas, constants aren't shared, so we can't rely on pointer
7054 hstate
.merge_hash (0);
7057 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
7058 for (i
= 0; i
< TREE_INT_CST_EXT_NUNITS (t
); i
++)
7059 hstate
.add_hwi (TREE_INT_CST_ELT (t
, i
));
7064 if (!HONOR_SIGNED_ZEROS (t
) && real_zerop (t
))
7067 val2
= real_hash (TREE_REAL_CST_PTR (t
));
7068 hstate
.merge_hash (val2
);
7073 unsigned int val2
= fixed_hash (TREE_FIXED_CST_PTR (t
));
7074 hstate
.merge_hash (val2
);
7078 hstate
.add ((const void *) TREE_STRING_POINTER (t
),
7079 TREE_STRING_LENGTH (t
));
7082 inchash::add_expr (TREE_REALPART (t
), hstate
, flags
);
7083 inchash::add_expr (TREE_IMAGPART (t
), hstate
, flags
);
7087 hstate
.add_int (VECTOR_CST_NPATTERNS (t
));
7088 hstate
.add_int (VECTOR_CST_NELTS_PER_PATTERN (t
));
7089 unsigned int count
= vector_cst_encoded_nelts (t
);
7090 for (unsigned int i
= 0; i
< count
; ++i
)
7091 inchash::add_expr (VECTOR_CST_ENCODED_ELT (t
, i
), hstate
, flags
);
7095 /* We can just compare by pointer. */
7096 hstate
.add_hwi (SSA_NAME_VERSION (t
));
7098 case PLACEHOLDER_EXPR
:
7099 /* The node itself doesn't matter. */
7106 /* A list of expressions, for a CALL_EXPR or as the elements of a
7108 for (; t
; t
= TREE_CHAIN (t
))
7109 inchash::add_expr (TREE_VALUE (t
), hstate
, flags
);
7113 unsigned HOST_WIDE_INT idx
;
7115 flags
&= ~OEP_ADDRESS_OF
;
7116 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t
), idx
, field
, value
)
7118 inchash::add_expr (field
, hstate
, flags
);
7119 inchash::add_expr (value
, hstate
, flags
);
7123 case STATEMENT_LIST
:
7125 tree_stmt_iterator i
;
7126 for (i
= tsi_start (CONST_CAST_TREE (t
));
7127 !tsi_end_p (i
); tsi_next (&i
))
7128 inchash::add_expr (tsi_stmt (i
), hstate
, flags
);
7132 for (i
= 0; i
< TREE_VEC_LENGTH (t
); ++i
)
7133 inchash::add_expr (TREE_VEC_ELT (t
, i
), hstate
, flags
);
7136 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7137 Otherwise nodes that compare equal according to operand_equal_p might
7138 get different hash codes. However, don't do this for machine specific
7139 or front end builtins, since the function code is overloaded in those
7141 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
7142 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t
)))
7144 t
= builtin_decl_explicit (DECL_FUNCTION_CODE (t
));
7145 code
= TREE_CODE (t
);
7149 tclass
= TREE_CODE_CLASS (code
);
7151 if (tclass
== tcc_declaration
)
7153 /* DECL's have a unique ID */
7154 hstate
.add_hwi (DECL_UID (t
));
7156 else if (tclass
== tcc_comparison
&& !commutative_tree_code (code
))
7158 /* For comparisons that can be swapped, use the lower
7160 enum tree_code ccode
= swap_tree_comparison (code
);
7163 hstate
.add_object (ccode
);
7164 inchash::add_expr (TREE_OPERAND (t
, ccode
!= code
), hstate
, flags
);
7165 inchash::add_expr (TREE_OPERAND (t
, ccode
== code
), hstate
, flags
);
7167 else if (CONVERT_EXPR_CODE_P (code
))
7169 /* NOP_EXPR and CONVERT_EXPR are considered equal by
7171 enum tree_code ccode
= NOP_EXPR
;
7172 hstate
.add_object (ccode
);
7174 /* Don't hash the type, that can lead to having nodes which
7175 compare equal according to operand_equal_p, but which
7176 have different hash codes. Make sure to include signedness
7177 in the hash computation. */
7178 hstate
.add_int (TYPE_UNSIGNED (TREE_TYPE (t
)));
7179 inchash::add_expr (TREE_OPERAND (t
, 0), hstate
, flags
);
7181 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
7182 else if (code
== MEM_REF
7183 && (flags
& OEP_ADDRESS_OF
) != 0
7184 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
7185 && DECL_P (TREE_OPERAND (TREE_OPERAND (t
, 0), 0))
7186 && integer_zerop (TREE_OPERAND (t
, 1)))
7187 inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t
, 0), 0),
7189 /* Don't ICE on FE specific trees, or their arguments etc.
7190 during operand_equal_p hash verification. */
7191 else if (!IS_EXPR_CODE_CLASS (tclass
))
7192 gcc_assert (flags
& OEP_HASH_CHECK
);
7195 unsigned int sflags
= flags
;
7197 hstate
.add_object (code
);
7202 gcc_checking_assert (!(flags
& OEP_ADDRESS_OF
));
7203 flags
|= OEP_ADDRESS_OF
;
7209 case TARGET_MEM_REF
:
7210 flags
&= ~OEP_ADDRESS_OF
;
7215 case ARRAY_RANGE_REF
:
7218 sflags
&= ~OEP_ADDRESS_OF
;
7222 flags
&= ~OEP_ADDRESS_OF
;
7226 case WIDEN_MULT_PLUS_EXPR
:
7227 case WIDEN_MULT_MINUS_EXPR
:
7229 /* The multiplication operands are commutative. */
7230 inchash::hash one
, two
;
7231 inchash::add_expr (TREE_OPERAND (t
, 0), one
, flags
);
7232 inchash::add_expr (TREE_OPERAND (t
, 1), two
, flags
);
7233 hstate
.add_commutative (one
, two
);
7234 inchash::add_expr (TREE_OPERAND (t
, 2), two
, flags
);
7239 if (CALL_EXPR_FN (t
) == NULL_TREE
)
7240 hstate
.add_int (CALL_EXPR_IFN (t
));
7244 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
7245 Usually different TARGET_EXPRs just should use
7246 different temporaries in their slots. */
7247 inchash::add_expr (TARGET_EXPR_SLOT (t
), hstate
, flags
);
7254 /* Don't hash the type, that can lead to having nodes which
7255 compare equal according to operand_equal_p, but which
7256 have different hash codes. */
7257 if (code
== NON_LVALUE_EXPR
)
7259 /* Make sure to include signness in the hash computation. */
7260 hstate
.add_int (TYPE_UNSIGNED (TREE_TYPE (t
)));
7261 inchash::add_expr (TREE_OPERAND (t
, 0), hstate
, flags
);
7264 else if (commutative_tree_code (code
))
7266 /* It's a commutative expression. We want to hash it the same
7267 however it appears. We do this by first hashing both operands
7268 and then rehashing based on the order of their independent
7270 inchash::hash one
, two
;
7271 inchash::add_expr (TREE_OPERAND (t
, 0), one
, flags
);
7272 inchash::add_expr (TREE_OPERAND (t
, 1), two
, flags
);
7273 hstate
.add_commutative (one
, two
);
7276 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
7277 inchash::add_expr (TREE_OPERAND (t
, i
), hstate
,
7278 i
== 0 ? flags
: sflags
);
7286 /* Constructors for pointer, array and function types.
7287 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7288 constructed by language-dependent code, not here.) */
7290 /* Construct, lay out and return the type of pointers to TO_TYPE with
7291 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7292 reference all of memory. If such a type has already been
7293 constructed, reuse it. */
7296 build_pointer_type_for_mode (tree to_type
, machine_mode mode
,
7300 bool could_alias
= can_alias_all
;
7302 if (to_type
== error_mark_node
)
7303 return error_mark_node
;
7305 /* If the pointed-to type has the may_alias attribute set, force
7306 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7307 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
7308 can_alias_all
= true;
7310 /* In some cases, languages will have things that aren't a POINTER_TYPE
7311 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7312 In that case, return that type without regard to the rest of our
7315 ??? This is a kludge, but consistent with the way this function has
7316 always operated and there doesn't seem to be a good way to avoid this
7318 if (TYPE_POINTER_TO (to_type
) != 0
7319 && TREE_CODE (TYPE_POINTER_TO (to_type
)) != POINTER_TYPE
)
7320 return TYPE_POINTER_TO (to_type
);
7322 /* First, if we already have a type for pointers to TO_TYPE and it's
7323 the proper mode, use it. */
7324 for (t
= TYPE_POINTER_TO (to_type
); t
; t
= TYPE_NEXT_PTR_TO (t
))
7325 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
7328 t
= make_node (POINTER_TYPE
);
7330 TREE_TYPE (t
) = to_type
;
7331 SET_TYPE_MODE (t
, mode
);
7332 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
7333 TYPE_NEXT_PTR_TO (t
) = TYPE_POINTER_TO (to_type
);
7334 TYPE_POINTER_TO (to_type
) = t
;
7336 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7337 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
7338 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7339 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
7341 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type
),
7344 /* Lay out the type. This function has many callers that are concerned
7345 with expression-construction, and this simplifies them all. */
7351 /* By default build pointers in ptr_mode. */
7354 build_pointer_type (tree to_type
)
7356 addr_space_t as
= to_type
== error_mark_node
? ADDR_SPACE_GENERIC
7357 : TYPE_ADDR_SPACE (to_type
);
7358 machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7359 return build_pointer_type_for_mode (to_type
, pointer_mode
, false);
7362 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7365 build_reference_type_for_mode (tree to_type
, machine_mode mode
,
7369 bool could_alias
= can_alias_all
;
7371 if (to_type
== error_mark_node
)
7372 return error_mark_node
;
7374 /* If the pointed-to type has the may_alias attribute set, force
7375 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7376 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
7377 can_alias_all
= true;
7379 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7380 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7381 In that case, return that type without regard to the rest of our
7384 ??? This is a kludge, but consistent with the way this function has
7385 always operated and there doesn't seem to be a good way to avoid this
7387 if (TYPE_REFERENCE_TO (to_type
) != 0
7388 && TREE_CODE (TYPE_REFERENCE_TO (to_type
)) != REFERENCE_TYPE
)
7389 return TYPE_REFERENCE_TO (to_type
);
7391 /* First, if we already have a type for pointers to TO_TYPE and it's
7392 the proper mode, use it. */
7393 for (t
= TYPE_REFERENCE_TO (to_type
); t
; t
= TYPE_NEXT_REF_TO (t
))
7394 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
7397 t
= make_node (REFERENCE_TYPE
);
7399 TREE_TYPE (t
) = to_type
;
7400 SET_TYPE_MODE (t
, mode
);
7401 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
7402 TYPE_NEXT_REF_TO (t
) = TYPE_REFERENCE_TO (to_type
);
7403 TYPE_REFERENCE_TO (to_type
) = t
;
7405 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7406 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
7407 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7408 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
7410 = build_reference_type_for_mode (TYPE_CANONICAL (to_type
),
7419 /* Build the node for the type of references-to-TO_TYPE by default
7423 build_reference_type (tree to_type
)
7425 addr_space_t as
= to_type
== error_mark_node
? ADDR_SPACE_GENERIC
7426 : TYPE_ADDR_SPACE (to_type
);
7427 machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7428 return build_reference_type_for_mode (to_type
, pointer_mode
, false);
7431 #define MAX_INT_CACHED_PREC \
7432 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7433 static GTY(()) tree nonstandard_integer_type_cache
[2 * MAX_INT_CACHED_PREC
+ 2];
7435 /* Builds a signed or unsigned integer type of precision PRECISION.
7436 Used for C bitfields whose precision does not match that of
7437 built-in target types. */
7439 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision
,
7445 unsignedp
= MAX_INT_CACHED_PREC
+ 1;
7447 if (precision
<= MAX_INT_CACHED_PREC
)
7449 itype
= nonstandard_integer_type_cache
[precision
+ unsignedp
];
7454 itype
= make_node (INTEGER_TYPE
);
7455 TYPE_PRECISION (itype
) = precision
;
7458 fixup_unsigned_type (itype
);
7460 fixup_signed_type (itype
);
7464 inchash::hash hstate
;
7465 inchash::add_expr (TYPE_MAX_VALUE (itype
), hstate
);
7466 ret
= type_hash_canon (hstate
.end (), itype
);
7467 if (precision
<= MAX_INT_CACHED_PREC
)
7468 nonstandard_integer_type_cache
[precision
+ unsignedp
] = ret
;
7473 #define MAX_BOOL_CACHED_PREC \
7474 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7475 static GTY(()) tree nonstandard_boolean_type_cache
[MAX_BOOL_CACHED_PREC
+ 1];
7477 /* Builds a boolean type of precision PRECISION.
7478 Used for boolean vectors to choose proper vector element size. */
7480 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision
)
7484 if (precision
<= MAX_BOOL_CACHED_PREC
)
7486 type
= nonstandard_boolean_type_cache
[precision
];
7491 type
= make_node (BOOLEAN_TYPE
);
7492 TYPE_PRECISION (type
) = precision
;
7493 fixup_signed_type (type
);
7495 if (precision
<= MAX_INT_CACHED_PREC
)
7496 nonstandard_boolean_type_cache
[precision
] = type
;
7501 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7502 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7503 is true, reuse such a type that has already been constructed. */
7506 build_range_type_1 (tree type
, tree lowval
, tree highval
, bool shared
)
7508 tree itype
= make_node (INTEGER_TYPE
);
7510 TREE_TYPE (itype
) = type
;
7512 TYPE_MIN_VALUE (itype
) = fold_convert (type
, lowval
);
7513 TYPE_MAX_VALUE (itype
) = highval
? fold_convert (type
, highval
) : NULL
;
7515 TYPE_PRECISION (itype
) = TYPE_PRECISION (type
);
7516 SET_TYPE_MODE (itype
, TYPE_MODE (type
));
7517 TYPE_SIZE (itype
) = TYPE_SIZE (type
);
7518 TYPE_SIZE_UNIT (itype
) = TYPE_SIZE_UNIT (type
);
7519 SET_TYPE_ALIGN (itype
, TYPE_ALIGN (type
));
7520 TYPE_USER_ALIGN (itype
) = TYPE_USER_ALIGN (type
);
7521 SET_TYPE_WARN_IF_NOT_ALIGN (itype
, TYPE_WARN_IF_NOT_ALIGN (type
));
7526 if ((TYPE_MIN_VALUE (itype
)
7527 && TREE_CODE (TYPE_MIN_VALUE (itype
)) != INTEGER_CST
)
7528 || (TYPE_MAX_VALUE (itype
)
7529 && TREE_CODE (TYPE_MAX_VALUE (itype
)) != INTEGER_CST
))
7531 /* Since we cannot reliably merge this type, we need to compare it using
7532 structural equality checks. */
7533 SET_TYPE_STRUCTURAL_EQUALITY (itype
);
7537 hashval_t hash
= type_hash_canon_hash (itype
);
7538 itype
= type_hash_canon (hash
, itype
);
7543 /* Wrapper around build_range_type_1 with SHARED set to true. */
7546 build_range_type (tree type
, tree lowval
, tree highval
)
7548 return build_range_type_1 (type
, lowval
, highval
, true);
7551 /* Wrapper around build_range_type_1 with SHARED set to false. */
7554 build_nonshared_range_type (tree type
, tree lowval
, tree highval
)
7556 return build_range_type_1 (type
, lowval
, highval
, false);
7559 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7560 MAXVAL should be the maximum value in the domain
7561 (one less than the length of the array).
7563 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7564 We don't enforce this limit, that is up to caller (e.g. language front end).
7565 The limit exists because the result is a signed type and we don't handle
7566 sizes that use more than one HOST_WIDE_INT. */
7569 build_index_type (tree maxval
)
7571 return build_range_type (sizetype
, size_zero_node
, maxval
);
7574 /* Return true if the debug information for TYPE, a subtype, should be emitted
7575 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7576 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7577 debug info and doesn't reflect the source code. */
7580 subrange_type_for_debug_p (const_tree type
, tree
*lowval
, tree
*highval
)
7582 tree base_type
= TREE_TYPE (type
), low
, high
;
7584 /* Subrange types have a base type which is an integral type. */
7585 if (!INTEGRAL_TYPE_P (base_type
))
7588 /* Get the real bounds of the subtype. */
7589 if (lang_hooks
.types
.get_subrange_bounds
)
7590 lang_hooks
.types
.get_subrange_bounds (type
, &low
, &high
);
7593 low
= TYPE_MIN_VALUE (type
);
7594 high
= TYPE_MAX_VALUE (type
);
7597 /* If the type and its base type have the same representation and the same
7598 name, then the type is not a subrange but a copy of the base type. */
7599 if ((TREE_CODE (base_type
) == INTEGER_TYPE
7600 || TREE_CODE (base_type
) == BOOLEAN_TYPE
)
7601 && int_size_in_bytes (type
) == int_size_in_bytes (base_type
)
7602 && tree_int_cst_equal (low
, TYPE_MIN_VALUE (base_type
))
7603 && tree_int_cst_equal (high
, TYPE_MAX_VALUE (base_type
))
7604 && TYPE_IDENTIFIER (type
) == TYPE_IDENTIFIER (base_type
))
7614 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7615 and number of elements specified by the range of values of INDEX_TYPE.
7616 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
7617 If SHARED is true, reuse such a type that has already been constructed. */
7620 build_array_type_1 (tree elt_type
, tree index_type
, bool typeless_storage
,
7625 if (TREE_CODE (elt_type
) == FUNCTION_TYPE
)
7627 error ("arrays of functions are not meaningful");
7628 elt_type
= integer_type_node
;
7631 t
= make_node (ARRAY_TYPE
);
7632 TREE_TYPE (t
) = elt_type
;
7633 TYPE_DOMAIN (t
) = index_type
;
7634 TYPE_ADDR_SPACE (t
) = TYPE_ADDR_SPACE (elt_type
);
7635 TYPE_TYPELESS_STORAGE (t
) = typeless_storage
;
7638 /* If the element type is incomplete at this point we get marked for
7639 structural equality. Do not record these types in the canonical
7641 if (TYPE_STRUCTURAL_EQUALITY_P (t
))
7646 hashval_t hash
= type_hash_canon_hash (t
);
7647 t
= type_hash_canon (hash
, t
);
7650 if (TYPE_CANONICAL (t
) == t
)
7652 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type
)
7653 || (index_type
&& TYPE_STRUCTURAL_EQUALITY_P (index_type
))
7655 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7656 else if (TYPE_CANONICAL (elt_type
) != elt_type
7657 || (index_type
&& TYPE_CANONICAL (index_type
) != index_type
))
7659 = build_array_type_1 (TYPE_CANONICAL (elt_type
),
7661 ? TYPE_CANONICAL (index_type
) : NULL_TREE
,
7662 typeless_storage
, shared
);
7668 /* Wrapper around build_array_type_1 with SHARED set to true. */
7671 build_array_type (tree elt_type
, tree index_type
, bool typeless_storage
)
7673 return build_array_type_1 (elt_type
, index_type
, typeless_storage
, true);
7676 /* Wrapper around build_array_type_1 with SHARED set to false. */
7679 build_nonshared_array_type (tree elt_type
, tree index_type
)
7681 return build_array_type_1 (elt_type
, index_type
, false, false);
7684 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7688 build_array_type_nelts (tree elt_type
, unsigned HOST_WIDE_INT nelts
)
7690 return build_array_type (elt_type
, build_index_type (size_int (nelts
- 1)));
7693 /* Recursively examines the array elements of TYPE, until a non-array
7694 element type is found. */
7697 strip_array_types (tree type
)
7699 while (TREE_CODE (type
) == ARRAY_TYPE
)
7700 type
= TREE_TYPE (type
);
7705 /* Computes the canonical argument types from the argument type list
7708 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7709 on entry to this function, or if any of the ARGTYPES are
7712 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7713 true on entry to this function, or if any of the ARGTYPES are
7716 Returns a canonical argument list, which may be ARGTYPES when the
7717 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7718 true) or would not differ from ARGTYPES. */
7721 maybe_canonicalize_argtypes (tree argtypes
,
7722 bool *any_structural_p
,
7723 bool *any_noncanonical_p
)
7726 bool any_noncanonical_argtypes_p
= false;
7728 for (arg
= argtypes
; arg
&& !(*any_structural_p
); arg
= TREE_CHAIN (arg
))
7730 if (!TREE_VALUE (arg
) || TREE_VALUE (arg
) == error_mark_node
)
7731 /* Fail gracefully by stating that the type is structural. */
7732 *any_structural_p
= true;
7733 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg
)))
7734 *any_structural_p
= true;
7735 else if (TYPE_CANONICAL (TREE_VALUE (arg
)) != TREE_VALUE (arg
)
7736 || TREE_PURPOSE (arg
))
7737 /* If the argument has a default argument, we consider it
7738 non-canonical even though the type itself is canonical.
7739 That way, different variants of function and method types
7740 with default arguments will all point to the variant with
7741 no defaults as their canonical type. */
7742 any_noncanonical_argtypes_p
= true;
7745 if (*any_structural_p
)
7748 if (any_noncanonical_argtypes_p
)
7750 /* Build the canonical list of argument types. */
7751 tree canon_argtypes
= NULL_TREE
;
7752 bool is_void
= false;
7754 for (arg
= argtypes
; arg
; arg
= TREE_CHAIN (arg
))
7756 if (arg
== void_list_node
)
7759 canon_argtypes
= tree_cons (NULL_TREE
,
7760 TYPE_CANONICAL (TREE_VALUE (arg
)),
7764 canon_argtypes
= nreverse (canon_argtypes
);
7766 canon_argtypes
= chainon (canon_argtypes
, void_list_node
);
7768 /* There is a non-canonical type. */
7769 *any_noncanonical_p
= true;
7770 return canon_argtypes
;
7773 /* The canonical argument types are the same as ARGTYPES. */
7777 /* Construct, lay out and return
7778 the type of functions returning type VALUE_TYPE
7779 given arguments of types ARG_TYPES.
7780 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7781 are data type nodes for the arguments of the function.
7782 If such a type has already been constructed, reuse it. */
7785 build_function_type (tree value_type
, tree arg_types
)
7788 inchash::hash hstate
;
7789 bool any_structural_p
, any_noncanonical_p
;
7790 tree canon_argtypes
;
7792 if (TREE_CODE (value_type
) == FUNCTION_TYPE
)
7794 error ("function return type cannot be function");
7795 value_type
= integer_type_node
;
7798 /* Make a node of the sort we want. */
7799 t
= make_node (FUNCTION_TYPE
);
7800 TREE_TYPE (t
) = value_type
;
7801 TYPE_ARG_TYPES (t
) = arg_types
;
7803 /* If we already have such a type, use the old one. */
7804 hashval_t hash
= type_hash_canon_hash (t
);
7805 t
= type_hash_canon (hash
, t
);
7807 /* Set up the canonical type. */
7808 any_structural_p
= TYPE_STRUCTURAL_EQUALITY_P (value_type
);
7809 any_noncanonical_p
= TYPE_CANONICAL (value_type
) != value_type
;
7810 canon_argtypes
= maybe_canonicalize_argtypes (arg_types
,
7812 &any_noncanonical_p
);
7813 if (any_structural_p
)
7814 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7815 else if (any_noncanonical_p
)
7816 TYPE_CANONICAL (t
) = build_function_type (TYPE_CANONICAL (value_type
),
7819 if (!COMPLETE_TYPE_P (t
))
7824 /* Build a function type. The RETURN_TYPE is the type returned by the
7825 function. If VAARGS is set, no void_type_node is appended to the
7826 list. ARGP must be always be terminated be a NULL_TREE. */
7829 build_function_type_list_1 (bool vaargs
, tree return_type
, va_list argp
)
7833 t
= va_arg (argp
, tree
);
7834 for (args
= NULL_TREE
; t
!= NULL_TREE
; t
= va_arg (argp
, tree
))
7835 args
= tree_cons (NULL_TREE
, t
, args
);
7840 if (args
!= NULL_TREE
)
7841 args
= nreverse (args
);
7842 gcc_assert (last
!= void_list_node
);
7844 else if (args
== NULL_TREE
)
7845 args
= void_list_node
;
7849 args
= nreverse (args
);
7850 TREE_CHAIN (last
) = void_list_node
;
7852 args
= build_function_type (return_type
, args
);
7857 /* Build a function type. The RETURN_TYPE is the type returned by the
7858 function. If additional arguments are provided, they are
7859 additional argument types. The list of argument types must always
7860 be terminated by NULL_TREE. */
7863 build_function_type_list (tree return_type
, ...)
7868 va_start (p
, return_type
);
7869 args
= build_function_type_list_1 (false, return_type
, p
);
7874 /* Build a variable argument function type. The RETURN_TYPE is the
7875 type returned by the function. If additional arguments are provided,
7876 they are additional argument types. The list of argument types must
7877 always be terminated by NULL_TREE. */
7880 build_varargs_function_type_list (tree return_type
, ...)
7885 va_start (p
, return_type
);
7886 args
= build_function_type_list_1 (true, return_type
, p
);
7892 /* Build a function type. RETURN_TYPE is the type returned by the
7893 function; VAARGS indicates whether the function takes varargs. The
7894 function takes N named arguments, the types of which are provided in
7898 build_function_type_array_1 (bool vaargs
, tree return_type
, int n
,
7902 tree t
= vaargs
? NULL_TREE
: void_list_node
;
7904 for (i
= n
- 1; i
>= 0; i
--)
7905 t
= tree_cons (NULL_TREE
, arg_types
[i
], t
);
7907 return build_function_type (return_type
, t
);
7910 /* Build a function type. RETURN_TYPE is the type returned by the
7911 function. The function takes N named arguments, the types of which
7912 are provided in ARG_TYPES. */
7915 build_function_type_array (tree return_type
, int n
, tree
*arg_types
)
7917 return build_function_type_array_1 (false, return_type
, n
, arg_types
);
7920 /* Build a variable argument function type. RETURN_TYPE is the type
7921 returned by the function. The function takes N named arguments, the
7922 types of which are provided in ARG_TYPES. */
7925 build_varargs_function_type_array (tree return_type
, int n
, tree
*arg_types
)
7927 return build_function_type_array_1 (true, return_type
, n
, arg_types
);
7930 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
7931 and ARGTYPES (a TREE_LIST) are the return type and arguments types
7932 for the method. An implicit additional parameter (of type
7933 pointer-to-BASETYPE) is added to the ARGTYPES. */
7936 build_method_type_directly (tree basetype
,
7942 bool any_structural_p
, any_noncanonical_p
;
7943 tree canon_argtypes
;
7945 /* Make a node of the sort we want. */
7946 t
= make_node (METHOD_TYPE
);
7948 TYPE_METHOD_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
7949 TREE_TYPE (t
) = rettype
;
7950 ptype
= build_pointer_type (basetype
);
7952 /* The actual arglist for this function includes a "hidden" argument
7953 which is "this". Put it into the list of argument types. */
7954 argtypes
= tree_cons (NULL_TREE
, ptype
, argtypes
);
7955 TYPE_ARG_TYPES (t
) = argtypes
;
7957 /* If we already have such a type, use the old one. */
7958 hashval_t hash
= type_hash_canon_hash (t
);
7959 t
= type_hash_canon (hash
, t
);
7961 /* Set up the canonical type. */
7963 = (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
7964 || TYPE_STRUCTURAL_EQUALITY_P (rettype
));
7966 = (TYPE_CANONICAL (basetype
) != basetype
7967 || TYPE_CANONICAL (rettype
) != rettype
);
7968 canon_argtypes
= maybe_canonicalize_argtypes (TREE_CHAIN (argtypes
),
7970 &any_noncanonical_p
);
7971 if (any_structural_p
)
7972 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7973 else if (any_noncanonical_p
)
7975 = build_method_type_directly (TYPE_CANONICAL (basetype
),
7976 TYPE_CANONICAL (rettype
),
7978 if (!COMPLETE_TYPE_P (t
))
7984 /* Construct, lay out and return the type of methods belonging to class
7985 BASETYPE and whose arguments and values are described by TYPE.
7986 If that type exists already, reuse it.
7987 TYPE must be a FUNCTION_TYPE node. */
7990 build_method_type (tree basetype
, tree type
)
7992 gcc_assert (TREE_CODE (type
) == FUNCTION_TYPE
);
7994 return build_method_type_directly (basetype
,
7996 TYPE_ARG_TYPES (type
));
7999 /* Construct, lay out and return the type of offsets to a value
8000 of type TYPE, within an object of type BASETYPE.
8001 If a suitable offset type exists already, reuse it. */
8004 build_offset_type (tree basetype
, tree type
)
8008 /* Make a node of the sort we want. */
8009 t
= make_node (OFFSET_TYPE
);
8011 TYPE_OFFSET_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
8012 TREE_TYPE (t
) = type
;
8014 /* If we already have such a type, use the old one. */
8015 hashval_t hash
= type_hash_canon_hash (t
);
8016 t
= type_hash_canon (hash
, t
);
8018 if (!COMPLETE_TYPE_P (t
))
8021 if (TYPE_CANONICAL (t
) == t
)
8023 if (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
8024 || TYPE_STRUCTURAL_EQUALITY_P (type
))
8025 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8026 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)) != basetype
8027 || TYPE_CANONICAL (type
) != type
)
8029 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)),
8030 TYPE_CANONICAL (type
));
8036 /* Create a complex type whose components are COMPONENT_TYPE.
8038 If NAMED is true, the type is given a TYPE_NAME. We do not always
8039 do so because this creates a DECL node and thus make the DECL_UIDs
8040 dependent on the type canonicalization hashtable, which is GC-ed,
8041 so the DECL_UIDs would not be stable wrt garbage collection. */
8044 build_complex_type (tree component_type
, bool named
)
8046 gcc_assert (INTEGRAL_TYPE_P (component_type
)
8047 || SCALAR_FLOAT_TYPE_P (component_type
)
8048 || FIXED_POINT_TYPE_P (component_type
));
8050 /* Make a node of the sort we want. */
8051 tree probe
= make_node (COMPLEX_TYPE
);
8053 TREE_TYPE (probe
) = TYPE_MAIN_VARIANT (component_type
);
8055 /* If we already have such a type, use the old one. */
8056 hashval_t hash
= type_hash_canon_hash (probe
);
8057 tree t
= type_hash_canon (hash
, probe
);
8061 /* We created a new type. The hash insertion will have laid
8062 out the type. We need to check the canonicalization and
8063 maybe set the name. */
8064 gcc_checking_assert (COMPLETE_TYPE_P (t
)
8066 && TYPE_CANONICAL (t
) == t
);
8068 if (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (t
)))
8069 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8070 else if (TYPE_CANONICAL (TREE_TYPE (t
)) != TREE_TYPE (t
))
8072 = build_complex_type (TYPE_CANONICAL (TREE_TYPE (t
)), named
);
8074 /* We need to create a name, since complex is a fundamental type. */
8077 const char *name
= NULL
;
8079 if (TREE_TYPE (t
) == char_type_node
)
8080 name
= "complex char";
8081 else if (TREE_TYPE (t
) == signed_char_type_node
)
8082 name
= "complex signed char";
8083 else if (TREE_TYPE (t
) == unsigned_char_type_node
)
8084 name
= "complex unsigned char";
8085 else if (TREE_TYPE (t
) == short_integer_type_node
)
8086 name
= "complex short int";
8087 else if (TREE_TYPE (t
) == short_unsigned_type_node
)
8088 name
= "complex short unsigned int";
8089 else if (TREE_TYPE (t
) == integer_type_node
)
8090 name
= "complex int";
8091 else if (TREE_TYPE (t
) == unsigned_type_node
)
8092 name
= "complex unsigned int";
8093 else if (TREE_TYPE (t
) == long_integer_type_node
)
8094 name
= "complex long int";
8095 else if (TREE_TYPE (t
) == long_unsigned_type_node
)
8096 name
= "complex long unsigned int";
8097 else if (TREE_TYPE (t
) == long_long_integer_type_node
)
8098 name
= "complex long long int";
8099 else if (TREE_TYPE (t
) == long_long_unsigned_type_node
)
8100 name
= "complex long long unsigned int";
8103 TYPE_NAME (t
) = build_decl (UNKNOWN_LOCATION
, TYPE_DECL
,
8104 get_identifier (name
), t
);
8108 return build_qualified_type (t
, TYPE_QUALS (component_type
));
8111 /* If TYPE is a real or complex floating-point type and the target
8112 does not directly support arithmetic on TYPE then return the wider
8113 type to be used for arithmetic on TYPE. Otherwise, return
8117 excess_precision_type (tree type
)
8119 /* The target can give two different responses to the question of
8120 which excess precision mode it would like depending on whether we
8121 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8123 enum excess_precision_type requested_type
8124 = (flag_excess_precision
== EXCESS_PRECISION_FAST
8125 ? EXCESS_PRECISION_TYPE_FAST
8126 : EXCESS_PRECISION_TYPE_STANDARD
);
8128 enum flt_eval_method target_flt_eval_method
8129 = targetm
.c
.excess_precision (requested_type
);
8131 /* The target should not ask for unpredictable float evaluation (though
8132 it might advertise that implicitly the evaluation is unpredictable,
8133 but we don't care about that here, it will have been reported
8134 elsewhere). If it does ask for unpredictable evaluation, we have
8135 nothing to do here. */
8136 gcc_assert (target_flt_eval_method
!= FLT_EVAL_METHOD_UNPREDICTABLE
);
8138 /* Nothing to do. The target has asked for all types we know about
8139 to be computed with their native precision and range. */
8140 if (target_flt_eval_method
== FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16
)
8143 /* The target will promote this type in a target-dependent way, so excess
8144 precision ought to leave it alone. */
8145 if (targetm
.promoted_type (type
) != NULL_TREE
)
8148 machine_mode float16_type_mode
= (float16_type_node
8149 ? TYPE_MODE (float16_type_node
)
8151 machine_mode float_type_mode
= TYPE_MODE (float_type_node
);
8152 machine_mode double_type_mode
= TYPE_MODE (double_type_node
);
8154 switch (TREE_CODE (type
))
8158 machine_mode type_mode
= TYPE_MODE (type
);
8159 switch (target_flt_eval_method
)
8161 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
:
8162 if (type_mode
== float16_type_mode
)
8163 return float_type_node
;
8165 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
:
8166 if (type_mode
== float16_type_mode
8167 || type_mode
== float_type_mode
)
8168 return double_type_node
;
8170 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE
:
8171 if (type_mode
== float16_type_mode
8172 || type_mode
== float_type_mode
8173 || type_mode
== double_type_mode
)
8174 return long_double_type_node
;
8183 if (TREE_CODE (TREE_TYPE (type
)) != REAL_TYPE
)
8185 machine_mode type_mode
= TYPE_MODE (TREE_TYPE (type
));
8186 switch (target_flt_eval_method
)
8188 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT
:
8189 if (type_mode
== float16_type_mode
)
8190 return complex_float_type_node
;
8192 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE
:
8193 if (type_mode
== float16_type_mode
8194 || type_mode
== float_type_mode
)
8195 return complex_double_type_node
;
8197 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE
:
8198 if (type_mode
== float16_type_mode
8199 || type_mode
== float_type_mode
8200 || type_mode
== double_type_mode
)
8201 return complex_long_double_type_node
;
8215 /* Return OP, stripped of any conversions to wider types as much as is safe.
8216 Converting the value back to OP's type makes a value equivalent to OP.
8218 If FOR_TYPE is nonzero, we return a value which, if converted to
8219 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8221 OP must have integer, real or enumeral type. Pointers are not allowed!
8223 There are some cases where the obvious value we could return
8224 would regenerate to OP if converted to OP's type,
8225 but would not extend like OP to wider types.
8226 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8227 For example, if OP is (unsigned short)(signed char)-1,
8228 we avoid returning (signed char)-1 if FOR_TYPE is int,
8229 even though extending that to an unsigned short would regenerate OP,
8230 since the result of extending (signed char)-1 to (int)
8231 is different from (int) OP. */
8234 get_unwidened (tree op
, tree for_type
)
8236 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8237 tree type
= TREE_TYPE (op
);
8239 = TYPE_PRECISION (for_type
!= 0 ? for_type
: type
);
8241 = (for_type
!= 0 && for_type
!= type
8242 && final_prec
> TYPE_PRECISION (type
)
8243 && TYPE_UNSIGNED (type
));
8246 while (CONVERT_EXPR_P (op
))
8250 /* TYPE_PRECISION on vector types has different meaning
8251 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8252 so avoid them here. */
8253 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op
, 0))) == VECTOR_TYPE
)
8256 bitschange
= TYPE_PRECISION (TREE_TYPE (op
))
8257 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0)));
8259 /* Truncations are many-one so cannot be removed.
8260 Unless we are later going to truncate down even farther. */
8262 && final_prec
> TYPE_PRECISION (TREE_TYPE (op
)))
8265 /* See what's inside this conversion. If we decide to strip it,
8267 op
= TREE_OPERAND (op
, 0);
8269 /* If we have not stripped any zero-extensions (uns is 0),
8270 we can strip any kind of extension.
8271 If we have previously stripped a zero-extension,
8272 only zero-extensions can safely be stripped.
8273 Any extension can be stripped if the bits it would produce
8274 are all going to be discarded later by truncating to FOR_TYPE. */
8278 if (! uns
|| final_prec
<= TYPE_PRECISION (TREE_TYPE (op
)))
8280 /* TYPE_UNSIGNED says whether this is a zero-extension.
8281 Let's avoid computing it if it does not affect WIN
8282 and if UNS will not be needed again. */
8284 || CONVERT_EXPR_P (op
))
8285 && TYPE_UNSIGNED (TREE_TYPE (op
)))
8293 /* If we finally reach a constant see if it fits in sth smaller and
8294 in that case convert it. */
8295 if (TREE_CODE (win
) == INTEGER_CST
)
8297 tree wtype
= TREE_TYPE (win
);
8298 unsigned prec
= wi::min_precision (wi::to_wide (win
), TYPE_SIGN (wtype
));
8300 prec
= MAX (prec
, final_prec
);
8301 if (prec
< TYPE_PRECISION (wtype
))
8303 tree t
= lang_hooks
.types
.type_for_size (prec
, TYPE_UNSIGNED (wtype
));
8304 if (t
&& TYPE_PRECISION (t
) < TYPE_PRECISION (wtype
))
8305 win
= fold_convert (t
, win
);
8312 /* Return OP or a simpler expression for a narrower value
8313 which can be sign-extended or zero-extended to give back OP.
8314 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8315 or 0 if the value should be sign-extended. */
8318 get_narrower (tree op
, int *unsignedp_ptr
)
8323 bool integral_p
= INTEGRAL_TYPE_P (TREE_TYPE (op
));
8325 while (TREE_CODE (op
) == NOP_EXPR
)
8328 = (TYPE_PRECISION (TREE_TYPE (op
))
8329 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0))));
8331 /* Truncations are many-one so cannot be removed. */
8335 /* See what's inside this conversion. If we decide to strip it,
8340 op
= TREE_OPERAND (op
, 0);
8341 /* An extension: the outermost one can be stripped,
8342 but remember whether it is zero or sign extension. */
8344 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8345 /* Otherwise, if a sign extension has been stripped,
8346 only sign extensions can now be stripped;
8347 if a zero extension has been stripped, only zero-extensions. */
8348 else if (uns
!= TYPE_UNSIGNED (TREE_TYPE (op
)))
8352 else /* bitschange == 0 */
8354 /* A change in nominal type can always be stripped, but we must
8355 preserve the unsignedness. */
8357 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8359 op
= TREE_OPERAND (op
, 0);
8360 /* Keep trying to narrow, but don't assign op to win if it
8361 would turn an integral type into something else. */
8362 if (INTEGRAL_TYPE_P (TREE_TYPE (op
)) != integral_p
)
8369 if (TREE_CODE (op
) == COMPONENT_REF
8370 /* Since type_for_size always gives an integer type. */
8371 && TREE_CODE (TREE_TYPE (op
)) != REAL_TYPE
8372 && TREE_CODE (TREE_TYPE (op
)) != FIXED_POINT_TYPE
8373 /* Ensure field is laid out already. */
8374 && DECL_SIZE (TREE_OPERAND (op
, 1)) != 0
8375 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op
, 1))))
8377 unsigned HOST_WIDE_INT innerprec
8378 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op
, 1)));
8379 int unsignedp
= (DECL_UNSIGNED (TREE_OPERAND (op
, 1))
8380 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op
, 1))));
8381 tree type
= lang_hooks
.types
.type_for_size (innerprec
, unsignedp
);
8383 /* We can get this structure field in a narrower type that fits it,
8384 but the resulting extension to its nominal type (a fullword type)
8385 must satisfy the same conditions as for other extensions.
8387 Do this only for fields that are aligned (not bit-fields),
8388 because when bit-field insns will be used there is no
8389 advantage in doing this. */
8391 if (innerprec
< TYPE_PRECISION (TREE_TYPE (op
))
8392 && ! DECL_BIT_FIELD (TREE_OPERAND (op
, 1))
8393 && (first
|| uns
== DECL_UNSIGNED (TREE_OPERAND (op
, 1)))
8397 uns
= DECL_UNSIGNED (TREE_OPERAND (op
, 1));
8398 win
= fold_convert (type
, op
);
8402 *unsignedp_ptr
= uns
;
8406 /* Return true if integer constant C has a value that is permissible
8407 for TYPE, an integral type. */
8410 int_fits_type_p (const_tree c
, const_tree type
)
8412 tree type_low_bound
, type_high_bound
;
8413 bool ok_for_low_bound
, ok_for_high_bound
;
8414 signop sgn_c
= TYPE_SIGN (TREE_TYPE (c
));
8416 /* Non-standard boolean types can have arbitrary precision but various
8417 transformations assume that they can only take values 0 and +/-1. */
8418 if (TREE_CODE (type
) == BOOLEAN_TYPE
)
8419 return wi::fits_to_boolean_p (wi::to_wide (c
), type
);
8422 type_low_bound
= TYPE_MIN_VALUE (type
);
8423 type_high_bound
= TYPE_MAX_VALUE (type
);
8425 /* If at least one bound of the type is a constant integer, we can check
8426 ourselves and maybe make a decision. If no such decision is possible, but
8427 this type is a subtype, try checking against that. Otherwise, use
8428 fits_to_tree_p, which checks against the precision.
8430 Compute the status for each possibly constant bound, and return if we see
8431 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8432 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8433 for "constant known to fit". */
8435 /* Check if c >= type_low_bound. */
8436 if (type_low_bound
&& TREE_CODE (type_low_bound
) == INTEGER_CST
)
8438 if (tree_int_cst_lt (c
, type_low_bound
))
8440 ok_for_low_bound
= true;
8443 ok_for_low_bound
= false;
8445 /* Check if c <= type_high_bound. */
8446 if (type_high_bound
&& TREE_CODE (type_high_bound
) == INTEGER_CST
)
8448 if (tree_int_cst_lt (type_high_bound
, c
))
8450 ok_for_high_bound
= true;
8453 ok_for_high_bound
= false;
8455 /* If the constant fits both bounds, the result is known. */
8456 if (ok_for_low_bound
&& ok_for_high_bound
)
8459 /* Perform some generic filtering which may allow making a decision
8460 even if the bounds are not constant. First, negative integers
8461 never fit in unsigned types, */
8462 if (TYPE_UNSIGNED (type
) && sgn_c
== SIGNED
&& wi::neg_p (wi::to_wide (c
)))
8465 /* Second, narrower types always fit in wider ones. */
8466 if (TYPE_PRECISION (type
) > TYPE_PRECISION (TREE_TYPE (c
)))
8469 /* Third, unsigned integers with top bit set never fit signed types. */
8470 if (!TYPE_UNSIGNED (type
) && sgn_c
== UNSIGNED
)
8472 int prec
= GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (TREE_TYPE (c
))) - 1;
8473 if (prec
< TYPE_PRECISION (TREE_TYPE (c
)))
8475 /* When a tree_cst is converted to a wide-int, the precision
8476 is taken from the type. However, if the precision of the
8477 mode underneath the type is smaller than that, it is
8478 possible that the value will not fit. The test below
8479 fails if any bit is set between the sign bit of the
8480 underlying mode and the top bit of the type. */
8481 if (wi::zext (wi::to_wide (c
), prec
- 1) != wi::to_wide (c
))
8484 else if (wi::neg_p (wi::to_wide (c
)))
8488 /* If we haven't been able to decide at this point, there nothing more we
8489 can check ourselves here. Look at the base type if we have one and it
8490 has the same precision. */
8491 if (TREE_CODE (type
) == INTEGER_TYPE
8492 && TREE_TYPE (type
) != 0
8493 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (type
)))
8495 type
= TREE_TYPE (type
);
8499 /* Or to fits_to_tree_p, if nothing else. */
8500 return wi::fits_to_tree_p (wi::to_wide (c
), type
);
8503 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8504 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8505 represented (assuming two's-complement arithmetic) within the bit
8506 precision of the type are returned instead. */
8509 get_type_static_bounds (const_tree type
, mpz_t min
, mpz_t max
)
8511 if (!POINTER_TYPE_P (type
) && TYPE_MIN_VALUE (type
)
8512 && TREE_CODE (TYPE_MIN_VALUE (type
)) == INTEGER_CST
)
8513 wi::to_mpz (wi::to_wide (TYPE_MIN_VALUE (type
)), min
, TYPE_SIGN (type
));
8516 if (TYPE_UNSIGNED (type
))
8517 mpz_set_ui (min
, 0);
8520 wide_int mn
= wi::min_value (TYPE_PRECISION (type
), SIGNED
);
8521 wi::to_mpz (mn
, min
, SIGNED
);
8525 if (!POINTER_TYPE_P (type
) && TYPE_MAX_VALUE (type
)
8526 && TREE_CODE (TYPE_MAX_VALUE (type
)) == INTEGER_CST
)
8527 wi::to_mpz (wi::to_wide (TYPE_MAX_VALUE (type
)), max
, TYPE_SIGN (type
));
8530 wide_int mn
= wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
8531 wi::to_mpz (mn
, max
, TYPE_SIGN (type
));
8535 /* Return true if VAR is an automatic variable defined in function FN. */
8538 auto_var_in_fn_p (const_tree var
, const_tree fn
)
8540 return (DECL_P (var
) && DECL_CONTEXT (var
) == fn
8541 && ((((VAR_P (var
) && ! DECL_EXTERNAL (var
))
8542 || TREE_CODE (var
) == PARM_DECL
)
8543 && ! TREE_STATIC (var
))
8544 || TREE_CODE (var
) == LABEL_DECL
8545 || TREE_CODE (var
) == RESULT_DECL
));
8548 /* Subprogram of following function. Called by walk_tree.
8550 Return *TP if it is an automatic variable or parameter of the
8551 function passed in as DATA. */
8554 find_var_from_fn (tree
*tp
, int *walk_subtrees
, void *data
)
8556 tree fn
= (tree
) data
;
8561 else if (DECL_P (*tp
)
8562 && auto_var_in_fn_p (*tp
, fn
))
8568 /* Returns true if T is, contains, or refers to a type with variable
8569 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8570 arguments, but not the return type. If FN is nonzero, only return
8571 true if a modifier of the type or position of FN is a variable or
8572 parameter inside FN.
8574 This concept is more general than that of C99 'variably modified types':
8575 in C99, a struct type is never variably modified because a VLA may not
8576 appear as a structure member. However, in GNU C code like:
8578 struct S { int i[f()]; };
8580 is valid, and other languages may define similar constructs. */
8583 variably_modified_type_p (tree type
, tree fn
)
8587 /* Test if T is either variable (if FN is zero) or an expression containing
8588 a variable in FN. If TYPE isn't gimplified, return true also if
8589 gimplify_one_sizepos would gimplify the expression into a local
8591 #define RETURN_TRUE_IF_VAR(T) \
8592 do { tree _t = (T); \
8593 if (_t != NULL_TREE \
8594 && _t != error_mark_node \
8595 && TREE_CODE (_t) != INTEGER_CST \
8596 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8598 || (!TYPE_SIZES_GIMPLIFIED (type) \
8599 && !is_gimple_sizepos (_t)) \
8600 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8601 return true; } while (0)
8603 if (type
== error_mark_node
)
8606 /* If TYPE itself has variable size, it is variably modified. */
8607 RETURN_TRUE_IF_VAR (TYPE_SIZE (type
));
8608 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type
));
8610 switch (TREE_CODE (type
))
8613 case REFERENCE_TYPE
:
8615 /* Ada can have pointer types refering to themselves indirectly. */
8616 if (TREE_VISITED (type
))
8618 TREE_VISITED (type
) = true;
8619 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8621 TREE_VISITED (type
) = false;
8624 TREE_VISITED (type
) = false;
8629 /* If TYPE is a function type, it is variably modified if the
8630 return type is variably modified. */
8631 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8637 case FIXED_POINT_TYPE
:
8640 /* Scalar types are variably modified if their end points
8642 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type
));
8643 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type
));
8648 case QUAL_UNION_TYPE
:
8649 /* We can't see if any of the fields are variably-modified by the
8650 definition we normally use, since that would produce infinite
8651 recursion via pointers. */
8652 /* This is variably modified if some field's type is. */
8653 for (t
= TYPE_FIELDS (type
); t
; t
= DECL_CHAIN (t
))
8654 if (TREE_CODE (t
) == FIELD_DECL
)
8656 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t
));
8657 RETURN_TRUE_IF_VAR (DECL_SIZE (t
));
8658 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t
));
8660 if (TREE_CODE (type
) == QUAL_UNION_TYPE
)
8661 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t
));
8666 /* Do not call ourselves to avoid infinite recursion. This is
8667 variably modified if the element type is. */
8668 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type
)));
8669 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8676 /* The current language may have other cases to check, but in general,
8677 all other types are not variably modified. */
8678 return lang_hooks
.tree_inlining
.var_mod_type_p (type
, fn
);
8680 #undef RETURN_TRUE_IF_VAR
8683 /* Given a DECL or TYPE, return the scope in which it was declared, or
8684 NULL_TREE if there is no containing scope. */
8687 get_containing_scope (const_tree t
)
8689 return (TYPE_P (t
) ? TYPE_CONTEXT (t
) : DECL_CONTEXT (t
));
8692 /* Returns the ultimate TRANSLATION_UNIT_DECL context of DECL or NULL. */
8695 get_ultimate_context (const_tree decl
)
8697 while (decl
&& TREE_CODE (decl
) != TRANSLATION_UNIT_DECL
)
8699 if (TREE_CODE (decl
) == BLOCK
)
8700 decl
= BLOCK_SUPERCONTEXT (decl
);
8702 decl
= get_containing_scope (decl
);
8707 /* Return the innermost context enclosing DECL that is
8708 a FUNCTION_DECL, or zero if none. */
8711 decl_function_context (const_tree decl
)
8715 if (TREE_CODE (decl
) == ERROR_MARK
)
8718 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8719 where we look up the function at runtime. Such functions always take
8720 a first argument of type 'pointer to real context'.
8722 C++ should really be fixed to use DECL_CONTEXT for the real context,
8723 and use something else for the "virtual context". */
8724 else if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_VINDEX (decl
))
8727 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl
)))));
8729 context
= DECL_CONTEXT (decl
);
8731 while (context
&& TREE_CODE (context
) != FUNCTION_DECL
)
8733 if (TREE_CODE (context
) == BLOCK
)
8734 context
= BLOCK_SUPERCONTEXT (context
);
8736 context
= get_containing_scope (context
);
8742 /* Return the innermost context enclosing DECL that is
8743 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8744 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8747 decl_type_context (const_tree decl
)
8749 tree context
= DECL_CONTEXT (decl
);
8752 switch (TREE_CODE (context
))
8754 case NAMESPACE_DECL
:
8755 case TRANSLATION_UNIT_DECL
:
8760 case QUAL_UNION_TYPE
:
8765 context
= DECL_CONTEXT (context
);
8769 context
= BLOCK_SUPERCONTEXT (context
);
8779 /* CALL is a CALL_EXPR. Return the declaration for the function
8780 called, or NULL_TREE if the called function cannot be
8784 get_callee_fndecl (const_tree call
)
8788 if (call
== error_mark_node
)
8789 return error_mark_node
;
8791 /* It's invalid to call this function with anything but a
8793 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
8795 /* The first operand to the CALL is the address of the function
8797 addr
= CALL_EXPR_FN (call
);
8799 /* If there is no function, return early. */
8800 if (addr
== NULL_TREE
)
8805 /* If this is a readonly function pointer, extract its initial value. */
8806 if (DECL_P (addr
) && TREE_CODE (addr
) != FUNCTION_DECL
8807 && TREE_READONLY (addr
) && ! TREE_THIS_VOLATILE (addr
)
8808 && DECL_INITIAL (addr
))
8809 addr
= DECL_INITIAL (addr
);
8811 /* If the address is just `&f' for some function `f', then we know
8812 that `f' is being called. */
8813 if (TREE_CODE (addr
) == ADDR_EXPR
8814 && TREE_CODE (TREE_OPERAND (addr
, 0)) == FUNCTION_DECL
)
8815 return TREE_OPERAND (addr
, 0);
8817 /* We couldn't figure out what was being called. */
8821 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
8822 return the associated function code, otherwise return CFN_LAST. */
8825 get_call_combined_fn (const_tree call
)
8827 /* It's invalid to call this function with anything but a CALL_EXPR. */
8828 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
8830 if (!CALL_EXPR_FN (call
))
8831 return as_combined_fn (CALL_EXPR_IFN (call
));
8833 tree fndecl
= get_callee_fndecl (call
);
8834 if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
8835 return as_combined_fn (DECL_FUNCTION_CODE (fndecl
));
8840 #define TREE_MEM_USAGE_SPACES 40
8842 /* Print debugging information about tree nodes generated during the compile,
8843 and any language-specific information. */
8846 dump_tree_statistics (void)
8848 if (GATHER_STATISTICS
)
8851 int total_nodes
, total_bytes
;
8852 fprintf (stderr
, "\nKind Nodes Bytes\n");
8853 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8854 total_nodes
= total_bytes
= 0;
8855 for (i
= 0; i
< (int) all_kinds
; i
++)
8857 fprintf (stderr
, "%-20s %7d %10d\n", tree_node_kind_names
[i
],
8858 tree_node_counts
[i
], tree_node_sizes
[i
]);
8859 total_nodes
+= tree_node_counts
[i
];
8860 total_bytes
+= tree_node_sizes
[i
];
8862 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8863 fprintf (stderr
, "%-20s %7d %10d\n", "Total", total_nodes
, total_bytes
);
8864 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8865 fprintf (stderr
, "Code Nodes\n");
8866 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8867 for (i
= 0; i
< (int) MAX_TREE_CODES
; i
++)
8868 fprintf (stderr
, "%-32s %7d\n", get_tree_code_name ((enum tree_code
) i
),
8869 tree_code_counts
[i
]);
8870 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES
);
8871 fprintf (stderr
, "\n");
8872 ssanames_print_statistics ();
8873 fprintf (stderr
, "\n");
8874 phinodes_print_statistics ();
8875 fprintf (stderr
, "\n");
8878 fprintf (stderr
, "(No per-node statistics)\n");
8880 print_type_hash_statistics ();
8881 print_debug_expr_statistics ();
8882 print_value_expr_statistics ();
8883 lang_hooks
.print_statistics ();
8886 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8888 /* Generate a crc32 of the low BYTES bytes of VALUE. */
8891 crc32_unsigned_n (unsigned chksum
, unsigned value
, unsigned bytes
)
8893 /* This relies on the raw feedback's top 4 bits being zero. */
8894 #define FEEDBACK(X) ((X) * 0x04c11db7)
8895 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
8896 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
8897 static const unsigned syndromes
[16] =
8899 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
8900 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
8901 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
8902 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
8907 value
<<= (32 - bytes
* 8);
8908 for (unsigned ix
= bytes
* 2; ix
--; value
<<= 4)
8910 unsigned feedback
= syndromes
[((value
^ chksum
) >> 28) & 0xf];
8912 chksum
= (chksum
<< 4) ^ feedback
;
8918 /* Generate a crc32 of a string. */
8921 crc32_string (unsigned chksum
, const char *string
)
8924 chksum
= crc32_byte (chksum
, *string
);
8929 /* P is a string that will be used in a symbol. Mask out any characters
8930 that are not valid in that context. */
8933 clean_symbol_name (char *p
)
8937 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8940 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
8947 /* For anonymous aggregate types, we need some sort of name to
8948 hold on to. In practice, this should not appear, but it should
8949 not be harmful if it does. */
8951 anon_aggrname_p(const_tree id_node
)
8953 #ifndef NO_DOT_IN_LABEL
8954 return (IDENTIFIER_POINTER (id_node
)[0] == '.'
8955 && IDENTIFIER_POINTER (id_node
)[1] == '_');
8956 #else /* NO_DOT_IN_LABEL */
8957 #ifndef NO_DOLLAR_IN_LABEL
8958 return (IDENTIFIER_POINTER (id_node
)[0] == '$' \
8959 && IDENTIFIER_POINTER (id_node
)[1] == '_');
8960 #else /* NO_DOLLAR_IN_LABEL */
8961 #define ANON_AGGRNAME_PREFIX "__anon_"
8962 return (!strncmp (IDENTIFIER_POINTER (id_node
), ANON_AGGRNAME_PREFIX
,
8963 sizeof (ANON_AGGRNAME_PREFIX
) - 1));
8964 #endif /* NO_DOLLAR_IN_LABEL */
8965 #endif /* NO_DOT_IN_LABEL */
/* Return a printf-style format for an anonymous aggregate name; the
   single %d is filled with a per-TU counter.  The leading character
   depends on what the target assembler accepts in labels, mirroring
   anon_aggrname_p above.  */
const char *
anon_aggrname_format ()
{
#ifndef NO_DOT_IN_LABEL
  return "._%d";
#else /* NO_DOT_IN_LABEL */
#ifndef NO_DOLLAR_IN_LABEL
  return "$_%d";
#else /* NO_DOLLAR_IN_LABEL */
  return "__anon_%d";
#endif	/* NO_DOLLAR_IN_LABEL */
#endif	/* NO_DOT_IN_LABEL */
}
8983 /* Generate a name for a special-purpose function.
8984 The generated name may need to be unique across the whole link.
8985 Changes to this function may also require corresponding changes to
8986 xstrdup_mask_random.
8987 TYPE is some string to identify the purpose of this function to the
8988 linker or collect2; it must start with an uppercase letter,
8990 I - for constructors
8992 N - for C++ anonymous namespaces
8993 F - for DWARF unwind frame information. */
8996 get_file_function_name (const char *type
)
9002 /* If we already have a name we know to be unique, just use that. */
9003 if (first_global_object_name
)
9004 p
= q
= ASTRDUP (first_global_object_name
);
9005 /* If the target is handling the constructors/destructors, they
9006 will be local to this file and the name is only necessary for
9008 We also assign sub_I and sub_D sufixes to constructors called from
9009 the global static constructors. These are always local. */
9010 else if (((type
[0] == 'I' || type
[0] == 'D') && targetm
.have_ctors_dtors
)
9011 || (strncmp (type
, "sub_", 4) == 0
9012 && (type
[4] == 'I' || type
[4] == 'D')))
9014 const char *file
= main_input_filename
;
9016 file
= LOCATION_FILE (input_location
);
9017 /* Just use the file's basename, because the full pathname
9018 might be quite long. */
9019 p
= q
= ASTRDUP (lbasename (file
));
9023 /* Otherwise, the name must be unique across the entire link.
9024 We don't have anything that we know to be unique to this translation
9025 unit, so use what we do have and throw in some randomness. */
9027 const char *name
= weak_global_object_name
;
9028 const char *file
= main_input_filename
;
9033 file
= LOCATION_FILE (input_location
);
9035 len
= strlen (file
);
9036 q
= (char *) alloca (9 + 19 + len
+ 1);
9037 memcpy (q
, file
, len
+ 1);
9039 snprintf (q
+ len
, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX
,
9040 crc32_string (0, name
), get_random_seed (false));
9045 clean_symbol_name (q
);
9046 buf
= (char *) alloca (sizeof (FILE_FUNCTION_FORMAT
) + strlen (p
)
9049 /* Set up the name of the file-level functions we may need.
9050 Use a global object (which is already required to be unique over
9051 the program) rather than the file name (which imposes extra
9053 sprintf (buf
, FILE_FUNCTION_FORMAT
, type
, p
);
9055 return get_identifier (buf
);
9058 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9060 /* Complain that the tree code of NODE does not match the expected 0
9061 terminated list of trailing codes. The trailing code list can be
9062 empty, for a more vague error message. FILE, LINE, and FUNCTION
9063 are of the caller. */
9066 tree_check_failed (const_tree node
, const char *file
,
9067 int line
, const char *function
, ...)
9071 unsigned length
= 0;
9072 enum tree_code code
;
9074 va_start (args
, function
);
9075 while ((code
= (enum tree_code
) va_arg (args
, int)))
9076 length
+= 4 + strlen (get_tree_code_name (code
));
9081 va_start (args
, function
);
9082 length
+= strlen ("expected ");
9083 buffer
= tmp
= (char *) alloca (length
);
9085 while ((code
= (enum tree_code
) va_arg (args
, int)))
9087 const char *prefix
= length
? " or " : "expected ";
9089 strcpy (tmp
+ length
, prefix
);
9090 length
+= strlen (prefix
);
9091 strcpy (tmp
+ length
, get_tree_code_name (code
));
9092 length
+= strlen (get_tree_code_name (code
));
9097 buffer
= "unexpected node";
9099 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9100 buffer
, get_tree_code_name (TREE_CODE (node
)),
9101 function
, trim_filename (file
), line
);
9104 /* Complain that the tree code of NODE does match the expected 0
9105 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9109 tree_not_check_failed (const_tree node
, const char *file
,
9110 int line
, const char *function
, ...)
9114 unsigned length
= 0;
9115 enum tree_code code
;
9117 va_start (args
, function
);
9118 while ((code
= (enum tree_code
) va_arg (args
, int)))
9119 length
+= 4 + strlen (get_tree_code_name (code
));
9121 va_start (args
, function
);
9122 buffer
= (char *) alloca (length
);
9124 while ((code
= (enum tree_code
) va_arg (args
, int)))
9128 strcpy (buffer
+ length
, " or ");
9131 strcpy (buffer
+ length
, get_tree_code_name (code
));
9132 length
+= strlen (get_tree_code_name (code
));
9136 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9137 buffer
, get_tree_code_name (TREE_CODE (node
)),
9138 function
, trim_filename (file
), line
);
9141 /* Similar to tree_check_failed, except that we check for a class of tree
9142 code, given in CL. */
9145 tree_class_check_failed (const_tree node
, const enum tree_code_class cl
,
9146 const char *file
, int line
, const char *function
)
9149 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9150 TREE_CODE_CLASS_STRING (cl
),
9151 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
9152 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9155 /* Similar to tree_check_failed, except that instead of specifying a
9156 dozen codes, use the knowledge that they're all sequential. */
9159 tree_range_check_failed (const_tree node
, const char *file
, int line
,
9160 const char *function
, enum tree_code c1
,
9164 unsigned length
= 0;
9167 for (c
= c1
; c
<= c2
; ++c
)
9168 length
+= 4 + strlen (get_tree_code_name ((enum tree_code
) c
));
9170 length
+= strlen ("expected ");
9171 buffer
= (char *) alloca (length
);
9174 for (c
= c1
; c
<= c2
; ++c
)
9176 const char *prefix
= length
? " or " : "expected ";
9178 strcpy (buffer
+ length
, prefix
);
9179 length
+= strlen (prefix
);
9180 strcpy (buffer
+ length
, get_tree_code_name ((enum tree_code
) c
));
9181 length
+= strlen (get_tree_code_name ((enum tree_code
) c
));
9184 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9185 buffer
, get_tree_code_name (TREE_CODE (node
)),
9186 function
, trim_filename (file
), line
);
9190 /* Similar to tree_check_failed, except that we check that a tree does
9191 not have the specified code, given in CL. */
9194 tree_not_class_check_failed (const_tree node
, const enum tree_code_class cl
,
9195 const char *file
, int line
, const char *function
)
9198 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9199 TREE_CODE_CLASS_STRING (cl
),
9200 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
9201 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9205 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9208 omp_clause_check_failed (const_tree node
, const char *file
, int line
,
9209 const char *function
, enum omp_clause_code code
)
9211 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9212 omp_clause_code_name
[code
], get_tree_code_name (TREE_CODE (node
)),
9213 function
, trim_filename (file
), line
);
9217 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9220 omp_clause_range_check_failed (const_tree node
, const char *file
, int line
,
9221 const char *function
, enum omp_clause_code c1
,
9222 enum omp_clause_code c2
)
9225 unsigned length
= 0;
9228 for (c
= c1
; c
<= c2
; ++c
)
9229 length
+= 4 + strlen (omp_clause_code_name
[c
]);
9231 length
+= strlen ("expected ");
9232 buffer
= (char *) alloca (length
);
9235 for (c
= c1
; c
<= c2
; ++c
)
9237 const char *prefix
= length
? " or " : "expected ";
9239 strcpy (buffer
+ length
, prefix
);
9240 length
+= strlen (prefix
);
9241 strcpy (buffer
+ length
, omp_clause_code_name
[c
]);
9242 length
+= strlen (omp_clause_code_name
[c
]);
9245 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9246 buffer
, omp_clause_code_name
[TREE_CODE (node
)],
9247 function
, trim_filename (file
), line
);
9251 #undef DEFTREESTRUCT
9252 #define DEFTREESTRUCT(VAL, NAME) NAME,
9254 static const char *ts_enum_names
[] = {
9255 #include "treestruct.def"
9257 #undef DEFTREESTRUCT
9259 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9261 /* Similar to tree_class_check_failed, except that we check for
9262 whether CODE contains the tree structure identified by EN. */
9265 tree_contains_struct_check_failed (const_tree node
,
9266 const enum tree_node_structure_enum en
,
9267 const char *file
, int line
,
9268 const char *function
)
9271 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9273 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
/* Similar to above, except that the check is for the bounds of a
   tree_int_cst's (dynamically sized) element vector.  Does not return.  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  internal_error
    ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
/* Similar to above, except that the check is for the bounds of a TREE_VEC's
   (dynamically sized) vector.  Does not return.  */

void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  internal_error
    ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
9301 /* Similar to above, except that the check is for the bounds of the operand
9302 vector of an expression node EXP. */
9305 tree_operand_check_failed (int idx
, const_tree exp
, const char *file
,
9306 int line
, const char *function
)
9308 enum tree_code code
= TREE_CODE (exp
);
9310 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9311 idx
+ 1, get_tree_code_name (code
), TREE_OPERAND_LENGTH (exp
),
9312 function
, trim_filename (file
), line
);
9315 /* Similar to above, except that the check is for the number of
9316 operands of an OMP_CLAUSE node. */
9319 omp_clause_operand_check_failed (int idx
, const_tree t
, const char *file
,
9320 int line
, const char *function
)
9323 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9324 "in %s, at %s:%d", idx
+ 1, omp_clause_code_name
[OMP_CLAUSE_CODE (t
)],
9325 omp_clause_num_ops
[OMP_CLAUSE_CODE (t
)], function
,
9326 trim_filename (file
), line
);
9328 #endif /* ENABLE_TREE_CHECKING */
9330 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9331 and mapped to the machine mode MODE. Initialize its fields and build
9332 the information necessary for debugging output. */
9335 make_vector_type (tree innertype
, int nunits
, machine_mode mode
)
9338 tree mv_innertype
= TYPE_MAIN_VARIANT (innertype
);
9340 t
= make_node (VECTOR_TYPE
);
9341 TREE_TYPE (t
) = mv_innertype
;
9342 SET_TYPE_VECTOR_SUBPARTS (t
, nunits
);
9343 SET_TYPE_MODE (t
, mode
);
9345 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype
) || in_lto_p
)
9346 SET_TYPE_STRUCTURAL_EQUALITY (t
);
9347 else if ((TYPE_CANONICAL (mv_innertype
) != innertype
9348 || mode
!= VOIDmode
)
9349 && !VECTOR_BOOLEAN_TYPE_P (t
))
9351 = make_vector_type (TYPE_CANONICAL (mv_innertype
), nunits
, VOIDmode
);
9355 hashval_t hash
= type_hash_canon_hash (t
);
9356 t
= type_hash_canon (hash
, t
);
9358 /* We have built a main variant, based on the main variant of the
9359 inner type. Use it to build the variant we return. */
9360 if ((TYPE_ATTRIBUTES (innertype
) || TYPE_QUALS (innertype
))
9361 && TREE_TYPE (t
) != innertype
)
9362 return build_type_attribute_qual_variant (t
,
9363 TYPE_ATTRIBUTES (innertype
),
9364 TYPE_QUALS (innertype
));
9370 make_or_reuse_type (unsigned size
, int unsignedp
)
9374 if (size
== INT_TYPE_SIZE
)
9375 return unsignedp
? unsigned_type_node
: integer_type_node
;
9376 if (size
== CHAR_TYPE_SIZE
)
9377 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
9378 if (size
== SHORT_TYPE_SIZE
)
9379 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
9380 if (size
== LONG_TYPE_SIZE
)
9381 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
9382 if (size
== LONG_LONG_TYPE_SIZE
)
9383 return (unsignedp
? long_long_unsigned_type_node
9384 : long_long_integer_type_node
);
9386 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9387 if (size
== int_n_data
[i
].bitsize
9388 && int_n_enabled_p
[i
])
9389 return (unsignedp
? int_n_trees
[i
].unsigned_type
9390 : int_n_trees
[i
].signed_type
);
9393 return make_unsigned_type (size
);
9395 return make_signed_type (size
);
9398 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9401 make_or_reuse_fract_type (unsigned size
, int unsignedp
, int satp
)
9405 if (size
== SHORT_FRACT_TYPE_SIZE
)
9406 return unsignedp
? sat_unsigned_short_fract_type_node
9407 : sat_short_fract_type_node
;
9408 if (size
== FRACT_TYPE_SIZE
)
9409 return unsignedp
? sat_unsigned_fract_type_node
: sat_fract_type_node
;
9410 if (size
== LONG_FRACT_TYPE_SIZE
)
9411 return unsignedp
? sat_unsigned_long_fract_type_node
9412 : sat_long_fract_type_node
;
9413 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9414 return unsignedp
? sat_unsigned_long_long_fract_type_node
9415 : sat_long_long_fract_type_node
;
9419 if (size
== SHORT_FRACT_TYPE_SIZE
)
9420 return unsignedp
? unsigned_short_fract_type_node
9421 : short_fract_type_node
;
9422 if (size
== FRACT_TYPE_SIZE
)
9423 return unsignedp
? unsigned_fract_type_node
: fract_type_node
;
9424 if (size
== LONG_FRACT_TYPE_SIZE
)
9425 return unsignedp
? unsigned_long_fract_type_node
9426 : long_fract_type_node
;
9427 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9428 return unsignedp
? unsigned_long_long_fract_type_node
9429 : long_long_fract_type_node
;
9432 return make_fract_type (size
, unsignedp
, satp
);
9435 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9438 make_or_reuse_accum_type (unsigned size
, int unsignedp
, int satp
)
9442 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9443 return unsignedp
? sat_unsigned_short_accum_type_node
9444 : sat_short_accum_type_node
;
9445 if (size
== ACCUM_TYPE_SIZE
)
9446 return unsignedp
? sat_unsigned_accum_type_node
: sat_accum_type_node
;
9447 if (size
== LONG_ACCUM_TYPE_SIZE
)
9448 return unsignedp
? sat_unsigned_long_accum_type_node
9449 : sat_long_accum_type_node
;
9450 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9451 return unsignedp
? sat_unsigned_long_long_accum_type_node
9452 : sat_long_long_accum_type_node
;
9456 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9457 return unsignedp
? unsigned_short_accum_type_node
9458 : short_accum_type_node
;
9459 if (size
== ACCUM_TYPE_SIZE
)
9460 return unsignedp
? unsigned_accum_type_node
: accum_type_node
;
9461 if (size
== LONG_ACCUM_TYPE_SIZE
)
9462 return unsignedp
? unsigned_long_accum_type_node
9463 : long_accum_type_node
;
9464 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9465 return unsignedp
? unsigned_long_long_accum_type_node
9466 : long_long_accum_type_node
;
9469 return make_accum_type (size
, unsignedp
, satp
);
9473 /* Create an atomic variant node for TYPE. This routine is called
9474 during initialization of data types to create the 5 basic atomic
9475 types. The generic build_variant_type function requires these to
9476 already be set up in order to function properly, so cannot be
9477 called from there. If ALIGN is non-zero, then ensure alignment is
9478 overridden to this value. */
9481 build_atomic_base (tree type
, unsigned int align
)
9485 /* Make sure its not already registered. */
9486 if ((t
= get_qualified_type (type
, TYPE_QUAL_ATOMIC
)))
9489 t
= build_variant_type_copy (type
);
9490 set_type_quals (t
, TYPE_QUAL_ATOMIC
);
9493 SET_TYPE_ALIGN (t
, align
);
9498 /* Information about the _FloatN and _FloatNx types. This must be in
9499 the same order as the corresponding TI_* enum values. */
9500 const floatn_type_info floatn_nx_types
[NUM_FLOATN_NX_TYPES
] =
9512 /* Create nodes for all integer types (and error_mark_node) using the sizes
9513 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
9516 build_common_tree_nodes (bool signed_char
)
9520 error_mark_node
= make_node (ERROR_MARK
);
9521 TREE_TYPE (error_mark_node
) = error_mark_node
;
9523 initialize_sizetypes ();
9525 /* Define both `signed char' and `unsigned char'. */
9526 signed_char_type_node
= make_signed_type (CHAR_TYPE_SIZE
);
9527 TYPE_STRING_FLAG (signed_char_type_node
) = 1;
9528 unsigned_char_type_node
= make_unsigned_type (CHAR_TYPE_SIZE
);
9529 TYPE_STRING_FLAG (unsigned_char_type_node
) = 1;
9531 /* Define `char', which is like either `signed char' or `unsigned char'
9532 but not the same as either. */
9535 ? make_signed_type (CHAR_TYPE_SIZE
)
9536 : make_unsigned_type (CHAR_TYPE_SIZE
));
9537 TYPE_STRING_FLAG (char_type_node
) = 1;
9539 short_integer_type_node
= make_signed_type (SHORT_TYPE_SIZE
);
9540 short_unsigned_type_node
= make_unsigned_type (SHORT_TYPE_SIZE
);
9541 integer_type_node
= make_signed_type (INT_TYPE_SIZE
);
9542 unsigned_type_node
= make_unsigned_type (INT_TYPE_SIZE
);
9543 long_integer_type_node
= make_signed_type (LONG_TYPE_SIZE
);
9544 long_unsigned_type_node
= make_unsigned_type (LONG_TYPE_SIZE
);
9545 long_long_integer_type_node
= make_signed_type (LONG_LONG_TYPE_SIZE
);
9546 long_long_unsigned_type_node
= make_unsigned_type (LONG_LONG_TYPE_SIZE
);
9548 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9550 int_n_trees
[i
].signed_type
= make_signed_type (int_n_data
[i
].bitsize
);
9551 int_n_trees
[i
].unsigned_type
= make_unsigned_type (int_n_data
[i
].bitsize
);
9552 TYPE_SIZE (int_n_trees
[i
].signed_type
) = bitsize_int (int_n_data
[i
].bitsize
);
9553 TYPE_SIZE (int_n_trees
[i
].unsigned_type
) = bitsize_int (int_n_data
[i
].bitsize
);
9555 if (int_n_data
[i
].bitsize
> LONG_LONG_TYPE_SIZE
9556 && int_n_enabled_p
[i
])
9558 integer_types
[itk_intN_0
+ i
* 2] = int_n_trees
[i
].signed_type
;
9559 integer_types
[itk_unsigned_intN_0
+ i
* 2] = int_n_trees
[i
].unsigned_type
;
9563 /* Define a boolean type. This type only represents boolean values but
9564 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9565 boolean_type_node
= make_unsigned_type (BOOL_TYPE_SIZE
);
9566 TREE_SET_CODE (boolean_type_node
, BOOLEAN_TYPE
);
9567 TYPE_PRECISION (boolean_type_node
) = 1;
9568 TYPE_MAX_VALUE (boolean_type_node
) = build_int_cst (boolean_type_node
, 1);
9570 /* Define what type to use for size_t. */
9571 if (strcmp (SIZE_TYPE
, "unsigned int") == 0)
9572 size_type_node
= unsigned_type_node
;
9573 else if (strcmp (SIZE_TYPE
, "long unsigned int") == 0)
9574 size_type_node
= long_unsigned_type_node
;
9575 else if (strcmp (SIZE_TYPE
, "long long unsigned int") == 0)
9576 size_type_node
= long_long_unsigned_type_node
;
9577 else if (strcmp (SIZE_TYPE
, "short unsigned int") == 0)
9578 size_type_node
= short_unsigned_type_node
;
9583 size_type_node
= NULL_TREE
;
9584 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9585 if (int_n_enabled_p
[i
])
9588 sprintf (name
, "__int%d unsigned", int_n_data
[i
].bitsize
);
9590 if (strcmp (name
, SIZE_TYPE
) == 0)
9592 size_type_node
= int_n_trees
[i
].unsigned_type
;
9595 if (size_type_node
== NULL_TREE
)
9599 /* Define what type to use for ptrdiff_t. */
9600 if (strcmp (PTRDIFF_TYPE
, "int") == 0)
9601 ptrdiff_type_node
= integer_type_node
;
9602 else if (strcmp (PTRDIFF_TYPE
, "long int") == 0)
9603 ptrdiff_type_node
= long_integer_type_node
;
9604 else if (strcmp (PTRDIFF_TYPE
, "long long int") == 0)
9605 ptrdiff_type_node
= long_long_integer_type_node
;
9606 else if (strcmp (PTRDIFF_TYPE
, "short int") == 0)
9607 ptrdiff_type_node
= short_integer_type_node
;
9610 ptrdiff_type_node
= NULL_TREE
;
9611 for (int i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9612 if (int_n_enabled_p
[i
])
9615 sprintf (name
, "__int%d", int_n_data
[i
].bitsize
);
9616 if (strcmp (name
, PTRDIFF_TYPE
) == 0)
9617 ptrdiff_type_node
= int_n_trees
[i
].signed_type
;
9619 if (ptrdiff_type_node
== NULL_TREE
)
9623 /* Fill in the rest of the sized types. Reuse existing type nodes
9625 intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 0);
9626 intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 0);
9627 intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 0);
9628 intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 0);
9629 intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 0);
9631 unsigned_intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 1);
9632 unsigned_intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 1);
9633 unsigned_intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 1);
9634 unsigned_intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 1);
9635 unsigned_intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 1);
9637 /* Don't call build_qualified type for atomics. That routine does
9638 special processing for atomics, and until they are initialized
9639 it's better not to make that call.
9641 Check to see if there is a target override for atomic types. */
9643 atomicQI_type_node
= build_atomic_base (unsigned_intQI_type_node
,
9644 targetm
.atomic_align_for_mode (QImode
));
9645 atomicHI_type_node
= build_atomic_base (unsigned_intHI_type_node
,
9646 targetm
.atomic_align_for_mode (HImode
));
9647 atomicSI_type_node
= build_atomic_base (unsigned_intSI_type_node
,
9648 targetm
.atomic_align_for_mode (SImode
));
9649 atomicDI_type_node
= build_atomic_base (unsigned_intDI_type_node
,
9650 targetm
.atomic_align_for_mode (DImode
));
9651 atomicTI_type_node
= build_atomic_base (unsigned_intTI_type_node
,
9652 targetm
.atomic_align_for_mode (TImode
));
9654 access_public_node
= get_identifier ("public");
9655 access_protected_node
= get_identifier ("protected");
9656 access_private_node
= get_identifier ("private");
9658 /* Define these next since types below may used them. */
9659 integer_zero_node
= build_int_cst (integer_type_node
, 0);
9660 integer_one_node
= build_int_cst (integer_type_node
, 1);
9661 integer_three_node
= build_int_cst (integer_type_node
, 3);
9662 integer_minus_one_node
= build_int_cst (integer_type_node
, -1);
9664 size_zero_node
= size_int (0);
9665 size_one_node
= size_int (1);
9666 bitsize_zero_node
= bitsize_int (0);
9667 bitsize_one_node
= bitsize_int (1);
9668 bitsize_unit_node
= bitsize_int (BITS_PER_UNIT
);
9670 boolean_false_node
= TYPE_MIN_VALUE (boolean_type_node
);
9671 boolean_true_node
= TYPE_MAX_VALUE (boolean_type_node
);
9673 void_type_node
= make_node (VOID_TYPE
);
9674 layout_type (void_type_node
);
9676 pointer_bounds_type_node
= targetm
.chkp_bound_type ();
9678 /* We are not going to have real types in C with less than byte alignment,
9679 so we might as well not have any types that claim to have it. */
9680 SET_TYPE_ALIGN (void_type_node
, BITS_PER_UNIT
);
9681 TYPE_USER_ALIGN (void_type_node
) = 0;
9683 void_node
= make_node (VOID_CST
);
9684 TREE_TYPE (void_node
) = void_type_node
;
9686 null_pointer_node
= build_int_cst (build_pointer_type (void_type_node
), 0);
9687 layout_type (TREE_TYPE (null_pointer_node
));
9689 ptr_type_node
= build_pointer_type (void_type_node
);
9691 = build_pointer_type (build_type_variant (void_type_node
, 1, 0));
9692 for (unsigned i
= 0;
9693 i
< sizeof (builtin_structptr_types
) / sizeof (builtin_structptr_type
);
9695 builtin_structptr_types
[i
].node
= builtin_structptr_types
[i
].base
;
9697 pointer_sized_int_node
= build_nonstandard_integer_type (POINTER_SIZE
, 1);
9699 float_type_node
= make_node (REAL_TYPE
);
9700 TYPE_PRECISION (float_type_node
) = FLOAT_TYPE_SIZE
;
9701 layout_type (float_type_node
);
9703 double_type_node
= make_node (REAL_TYPE
);
9704 TYPE_PRECISION (double_type_node
) = DOUBLE_TYPE_SIZE
;
9705 layout_type (double_type_node
);
9707 long_double_type_node
= make_node (REAL_TYPE
);
9708 TYPE_PRECISION (long_double_type_node
) = LONG_DOUBLE_TYPE_SIZE
;
9709 layout_type (long_double_type_node
);
9711 for (i
= 0; i
< NUM_FLOATN_NX_TYPES
; i
++)
9713 int n
= floatn_nx_types
[i
].n
;
9714 bool extended
= floatn_nx_types
[i
].extended
;
9715 scalar_float_mode mode
;
9716 if (!targetm
.floatn_mode (n
, extended
).exists (&mode
))
9718 int precision
= GET_MODE_PRECISION (mode
);
9719 /* Work around the rs6000 KFmode having precision 113 not
9721 const struct real_format
*fmt
= REAL_MODE_FORMAT (mode
);
9722 gcc_assert (fmt
->b
== 2 && fmt
->emin
+ fmt
->emax
== 3);
9723 int min_precision
= fmt
->p
+ ceil_log2 (fmt
->emax
- fmt
->emin
);
9725 gcc_assert (min_precision
== n
);
9726 if (precision
< min_precision
)
9727 precision
= min_precision
;
9728 FLOATN_NX_TYPE_NODE (i
) = make_node (REAL_TYPE
);
9729 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i
)) = precision
;
9730 layout_type (FLOATN_NX_TYPE_NODE (i
));
9731 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i
), mode
);
9734 float_ptr_type_node
= build_pointer_type (float_type_node
);
9735 double_ptr_type_node
= build_pointer_type (double_type_node
);
9736 long_double_ptr_type_node
= build_pointer_type (long_double_type_node
);
9737 integer_ptr_type_node
= build_pointer_type (integer_type_node
);
9739 /* Fixed size integer types. */
9740 uint16_type_node
= make_or_reuse_type (16, 1);
9741 uint32_type_node
= make_or_reuse_type (32, 1);
9742 uint64_type_node
= make_or_reuse_type (64, 1);
9744 /* Decimal float types. */
9745 dfloat32_type_node
= make_node (REAL_TYPE
);
9746 TYPE_PRECISION (dfloat32_type_node
) = DECIMAL32_TYPE_SIZE
;
9747 SET_TYPE_MODE (dfloat32_type_node
, SDmode
);
9748 layout_type (dfloat32_type_node
);
9749 dfloat32_ptr_type_node
= build_pointer_type (dfloat32_type_node
);
9751 dfloat64_type_node
= make_node (REAL_TYPE
);
9752 TYPE_PRECISION (dfloat64_type_node
) = DECIMAL64_TYPE_SIZE
;
9753 SET_TYPE_MODE (dfloat64_type_node
, DDmode
);
9754 layout_type (dfloat64_type_node
);
9755 dfloat64_ptr_type_node
= build_pointer_type (dfloat64_type_node
);
9757 dfloat128_type_node
= make_node (REAL_TYPE
);
9758 TYPE_PRECISION (dfloat128_type_node
) = DECIMAL128_TYPE_SIZE
;
9759 SET_TYPE_MODE (dfloat128_type_node
, TDmode
);
9760 layout_type (dfloat128_type_node
);
9761 dfloat128_ptr_type_node
= build_pointer_type (dfloat128_type_node
);
9763 complex_integer_type_node
= build_complex_type (integer_type_node
, true);
9764 complex_float_type_node
= build_complex_type (float_type_node
, true);
9765 complex_double_type_node
= build_complex_type (double_type_node
, true);
9766 complex_long_double_type_node
= build_complex_type (long_double_type_node
,
9769 for (i
= 0; i
< NUM_FLOATN_NX_TYPES
; i
++)
9771 if (FLOATN_NX_TYPE_NODE (i
) != NULL_TREE
)
9772 COMPLEX_FLOATN_NX_TYPE_NODE (i
)
9773 = build_complex_type (FLOATN_NX_TYPE_NODE (i
));
9776 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9777 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9778 sat_ ## KIND ## _type_node = \
9779 make_sat_signed_ ## KIND ## _type (SIZE); \
9780 sat_unsigned_ ## KIND ## _type_node = \
9781 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9782 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9783 unsigned_ ## KIND ## _type_node = \
9784 make_unsigned_ ## KIND ## _type (SIZE);
9786 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9787 sat_ ## WIDTH ## KIND ## _type_node = \
9788 make_sat_signed_ ## KIND ## _type (SIZE); \
9789 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9790 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9791 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9792 unsigned_ ## WIDTH ## KIND ## _type_node = \
9793 make_unsigned_ ## KIND ## _type (SIZE);
9795 /* Make fixed-point type nodes based on four different widths. */
9796 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9797 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9798 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9799 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9800 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9802 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9803 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9804 NAME ## _type_node = \
9805 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9806 u ## NAME ## _type_node = \
9807 make_or_reuse_unsigned_ ## KIND ## _type \
9808 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9809 sat_ ## NAME ## _type_node = \
9810 make_or_reuse_sat_signed_ ## KIND ## _type \
9811 (GET_MODE_BITSIZE (MODE ## mode)); \
9812 sat_u ## NAME ## _type_node = \
9813 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9814 (GET_MODE_BITSIZE (U ## MODE ## mode));
9816 /* Fixed-point type and mode nodes. */
9817 MAKE_FIXED_TYPE_NODE_FAMILY (fract
, FRACT
)
9818 MAKE_FIXED_TYPE_NODE_FAMILY (accum
, ACCUM
)
9819 MAKE_FIXED_MODE_NODE (fract
, qq
, QQ
)
9820 MAKE_FIXED_MODE_NODE (fract
, hq
, HQ
)
9821 MAKE_FIXED_MODE_NODE (fract
, sq
, SQ
)
9822 MAKE_FIXED_MODE_NODE (fract
, dq
, DQ
)
9823 MAKE_FIXED_MODE_NODE (fract
, tq
, TQ
)
9824 MAKE_FIXED_MODE_NODE (accum
, ha
, HA
)
9825 MAKE_FIXED_MODE_NODE (accum
, sa
, SA
)
9826 MAKE_FIXED_MODE_NODE (accum
, da
, DA
)
9827 MAKE_FIXED_MODE_NODE (accum
, ta
, TA
)
9830 tree t
= targetm
.build_builtin_va_list ();
9832 /* Many back-ends define record types without setting TYPE_NAME.
9833 If we copied the record type here, we'd keep the original
9834 record type without a name. This breaks name mangling. So,
9835 don't copy record types and let c_common_nodes_and_builtins()
9836 declare the type to be __builtin_va_list. */
9837 if (TREE_CODE (t
) != RECORD_TYPE
)
9838 t
= build_variant_type_copy (t
);
9840 va_list_type_node
= t
;
9844 /* Modify DECL for given flags.
9845 TM_PURE attribute is set only on types, so the function will modify
9846 DECL's type when ECF_TM_PURE is used. */
9849 set_call_expr_flags (tree decl
, int flags
)
9851 if (flags
& ECF_NOTHROW
)
9852 TREE_NOTHROW (decl
) = 1;
9853 if (flags
& ECF_CONST
)
9854 TREE_READONLY (decl
) = 1;
9855 if (flags
& ECF_PURE
)
9856 DECL_PURE_P (decl
) = 1;
9857 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
9858 DECL_LOOPING_CONST_OR_PURE_P (decl
) = 1;
9859 if (flags
& ECF_NOVOPS
)
9860 DECL_IS_NOVOPS (decl
) = 1;
9861 if (flags
& ECF_NORETURN
)
9862 TREE_THIS_VOLATILE (decl
) = 1;
9863 if (flags
& ECF_MALLOC
)
9864 DECL_IS_MALLOC (decl
) = 1;
9865 if (flags
& ECF_RETURNS_TWICE
)
9866 DECL_IS_RETURNS_TWICE (decl
) = 1;
9867 if (flags
& ECF_LEAF
)
9868 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("leaf"),
9869 NULL
, DECL_ATTRIBUTES (decl
));
9870 if (flags
& ECF_COLD
)
9871 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("cold"),
9872 NULL
, DECL_ATTRIBUTES (decl
));
9873 if (flags
& ECF_RET1
)
9874 DECL_ATTRIBUTES (decl
)
9875 = tree_cons (get_identifier ("fn spec"),
9876 build_tree_list (NULL_TREE
, build_string (1, "1")),
9877 DECL_ATTRIBUTES (decl
));
9878 if ((flags
& ECF_TM_PURE
) && flag_tm
)
9879 apply_tm_attr (decl
, get_identifier ("transaction_pure"));
9880 /* Looping const or pure is implied by noreturn.
9881 There is currently no way to declare looping const or looping pure alone. */
9882 gcc_assert (!(flags
& ECF_LOOPING_CONST_OR_PURE
)
9883 || ((flags
& ECF_NORETURN
) && (flags
& (ECF_CONST
| ECF_PURE
))));
9887 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9890 local_define_builtin (const char *name
, tree type
, enum built_in_function code
,
9891 const char *library_name
, int ecf_flags
)
9895 decl
= add_builtin_function (name
, type
, code
, BUILT_IN_NORMAL
,
9896 library_name
, NULL_TREE
);
9897 set_call_expr_flags (decl
, ecf_flags
);
9899 set_builtin_decl (code
, decl
, true);
9902 /* Call this function after instantiating all builtins that the language
9903 front end cares about. This will build the rest of the builtins
9904 and internal functions that are relied upon by the tree optimizers and
9908 build_common_builtin_nodes (void)
9913 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
)
9914 || !builtin_decl_explicit_p (BUILT_IN_ABORT
))
9916 ftype
= build_function_type (void_type_node
, void_list_node
);
9917 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
))
9918 local_define_builtin ("__builtin_unreachable", ftype
,
9919 BUILT_IN_UNREACHABLE
,
9920 "__builtin_unreachable",
9921 ECF_NOTHROW
| ECF_LEAF
| ECF_NORETURN
9922 | ECF_CONST
| ECF_COLD
);
9923 if (!builtin_decl_explicit_p (BUILT_IN_ABORT
))
9924 local_define_builtin ("__builtin_abort", ftype
, BUILT_IN_ABORT
,
9926 ECF_LEAF
| ECF_NORETURN
| ECF_CONST
| ECF_COLD
);
9929 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
)
9930 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9932 ftype
= build_function_type_list (ptr_type_node
,
9933 ptr_type_node
, const_ptr_type_node
,
9934 size_type_node
, NULL_TREE
);
9936 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
))
9937 local_define_builtin ("__builtin_memcpy", ftype
, BUILT_IN_MEMCPY
,
9938 "memcpy", ECF_NOTHROW
| ECF_LEAF
| ECF_RET1
);
9939 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9940 local_define_builtin ("__builtin_memmove", ftype
, BUILT_IN_MEMMOVE
,
9941 "memmove", ECF_NOTHROW
| ECF_LEAF
| ECF_RET1
);
9944 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP
))
9946 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
9947 const_ptr_type_node
, size_type_node
,
9949 local_define_builtin ("__builtin_memcmp", ftype
, BUILT_IN_MEMCMP
,
9950 "memcmp", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9953 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET
))
9955 ftype
= build_function_type_list (ptr_type_node
,
9956 ptr_type_node
, integer_type_node
,
9957 size_type_node
, NULL_TREE
);
9958 local_define_builtin ("__builtin_memset", ftype
, BUILT_IN_MEMSET
,
9959 "memset", ECF_NOTHROW
| ECF_LEAF
| ECF_RET1
);
9962 /* If we're checking the stack, `alloca' can throw. */
9963 const int alloca_flags
9964 = ECF_MALLOC
| ECF_LEAF
| (flag_stack_check
? 0 : ECF_NOTHROW
);
9966 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA
))
9968 ftype
= build_function_type_list (ptr_type_node
,
9969 size_type_node
, NULL_TREE
);
9970 local_define_builtin ("__builtin_alloca", ftype
, BUILT_IN_ALLOCA
,
9971 "alloca", alloca_flags
);
9974 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9975 size_type_node
, NULL_TREE
);
9976 local_define_builtin ("__builtin_alloca_with_align", ftype
,
9977 BUILT_IN_ALLOCA_WITH_ALIGN
,
9978 "__builtin_alloca_with_align",
9981 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9982 size_type_node
, size_type_node
, NULL_TREE
);
9983 local_define_builtin ("__builtin_alloca_with_align_and_max", ftype
,
9984 BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
,
9985 "__builtin_alloca_with_align_and_max",
9988 ftype
= build_function_type_list (void_type_node
,
9989 ptr_type_node
, ptr_type_node
,
9990 ptr_type_node
, NULL_TREE
);
9991 local_define_builtin ("__builtin_init_trampoline", ftype
,
9992 BUILT_IN_INIT_TRAMPOLINE
,
9993 "__builtin_init_trampoline", ECF_NOTHROW
| ECF_LEAF
);
9994 local_define_builtin ("__builtin_init_heap_trampoline", ftype
,
9995 BUILT_IN_INIT_HEAP_TRAMPOLINE
,
9996 "__builtin_init_heap_trampoline",
9997 ECF_NOTHROW
| ECF_LEAF
);
9998 local_define_builtin ("__builtin_init_descriptor", ftype
,
9999 BUILT_IN_INIT_DESCRIPTOR
,
10000 "__builtin_init_descriptor", ECF_NOTHROW
| ECF_LEAF
);
10002 ftype
= build_function_type_list (ptr_type_node
, ptr_type_node
, NULL_TREE
);
10003 local_define_builtin ("__builtin_adjust_trampoline", ftype
,
10004 BUILT_IN_ADJUST_TRAMPOLINE
,
10005 "__builtin_adjust_trampoline",
10006 ECF_CONST
| ECF_NOTHROW
);
10007 local_define_builtin ("__builtin_adjust_descriptor", ftype
,
10008 BUILT_IN_ADJUST_DESCRIPTOR
,
10009 "__builtin_adjust_descriptor",
10010 ECF_CONST
| ECF_NOTHROW
);
10012 ftype
= build_function_type_list (void_type_node
,
10013 ptr_type_node
, ptr_type_node
, NULL_TREE
);
10014 local_define_builtin ("__builtin_nonlocal_goto", ftype
,
10015 BUILT_IN_NONLOCAL_GOTO
,
10016 "__builtin_nonlocal_goto",
10017 ECF_NORETURN
| ECF_NOTHROW
);
10019 ftype
= build_function_type_list (void_type_node
,
10020 ptr_type_node
, ptr_type_node
, NULL_TREE
);
10021 local_define_builtin ("__builtin_setjmp_setup", ftype
,
10022 BUILT_IN_SETJMP_SETUP
,
10023 "__builtin_setjmp_setup", ECF_NOTHROW
);
10025 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10026 local_define_builtin ("__builtin_setjmp_receiver", ftype
,
10027 BUILT_IN_SETJMP_RECEIVER
,
10028 "__builtin_setjmp_receiver", ECF_NOTHROW
| ECF_LEAF
);
10030 ftype
= build_function_type_list (ptr_type_node
, NULL_TREE
);
10031 local_define_builtin ("__builtin_stack_save", ftype
, BUILT_IN_STACK_SAVE
,
10032 "__builtin_stack_save", ECF_NOTHROW
| ECF_LEAF
);
10034 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10035 local_define_builtin ("__builtin_stack_restore", ftype
,
10036 BUILT_IN_STACK_RESTORE
,
10037 "__builtin_stack_restore", ECF_NOTHROW
| ECF_LEAF
);
10039 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
10040 const_ptr_type_node
, size_type_node
,
10042 local_define_builtin ("__builtin_memcmp_eq", ftype
, BUILT_IN_MEMCMP_EQ
,
10043 "__builtin_memcmp_eq",
10044 ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
10046 /* If there's a possibility that we might use the ARM EABI, build the
10047 alternate __cxa_end_cleanup node used to resume from C++. */
10048 if (targetm
.arm_eabi_unwinder
)
10050 ftype
= build_function_type_list (void_type_node
, NULL_TREE
);
10051 local_define_builtin ("__builtin_cxa_end_cleanup", ftype
,
10052 BUILT_IN_CXA_END_CLEANUP
,
10053 "__cxa_end_cleanup", ECF_NORETURN
| ECF_LEAF
);
10056 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10057 local_define_builtin ("__builtin_unwind_resume", ftype
,
10058 BUILT_IN_UNWIND_RESUME
,
10059 ((targetm_common
.except_unwind_info (&global_options
)
10061 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10064 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS
) == NULL_TREE
)
10066 ftype
= build_function_type_list (ptr_type_node
, integer_type_node
,
10068 local_define_builtin ("__builtin_return_address", ftype
,
10069 BUILT_IN_RETURN_ADDRESS
,
10070 "__builtin_return_address",
10074 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
)
10075 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
10077 ftype
= build_function_type_list (void_type_node
, ptr_type_node
,
10078 ptr_type_node
, NULL_TREE
);
10079 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
))
10080 local_define_builtin ("__cyg_profile_func_enter", ftype
,
10081 BUILT_IN_PROFILE_FUNC_ENTER
,
10082 "__cyg_profile_func_enter", 0);
10083 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
10084 local_define_builtin ("__cyg_profile_func_exit", ftype
,
10085 BUILT_IN_PROFILE_FUNC_EXIT
,
10086 "__cyg_profile_func_exit", 0);
10089 /* The exception object and filter values from the runtime. The argument
10090 must be zero before exception lowering, i.e. from the front end. After
10091 exception lowering, it will be the region number for the exception
10092 landing pad. These functions are PURE instead of CONST to prevent
10093 them from being hoisted past the exception edge that will initialize
10094 its value in the landing pad. */
10095 ftype
= build_function_type_list (ptr_type_node
,
10096 integer_type_node
, NULL_TREE
);
10097 ecf_flags
= ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
;
10098 /* Only use TM_PURE if we have TM language support. */
10099 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1
))
10100 ecf_flags
|= ECF_TM_PURE
;
10101 local_define_builtin ("__builtin_eh_pointer", ftype
, BUILT_IN_EH_POINTER
,
10102 "__builtin_eh_pointer", ecf_flags
);
10104 tmp
= lang_hooks
.types
.type_for_mode (targetm
.eh_return_filter_mode (), 0);
10105 ftype
= build_function_type_list (tmp
, integer_type_node
, NULL_TREE
);
10106 local_define_builtin ("__builtin_eh_filter", ftype
, BUILT_IN_EH_FILTER
,
10107 "__builtin_eh_filter", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
10109 ftype
= build_function_type_list (void_type_node
,
10110 integer_type_node
, integer_type_node
,
10112 local_define_builtin ("__builtin_eh_copy_values", ftype
,
10113 BUILT_IN_EH_COPY_VALUES
,
10114 "__builtin_eh_copy_values", ECF_NOTHROW
);
10116 /* Complex multiplication and division. These are handled as builtins
10117 rather than optabs because emit_library_call_value doesn't support
10118 complex. Further, we can do slightly better with folding these
10119 beasties if the real and complex parts of the arguments are separate. */
10123 for (mode
= MIN_MODE_COMPLEX_FLOAT
; mode
<= MAX_MODE_COMPLEX_FLOAT
; ++mode
)
10125 char mode_name_buf
[4], *q
;
10127 enum built_in_function mcode
, dcode
;
10128 tree type
, inner_type
;
10129 const char *prefix
= "__";
10131 if (targetm
.libfunc_gnu_prefix
)
10134 type
= lang_hooks
.types
.type_for_mode ((machine_mode
) mode
, 0);
10137 inner_type
= TREE_TYPE (type
);
10139 ftype
= build_function_type_list (type
, inner_type
, inner_type
,
10140 inner_type
, inner_type
, NULL_TREE
);
10142 mcode
= ((enum built_in_function
)
10143 (BUILT_IN_COMPLEX_MUL_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
10144 dcode
= ((enum built_in_function
)
10145 (BUILT_IN_COMPLEX_DIV_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
10147 for (p
= GET_MODE_NAME (mode
), q
= mode_name_buf
; *p
; p
++, q
++)
10151 built_in_names
[mcode
] = concat (prefix
, "mul", mode_name_buf
, "3",
10153 local_define_builtin (built_in_names
[mcode
], ftype
, mcode
,
10154 built_in_names
[mcode
],
10155 ECF_CONST
| ECF_NOTHROW
| ECF_LEAF
);
10157 built_in_names
[dcode
] = concat (prefix
, "div", mode_name_buf
, "3",
10159 local_define_builtin (built_in_names
[dcode
], ftype
, dcode
,
10160 built_in_names
[dcode
],
10161 ECF_CONST
| ECF_NOTHROW
| ECF_LEAF
);
10165 init_internal_fns ();
10168 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10171 If we requested a pointer to a vector, build up the pointers that
10172 we stripped off while looking for the inner type. Similarly for
10173 return values from functions.
10175 The argument TYPE is the top of the chain, and BOTTOM is the
10176 new type which we will point to. */
10179 reconstruct_complex_type (tree type
, tree bottom
)
10183 if (TREE_CODE (type
) == POINTER_TYPE
)
10185 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10186 outer
= build_pointer_type_for_mode (inner
, TYPE_MODE (type
),
10187 TYPE_REF_CAN_ALIAS_ALL (type
));
10189 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
10191 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10192 outer
= build_reference_type_for_mode (inner
, TYPE_MODE (type
),
10193 TYPE_REF_CAN_ALIAS_ALL (type
));
10195 else if (TREE_CODE (type
) == ARRAY_TYPE
)
10197 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10198 outer
= build_array_type (inner
, TYPE_DOMAIN (type
));
10200 else if (TREE_CODE (type
) == FUNCTION_TYPE
)
10202 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10203 outer
= build_function_type (inner
, TYPE_ARG_TYPES (type
));
10205 else if (TREE_CODE (type
) == METHOD_TYPE
)
10207 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10208 /* The build_method_type_directly() routine prepends 'this' to argument list,
10209 so we must compensate by getting rid of it. */
10211 = build_method_type_directly
10212 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type
))),
10214 TREE_CHAIN (TYPE_ARG_TYPES (type
)));
10216 else if (TREE_CODE (type
) == OFFSET_TYPE
)
10218 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10219 outer
= build_offset_type (TYPE_OFFSET_BASETYPE (type
), inner
);
10224 return build_type_attribute_qual_variant (outer
, TYPE_ATTRIBUTES (type
),
10225 TYPE_QUALS (type
));
10228 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10231 build_vector_type_for_mode (tree innertype
, machine_mode mode
)
10234 unsigned int bitsize
;
10236 switch (GET_MODE_CLASS (mode
))
10238 case MODE_VECTOR_INT
:
10239 case MODE_VECTOR_FLOAT
:
10240 case MODE_VECTOR_FRACT
:
10241 case MODE_VECTOR_UFRACT
:
10242 case MODE_VECTOR_ACCUM
:
10243 case MODE_VECTOR_UACCUM
:
10244 nunits
= GET_MODE_NUNITS (mode
);
10248 /* Check that there are no leftover bits. */
10249 bitsize
= GET_MODE_BITSIZE (as_a
<scalar_int_mode
> (mode
));
10250 gcc_assert (bitsize
% TREE_INT_CST_LOW (TYPE_SIZE (innertype
)) == 0);
10251 nunits
= bitsize
/ TREE_INT_CST_LOW (TYPE_SIZE (innertype
));
10255 gcc_unreachable ();
10258 return make_vector_type (innertype
, nunits
, mode
);
10261 /* Similarly, but takes the inner type and number of units, which must be
10265 build_vector_type (tree innertype
, int nunits
)
10267 return make_vector_type (innertype
, nunits
, VOIDmode
);
10270 /* Build truth vector with specified length and number of units. */
10273 build_truth_vector_type (unsigned nunits
, unsigned vector_size
)
10275 machine_mode mask_mode
10276 = targetm
.vectorize
.get_mask_mode (nunits
, vector_size
).else_blk ();
10278 unsigned HOST_WIDE_INT vsize
;
10279 if (mask_mode
== BLKmode
)
10280 vsize
= vector_size
* BITS_PER_UNIT
;
10282 vsize
= GET_MODE_BITSIZE (mask_mode
);
10284 unsigned HOST_WIDE_INT esize
= vsize
/ nunits
;
10285 gcc_assert (esize
* nunits
== vsize
);
10287 tree bool_type
= build_nonstandard_boolean_type (esize
);
10289 return make_vector_type (bool_type
, nunits
, mask_mode
);
10292 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10295 build_same_sized_truth_vector_type (tree vectype
)
10297 if (VECTOR_BOOLEAN_TYPE_P (vectype
))
10300 unsigned HOST_WIDE_INT size
= GET_MODE_SIZE (TYPE_MODE (vectype
));
10303 size
= tree_to_uhwi (TYPE_SIZE_UNIT (vectype
));
10305 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype
), size
);
10308 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10311 build_opaque_vector_type (tree innertype
, int nunits
)
10313 tree t
= make_vector_type (innertype
, nunits
, VOIDmode
);
10315 /* We always build the non-opaque variant before the opaque one,
10316 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10317 cand
= TYPE_NEXT_VARIANT (t
);
10319 && TYPE_VECTOR_OPAQUE (cand
)
10320 && check_qualified_type (cand
, t
, TYPE_QUALS (t
)))
10322 /* Othewise build a variant type and make sure to queue it after
10323 the non-opaque type. */
10324 cand
= build_distinct_type_copy (t
);
10325 TYPE_VECTOR_OPAQUE (cand
) = true;
10326 TYPE_CANONICAL (cand
) = TYPE_CANONICAL (t
);
10327 TYPE_NEXT_VARIANT (cand
) = TYPE_NEXT_VARIANT (t
);
10328 TYPE_NEXT_VARIANT (t
) = cand
;
10329 TYPE_MAIN_VARIANT (cand
) = TYPE_MAIN_VARIANT (t
);
10333 /* Return the value of element I of VECTOR_CST T as a wide_int. */
10336 vector_cst_int_elt (const_tree t
, unsigned int i
)
10338 /* First handle elements that are directly encoded. */
10339 unsigned int encoded_nelts
= vector_cst_encoded_nelts (t
);
10340 if (i
< encoded_nelts
)
10341 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t
, i
));
10343 /* Identify the pattern that contains element I and work out the index of
10344 the last encoded element for that pattern. */
10345 unsigned int npatterns
= VECTOR_CST_NPATTERNS (t
);
10346 unsigned int pattern
= i
% npatterns
;
10347 unsigned int count
= i
/ npatterns
;
10348 unsigned int final_i
= encoded_nelts
- npatterns
+ pattern
;
10350 /* If there are no steps, the final encoded value is the right one. */
10351 if (!VECTOR_CST_STEPPED_P (t
))
10352 return wi::to_wide (VECTOR_CST_ENCODED_ELT (t
, final_i
));
10354 /* Otherwise work out the value from the last two encoded elements. */
10355 tree v1
= VECTOR_CST_ENCODED_ELT (t
, final_i
- npatterns
);
10356 tree v2
= VECTOR_CST_ENCODED_ELT (t
, final_i
);
10357 wide_int diff
= wi::to_wide (v2
) - wi::to_wide (v1
);
10358 return wi::to_wide (v2
) + (count
- 2) * diff
;
10361 /* Return the value of element I of VECTOR_CST T. */
10364 vector_cst_elt (const_tree t
, unsigned int i
)
10366 /* First handle elements that are directly encoded. */
10367 unsigned int encoded_nelts
= vector_cst_encoded_nelts (t
);
10368 if (i
< encoded_nelts
)
10369 return VECTOR_CST_ENCODED_ELT (t
, i
);
10371 /* If there are no steps, the final encoded value is the right one. */
10372 if (!VECTOR_CST_STEPPED_P (t
))
10374 /* Identify the pattern that contains element I and work out the index of
10375 the last encoded element for that pattern. */
10376 unsigned int npatterns
= VECTOR_CST_NPATTERNS (t
);
10377 unsigned int pattern
= i
% npatterns
;
10378 unsigned int final_i
= encoded_nelts
- npatterns
+ pattern
;
10379 return VECTOR_CST_ENCODED_ELT (t
, final_i
);
10382 /* Otherwise work out the value from the last two encoded elements. */
10383 return wide_int_to_tree (TREE_TYPE (TREE_TYPE (t
)),
10384 vector_cst_int_elt (t
, i
));
10387 /* Given an initializer INIT, return TRUE if INIT is zero or some
10388 aggregate of zeros. Otherwise return FALSE. */
10390 initializer_zerop (const_tree init
)
10396 switch (TREE_CODE (init
))
10399 return integer_zerop (init
);
10402 /* ??? Note that this is not correct for C4X float formats. There,
10403 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10404 negative exponent. */
10405 return real_zerop (init
)
10406 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init
));
10409 return fixed_zerop (init
);
10412 return integer_zerop (init
)
10413 || (real_zerop (init
)
10414 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init
)))
10415 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init
))));
10418 return (VECTOR_CST_NPATTERNS (init
) == 1
10419 && VECTOR_CST_DUPLICATE_P (init
)
10420 && initializer_zerop (VECTOR_CST_ENCODED_ELT (init
, 0)));
10424 unsigned HOST_WIDE_INT idx
;
10426 if (TREE_CLOBBER_P (init
))
10428 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init
), idx
, elt
)
10429 if (!initializer_zerop (elt
))
10438 /* We need to loop through all elements to handle cases like
10439 "\0" and "\0foobar". */
10440 for (i
= 0; i
< TREE_STRING_LENGTH (init
); ++i
)
10441 if (TREE_STRING_POINTER (init
)[i
] != '\0')
10452 /* Check if vector VEC consists of all the equal elements and
10453 that the number of elements corresponds to the type of VEC.
10454 The function returns first element of the vector
10455 or NULL_TREE if the vector is not uniform. */
10457 uniform_vector_p (const_tree vec
)
10462 if (vec
== NULL_TREE
)
10465 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec
)));
10467 if (TREE_CODE (vec
) == VECTOR_CST
)
10469 if (VECTOR_CST_NPATTERNS (vec
) == 1 && VECTOR_CST_DUPLICATE_P (vec
))
10470 return VECTOR_CST_ENCODED_ELT (vec
, 0);
10474 else if (TREE_CODE (vec
) == CONSTRUCTOR
)
10476 first
= error_mark_node
;
10478 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec
), i
, t
)
10485 if (!operand_equal_p (first
, t
, 0))
10488 if (i
!= TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec
)))
10497 /* Build an empty statement at location LOC. */
10500 build_empty_stmt (location_t loc
)
10502 tree t
= build1 (NOP_EXPR
, void_type_node
, size_zero_node
);
10503 SET_EXPR_LOCATION (t
, loc
);
10508 /* Build an OpenMP clause with code CODE. LOC is the location of the
10512 build_omp_clause (location_t loc
, enum omp_clause_code code
)
10517 length
= omp_clause_num_ops
[code
];
10518 size
= (sizeof (struct tree_omp_clause
) + (length
- 1) * sizeof (tree
));
10520 record_node_allocation_statistics (OMP_CLAUSE
, size
);
10522 t
= (tree
) ggc_internal_alloc (size
);
10523 memset (t
, 0, size
);
10524 TREE_SET_CODE (t
, OMP_CLAUSE
);
10525 OMP_CLAUSE_SET_CODE (t
, code
);
10526 OMP_CLAUSE_LOCATION (t
) = loc
;
10531 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10532 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10533 Except for the CODE and operand count field, other storage for the
10534 object is initialized to zeros. */
10537 build_vl_exp (enum tree_code code
, int len MEM_STAT_DECL
)
10540 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_exp
);
10542 gcc_assert (TREE_CODE_CLASS (code
) == tcc_vl_exp
);
10543 gcc_assert (len
>= 1);
10545 record_node_allocation_statistics (code
, length
);
10547 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
10549 TREE_SET_CODE (t
, code
);
10551 /* Can't use TREE_OPERAND to store the length because if checking is
10552 enabled, it will try to check the length before we store it. :-P */
10553 t
->exp
.operands
[0] = build_int_cst (sizetype
, len
);
10558 /* Helper function for build_call_* functions; build a CALL_EXPR with
10559 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10560 the argument slots. */
10563 build_call_1 (tree return_type
, tree fn
, int nargs
)
10567 t
= build_vl_exp (CALL_EXPR
, nargs
+ 3);
10568 TREE_TYPE (t
) = return_type
;
10569 CALL_EXPR_FN (t
) = fn
;
10570 CALL_EXPR_STATIC_CHAIN (t
) = NULL
;
10575 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10576 FN and a null static chain slot. NARGS is the number of call arguments
10577 which are specified as "..." arguments. */
10580 build_call_nary (tree return_type
, tree fn
, int nargs
, ...)
10584 va_start (args
, nargs
);
10585 ret
= build_call_valist (return_type
, fn
, nargs
, args
);
10590 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10591 FN and a null static chain slot. NARGS is the number of call arguments
10592 which are specified as a va_list ARGS. */
10595 build_call_valist (tree return_type
, tree fn
, int nargs
, va_list args
)
10600 t
= build_call_1 (return_type
, fn
, nargs
);
10601 for (i
= 0; i
< nargs
; i
++)
10602 CALL_EXPR_ARG (t
, i
) = va_arg (args
, tree
);
10603 process_call_operands (t
);
10607 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10608 FN and a null static chain slot. NARGS is the number of call arguments
10609 which are specified as a tree array ARGS. */
10612 build_call_array_loc (location_t loc
, tree return_type
, tree fn
,
10613 int nargs
, const tree
*args
)
10618 t
= build_call_1 (return_type
, fn
, nargs
);
10619 for (i
= 0; i
< nargs
; i
++)
10620 CALL_EXPR_ARG (t
, i
) = args
[i
];
10621 process_call_operands (t
);
10622 SET_EXPR_LOCATION (t
, loc
);
10626 /* Like build_call_array, but takes a vec. */
10629 build_call_vec (tree return_type
, tree fn
, vec
<tree
, va_gc
> *args
)
10634 ret
= build_call_1 (return_type
, fn
, vec_safe_length (args
));
10635 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
10636 CALL_EXPR_ARG (ret
, ix
) = t
;
10637 process_call_operands (ret
);
10641 /* Conveniently construct a function call expression. FNDECL names the
10642 function to be called and N arguments are passed in the array
10646 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
10648 tree fntype
= TREE_TYPE (fndecl
);
10649 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10651 return fold_build_call_array_loc (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
10654 /* Conveniently construct a function call expression. FNDECL names the
10655 function to be called and the arguments are passed in the vector
10659 build_call_expr_loc_vec (location_t loc
, tree fndecl
, vec
<tree
, va_gc
> *vec
)
10661 return build_call_expr_loc_array (loc
, fndecl
, vec_safe_length (vec
),
10662 vec_safe_address (vec
));
10666 /* Conveniently construct a function call expression. FNDECL names the
10667 function to be called, N is the number of arguments, and the "..."
10668 parameters are the argument expressions. */
10671 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
10674 tree
*argarray
= XALLOCAVEC (tree
, n
);
10678 for (i
= 0; i
< n
; i
++)
10679 argarray
[i
] = va_arg (ap
, tree
);
10681 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
10684 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10685 varargs macros aren't supported by all bootstrap compilers. */
10688 build_call_expr (tree fndecl
, int n
, ...)
10691 tree
*argarray
= XALLOCAVEC (tree
, n
);
10695 for (i
= 0; i
< n
; i
++)
10696 argarray
[i
] = va_arg (ap
, tree
);
10698 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
10701 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
10702 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
10703 It will get gimplified later into an ordinary internal function. */
10706 build_call_expr_internal_loc_array (location_t loc
, internal_fn ifn
,
10707 tree type
, int n
, const tree
*args
)
10709 tree t
= build_call_1 (type
, NULL_TREE
, n
);
10710 for (int i
= 0; i
< n
; ++i
)
10711 CALL_EXPR_ARG (t
, i
) = args
[i
];
10712 SET_EXPR_LOCATION (t
, loc
);
10713 CALL_EXPR_IFN (t
) = ifn
;
10717 /* Build internal call expression. This is just like CALL_EXPR, except
10718 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10719 internal function. */
10722 build_call_expr_internal_loc (location_t loc
, enum internal_fn ifn
,
10723 tree type
, int n
, ...)
10726 tree
*argarray
= XALLOCAVEC (tree
, n
);
10730 for (i
= 0; i
< n
; i
++)
10731 argarray
[i
] = va_arg (ap
, tree
);
10733 return build_call_expr_internal_loc_array (loc
, ifn
, type
, n
, argarray
);
10736 /* Return a function call to FN, if the target is guaranteed to support it,
10739 N is the number of arguments, passed in the "...", and TYPE is the
10740 type of the return value. */
10743 maybe_build_call_expr_loc (location_t loc
, combined_fn fn
, tree type
,
10747 tree
*argarray
= XALLOCAVEC (tree
, n
);
10751 for (i
= 0; i
< n
; i
++)
10752 argarray
[i
] = va_arg (ap
, tree
);
10754 if (internal_fn_p (fn
))
10756 internal_fn ifn
= as_internal_fn (fn
);
10757 if (direct_internal_fn_p (ifn
))
10759 tree_pair types
= direct_internal_fn_types (ifn
, type
, argarray
);
10760 if (!direct_internal_fn_supported_p (ifn
, types
,
10761 OPTIMIZE_FOR_BOTH
))
10764 return build_call_expr_internal_loc_array (loc
, ifn
, type
, n
, argarray
);
10768 tree fndecl
= builtin_decl_implicit (as_builtin_fn (fn
));
10771 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
10775 /* Return a function call to the appropriate builtin alloca variant.
10777 SIZE is the size to be allocated. ALIGN, if non-zero, is the requested
10778 alignment of the allocated area. MAX_SIZE, if non-negative, is an upper
10779 bound for SIZE in case it is not a fixed value. */
10782 build_alloca_call_expr (tree size
, unsigned int align
, HOST_WIDE_INT max_size
)
10786 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
);
10788 build_call_expr (t
, 3, size
, size_int (align
), size_int (max_size
));
10790 else if (align
> 0)
10792 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
);
10793 return build_call_expr (t
, 2, size
, size_int (align
));
10797 tree t
= builtin_decl_explicit (BUILT_IN_ALLOCA
);
10798 return build_call_expr (t
, 1, size
);
10802 /* Create a new constant string literal and return a char* pointer to it.
10803 The STRING_CST value is the LEN characters at STR. */
10805 build_string_literal (int len
, const char *str
)
10807 tree t
, elem
, index
, type
;
10809 t
= build_string (len
, str
);
10810 elem
= build_type_variant (char_type_node
, 1, 0);
10811 index
= build_index_type (size_int (len
- 1));
10812 type
= build_array_type (elem
, index
);
10813 TREE_TYPE (t
) = type
;
10814 TREE_CONSTANT (t
) = 1;
10815 TREE_READONLY (t
) = 1;
10816 TREE_STATIC (t
) = 1;
10818 type
= build_pointer_type (elem
);
10819 t
= build1 (ADDR_EXPR
, type
,
10820 build4 (ARRAY_REF
, elem
,
10821 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
10827 /* Return true if T (assumed to be a DECL) must be assigned a memory
10831 needs_to_live_in_memory (const_tree t
)
10833 return (TREE_ADDRESSABLE (t
)
10834 || is_global_var (t
)
10835 || (TREE_CODE (t
) == RESULT_DECL
10836 && !DECL_BY_REFERENCE (t
)
10837 && aggregate_value_p (t
, current_function_decl
)));
10840 /* Return value of a constant X and sign-extend it. */
10843 int_cst_value (const_tree x
)
10845 unsigned bits
= TYPE_PRECISION (TREE_TYPE (x
));
10846 unsigned HOST_WIDE_INT val
= TREE_INT_CST_LOW (x
);
10848 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10849 gcc_assert (cst_and_fits_in_hwi (x
));
10851 if (bits
< HOST_BITS_PER_WIDE_INT
)
10853 bool negative
= ((val
>> (bits
- 1)) & 1) != 0;
10855 val
|= HOST_WIDE_INT_M1U
<< (bits
- 1) << 1;
10857 val
&= ~(HOST_WIDE_INT_M1U
<< (bits
- 1) << 1);
10863 /* If TYPE is an integral or pointer type, return an integer type with
10864 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10865 if TYPE is already an integer type of signedness UNSIGNEDP. */
10868 signed_or_unsigned_type_for (int unsignedp
, tree type
)
10870 if (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_UNSIGNED (type
) == unsignedp
)
10873 if (TREE_CODE (type
) == VECTOR_TYPE
)
10875 tree inner
= TREE_TYPE (type
);
10876 tree inner2
= signed_or_unsigned_type_for (unsignedp
, inner
);
10879 if (inner
== inner2
)
10881 return build_vector_type (inner2
, TYPE_VECTOR_SUBPARTS (type
));
10884 if (!INTEGRAL_TYPE_P (type
)
10885 && !POINTER_TYPE_P (type
)
10886 && TREE_CODE (type
) != OFFSET_TYPE
)
10889 return build_nonstandard_integer_type (TYPE_PRECISION (type
), unsignedp
);
10892 /* If TYPE is an integral or pointer type, return an integer type with
10893 the same precision which is unsigned, or itself if TYPE is already an
10894 unsigned integer type. */
10897 unsigned_type_for (tree type
)
10899 return signed_or_unsigned_type_for (1, type
);
10902 /* If TYPE is an integral or pointer type, return an integer type with
10903 the same precision which is signed, or itself if TYPE is already a
10904 signed integer type. */
10907 signed_type_for (tree type
)
10909 return signed_or_unsigned_type_for (0, type
);
10912 /* If TYPE is a vector type, return a signed integer vector type with the
10913 same width and number of subparts. Otherwise return boolean_type_node. */
10916 truth_type_for (tree type
)
10918 if (TREE_CODE (type
) == VECTOR_TYPE
)
10920 if (VECTOR_BOOLEAN_TYPE_P (type
))
10922 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type
),
10923 GET_MODE_SIZE (TYPE_MODE (type
)));
10926 return boolean_type_node
;
10929 /* Returns the largest value obtainable by casting something in INNER type to
10933 upper_bound_in_type (tree outer
, tree inner
)
10935 unsigned int det
= 0;
10936 unsigned oprec
= TYPE_PRECISION (outer
);
10937 unsigned iprec
= TYPE_PRECISION (inner
);
10940 /* Compute a unique number for every combination. */
10941 det
|= (oprec
> iprec
) ? 4 : 0;
10942 det
|= TYPE_UNSIGNED (outer
) ? 2 : 0;
10943 det
|= TYPE_UNSIGNED (inner
) ? 1 : 0;
10945 /* Determine the exponent to use. */
10950 /* oprec <= iprec, outer: signed, inner: don't care. */
10955 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10959 /* oprec > iprec, outer: signed, inner: signed. */
10963 /* oprec > iprec, outer: signed, inner: unsigned. */
10967 /* oprec > iprec, outer: unsigned, inner: signed. */
10971 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10975 gcc_unreachable ();
10978 return wide_int_to_tree (outer
,
10979 wi::mask (prec
, false, TYPE_PRECISION (outer
)));
10982 /* Returns the smallest value obtainable by casting something in INNER type to
10986 lower_bound_in_type (tree outer
, tree inner
)
10988 unsigned oprec
= TYPE_PRECISION (outer
);
10989 unsigned iprec
= TYPE_PRECISION (inner
);
10991 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10993 if (TYPE_UNSIGNED (outer
)
10994 /* If we are widening something of an unsigned type, OUTER type
10995 contains all values of INNER type. In particular, both INNER
10996 and OUTER types have zero in common. */
10997 || (oprec
> iprec
&& TYPE_UNSIGNED (inner
)))
10998 return build_int_cst (outer
, 0);
11001 /* If we are widening a signed type to another signed type, we
11002 want to obtain -2^^(iprec-1). If we are keeping the
11003 precision or narrowing to a signed type, we want to obtain
11005 unsigned prec
= oprec
> iprec
? iprec
: oprec
;
11006 return wide_int_to_tree (outer
,
11007 wi::mask (prec
- 1, true,
11008 TYPE_PRECISION (outer
)));
11012 /* Return nonzero if two operands that are suitable for PHI nodes are
11013 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11014 SSA_NAME or invariant. Note that this is strictly an optimization.
11015 That is, callers of this function can directly call operand_equal_p
11016 and get the same result, only slower. */
11019 operand_equal_for_phi_arg_p (const_tree arg0
, const_tree arg1
)
/* NOTE(review): the extraction dropped original lines 11020-11022 and
   11024 -- presumably the opening brace, a pointer-equality shortcut,
   and the early return taken when either operand is an SSA_NAME; verify
   against the upstream file before relying on this.  */
11023 if (TREE_CODE (arg0
) == SSA_NAME
|| TREE_CODE (arg1
) == SSA_NAME
)
/* Neither operand is an SSA_NAME: fall back to structural equality.  */
11025 return operand_equal_p (arg0
, arg1
, 0);
11028 /* Returns number of zeros at the end of binary representation of X. */
11031 num_ending_zeros (const_tree x
)
11033 return build_int_cst (TREE_TYPE (x
), wi::ctz (wi::to_wide (x
)));
11037 #define WALK_SUBTREE(NODE) \
11040 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11046 /* This is a subroutine of walk_tree that walks field of TYPE that are to
11047 be walked whenever a type is seen in the tree. Rest of operands and return
11048 value are as for walk_tree. */
11051 walk_type_fields (tree type
, walk_tree_fn func
, void *data
,
11052 hash_set
<tree
> *pset
, walk_tree_lh lh
)
11054 tree result
= NULL_TREE
;
11056 switch (TREE_CODE (type
))
11059 case REFERENCE_TYPE
:
11061 /* We have to worry about mutually recursive pointers. These can't
11062 be written in C. They can in Ada. It's pathological, but
11063 there's an ACATS test (c38102a) that checks it. Deal with this
11064 by checking if we're pointing to another pointer, that one
11065 points to another pointer, that one does too, and we have no htab.
11066 If so, get a hash table. We check three levels deep to avoid
11067 the cost of the hash table if we don't need one. */
11068 if (POINTER_TYPE_P (TREE_TYPE (type
))
11069 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type
)))
11070 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type
))))
11073 result
= walk_tree_without_duplicates (&TREE_TYPE (type
),
11084 WALK_SUBTREE (TREE_TYPE (type
));
11088 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type
));
11090 /* Fall through. */
11092 case FUNCTION_TYPE
:
11093 WALK_SUBTREE (TREE_TYPE (type
));
11097 /* We never want to walk into default arguments. */
11098 for (arg
= TYPE_ARG_TYPES (type
); arg
; arg
= TREE_CHAIN (arg
))
11099 WALK_SUBTREE (TREE_VALUE (arg
));
11104 /* Don't follow this nodes's type if a pointer for fear that
11105 we'll have infinite recursion. If we have a PSET, then we
11108 || (!POINTER_TYPE_P (TREE_TYPE (type
))
11109 && TREE_CODE (TREE_TYPE (type
)) != OFFSET_TYPE
))
11110 WALK_SUBTREE (TREE_TYPE (type
));
11111 WALK_SUBTREE (TYPE_DOMAIN (type
));
11115 WALK_SUBTREE (TREE_TYPE (type
));
11116 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type
));
11126 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11127 called with the DATA and the address of each sub-tree. If FUNC returns a
11128 non-NULL value, the traversal is stopped, and the value returned by FUNC
11129 is returned. If PSET is non-NULL it is used to record the nodes visited,
11130 and to avoid visiting a node more than once. */
11133 walk_tree_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11134 hash_set
<tree
> *pset
, walk_tree_lh lh
)
11136 enum tree_code code
;
11140 #define WALK_SUBTREE_TAIL(NODE) \
11144 goto tail_recurse; \
11149 /* Skip empty subtrees. */
11153 /* Don't walk the same tree twice, if the user has requested
11154 that we avoid doing so. */
11155 if (pset
&& pset
->add (*tp
))
11158 /* Call the function. */
11160 result
= (*func
) (tp
, &walk_subtrees
, data
);
11162 /* If we found something, return it. */
11166 code
= TREE_CODE (*tp
);
11168 /* Even if we didn't, FUNC may have decided that there was nothing
11169 interesting below this point in the tree. */
11170 if (!walk_subtrees
)
11172 /* But we still need to check our siblings. */
11173 if (code
== TREE_LIST
)
11174 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11175 else if (code
== OMP_CLAUSE
)
11176 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11183 result
= (*lh
) (tp
, &walk_subtrees
, func
, data
, pset
);
11184 if (result
|| !walk_subtrees
)
11191 case IDENTIFIER_NODE
:
11198 case PLACEHOLDER_EXPR
:
11202 /* None of these have subtrees other than those already walked
11207 WALK_SUBTREE (TREE_VALUE (*tp
));
11208 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11213 int len
= TREE_VEC_LENGTH (*tp
);
11218 /* Walk all elements but the first. */
11220 WALK_SUBTREE (TREE_VEC_ELT (*tp
, len
));
11222 /* Now walk the first one as a tail call. */
11223 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp
, 0));
11227 WALK_SUBTREE (TREE_REALPART (*tp
));
11228 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp
));
11232 unsigned HOST_WIDE_INT idx
;
11233 constructor_elt
*ce
;
11235 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp
), idx
, &ce
);
11237 WALK_SUBTREE (ce
->value
);
11242 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, 0));
11247 for (decl
= BIND_EXPR_VARS (*tp
); decl
; decl
= DECL_CHAIN (decl
))
11249 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11250 into declarations that are just mentioned, rather than
11251 declared; they don't really belong to this part of the tree.
11252 And, we can see cycles: the initializer for a declaration
11253 can refer to the declaration itself. */
11254 WALK_SUBTREE (DECL_INITIAL (decl
));
11255 WALK_SUBTREE (DECL_SIZE (decl
));
11256 WALK_SUBTREE (DECL_SIZE_UNIT (decl
));
11258 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp
));
11261 case STATEMENT_LIST
:
11263 tree_stmt_iterator i
;
11264 for (i
= tsi_start (*tp
); !tsi_end_p (i
); tsi_next (&i
))
11265 WALK_SUBTREE (*tsi_stmt_ptr (i
));
11270 switch (OMP_CLAUSE_CODE (*tp
))
11272 case OMP_CLAUSE_GANG
:
11273 case OMP_CLAUSE__GRIDDIM_
:
11274 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 1));
11277 case OMP_CLAUSE_ASYNC
:
11278 case OMP_CLAUSE_WAIT
:
11279 case OMP_CLAUSE_WORKER
:
11280 case OMP_CLAUSE_VECTOR
:
11281 case OMP_CLAUSE_NUM_GANGS
:
11282 case OMP_CLAUSE_NUM_WORKERS
:
11283 case OMP_CLAUSE_VECTOR_LENGTH
:
11284 case OMP_CLAUSE_PRIVATE
:
11285 case OMP_CLAUSE_SHARED
:
11286 case OMP_CLAUSE_FIRSTPRIVATE
:
11287 case OMP_CLAUSE_COPYIN
:
11288 case OMP_CLAUSE_COPYPRIVATE
:
11289 case OMP_CLAUSE_FINAL
:
11290 case OMP_CLAUSE_IF
:
11291 case OMP_CLAUSE_NUM_THREADS
:
11292 case OMP_CLAUSE_SCHEDULE
:
11293 case OMP_CLAUSE_UNIFORM
:
11294 case OMP_CLAUSE_DEPEND
:
11295 case OMP_CLAUSE_NUM_TEAMS
:
11296 case OMP_CLAUSE_THREAD_LIMIT
:
11297 case OMP_CLAUSE_DEVICE
:
11298 case OMP_CLAUSE_DIST_SCHEDULE
:
11299 case OMP_CLAUSE_SAFELEN
:
11300 case OMP_CLAUSE_SIMDLEN
:
11301 case OMP_CLAUSE_ORDERED
:
11302 case OMP_CLAUSE_PRIORITY
:
11303 case OMP_CLAUSE_GRAINSIZE
:
11304 case OMP_CLAUSE_NUM_TASKS
:
11305 case OMP_CLAUSE_HINT
:
11306 case OMP_CLAUSE_TO_DECLARE
:
11307 case OMP_CLAUSE_LINK
:
11308 case OMP_CLAUSE_USE_DEVICE_PTR
:
11309 case OMP_CLAUSE_IS_DEVICE_PTR
:
11310 case OMP_CLAUSE__LOOPTEMP_
:
11311 case OMP_CLAUSE__SIMDUID_
:
11312 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 0));
11315 case OMP_CLAUSE_INDEPENDENT
:
11316 case OMP_CLAUSE_NOWAIT
:
11317 case OMP_CLAUSE_DEFAULT
:
11318 case OMP_CLAUSE_UNTIED
:
11319 case OMP_CLAUSE_MERGEABLE
:
11320 case OMP_CLAUSE_PROC_BIND
:
11321 case OMP_CLAUSE_INBRANCH
:
11322 case OMP_CLAUSE_NOTINBRANCH
:
11323 case OMP_CLAUSE_FOR
:
11324 case OMP_CLAUSE_PARALLEL
:
11325 case OMP_CLAUSE_SECTIONS
:
11326 case OMP_CLAUSE_TASKGROUP
:
11327 case OMP_CLAUSE_NOGROUP
:
11328 case OMP_CLAUSE_THREADS
:
11329 case OMP_CLAUSE_SIMD
:
11330 case OMP_CLAUSE_DEFAULTMAP
:
11331 case OMP_CLAUSE_AUTO
:
11332 case OMP_CLAUSE_SEQ
:
11333 case OMP_CLAUSE_TILE
:
11334 case OMP_CLAUSE__SIMT_
:
11335 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11337 case OMP_CLAUSE_LASTPRIVATE
:
11338 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11339 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp
));
11340 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11342 case OMP_CLAUSE_COLLAPSE
:
11345 for (i
= 0; i
< 3; i
++)
11346 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11347 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11350 case OMP_CLAUSE_LINEAR
:
11351 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11352 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp
));
11353 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp
));
11354 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11356 case OMP_CLAUSE_ALIGNED
:
11357 case OMP_CLAUSE_FROM
:
11358 case OMP_CLAUSE_TO
:
11359 case OMP_CLAUSE_MAP
:
11360 case OMP_CLAUSE__CACHE_
:
11361 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11362 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 1));
11363 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11365 case OMP_CLAUSE_REDUCTION
:
11368 for (i
= 0; i
< 5; i
++)
11369 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11370 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11374 gcc_unreachable ();
11382 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11383 But, we only want to walk once. */
11384 len
= (TREE_OPERAND (*tp
, 3) == TREE_OPERAND (*tp
, 1)) ? 2 : 3;
11385 for (i
= 0; i
< len
; ++i
)
11386 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11387 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
));
11391 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11392 defining. We only want to walk into these fields of a type in this
11393 case and not in the general case of a mere reference to the type.
11395 The criterion is as follows: if the field can be an expression, it
11396 must be walked only here. This should be in keeping with the fields
11397 that are directly gimplified in gimplify_type_sizes in order for the
11398 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11399 variable-sized types.
11401 Note that DECLs get walked as part of processing the BIND_EXPR. */
11402 if (TREE_CODE (DECL_EXPR_DECL (*tp
)) == TYPE_DECL
)
11404 tree
*type_p
= &TREE_TYPE (DECL_EXPR_DECL (*tp
));
11405 if (TREE_CODE (*type_p
) == ERROR_MARK
)
11408 /* Call the function for the type. See if it returns anything or
11409 doesn't want us to continue. If we are to continue, walk both
11410 the normal fields and those for the declaration case. */
11411 result
= (*func
) (type_p
, &walk_subtrees
, data
);
11412 if (result
|| !walk_subtrees
)
11415 /* But do not walk a pointed-to type since it may itself need to
11416 be walked in the declaration case if it isn't anonymous. */
11417 if (!POINTER_TYPE_P (*type_p
))
11419 result
= walk_type_fields (*type_p
, func
, data
, pset
, lh
);
11424 /* If this is a record type, also walk the fields. */
11425 if (RECORD_OR_UNION_TYPE_P (*type_p
))
11429 for (field
= TYPE_FIELDS (*type_p
); field
;
11430 field
= DECL_CHAIN (field
))
11432 /* We'd like to look at the type of the field, but we can
11433 easily get infinite recursion. So assume it's pointed
11434 to elsewhere in the tree. Also, ignore things that
11436 if (TREE_CODE (field
) != FIELD_DECL
)
11439 WALK_SUBTREE (DECL_FIELD_OFFSET (field
));
11440 WALK_SUBTREE (DECL_SIZE (field
));
11441 WALK_SUBTREE (DECL_SIZE_UNIT (field
));
11442 if (TREE_CODE (*type_p
) == QUAL_UNION_TYPE
)
11443 WALK_SUBTREE (DECL_QUALIFIER (field
));
11447 /* Same for scalar types. */
11448 else if (TREE_CODE (*type_p
) == BOOLEAN_TYPE
11449 || TREE_CODE (*type_p
) == ENUMERAL_TYPE
11450 || TREE_CODE (*type_p
) == INTEGER_TYPE
11451 || TREE_CODE (*type_p
) == FIXED_POINT_TYPE
11452 || TREE_CODE (*type_p
) == REAL_TYPE
)
11454 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p
));
11455 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p
));
11458 WALK_SUBTREE (TYPE_SIZE (*type_p
));
11459 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p
));
11464 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
11468 /* Walk over all the sub-trees of this operand. */
11469 len
= TREE_OPERAND_LENGTH (*tp
);
11471 /* Go through the subtrees. We need to do this in forward order so
11472 that the scope of a FOR_EXPR is handled properly. */
11475 for (i
= 0; i
< len
- 1; ++i
)
11476 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11477 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
- 1));
11480 /* If this is a type, walk the needed fields in the type. */
11481 else if (TYPE_P (*tp
))
11482 return walk_type_fields (*tp
, func
, data
, pset
, lh
);
11486 /* We didn't find what we were looking for. */
11489 #undef WALK_SUBTREE_TAIL
11491 #undef WALK_SUBTREE
11493 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11496 walk_tree_without_duplicates_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11501 hash_set
<tree
> pset
;
11502 result
= walk_tree_1 (tp
, func
, data
, &pset
, lh
);
11508 tree_block (tree t
)
11510 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11512 if (IS_EXPR_CODE_CLASS (c
))
11513 return LOCATION_BLOCK (t
->exp
.locus
);
11514 gcc_unreachable ();
11519 tree_set_block (tree t
, tree b
)
11521 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11523 if (IS_EXPR_CODE_CLASS (c
))
11525 t
->exp
.locus
= set_block (t
->exp
.locus
, b
);
11528 gcc_unreachable ();
11531 /* Create a nameless artificial label and put it in the current
11532 function context. The label has a location of LOC. Returns the
11533 newly created label. */
11536 create_artificial_label (location_t loc
)
11538 tree lab
= build_decl (loc
,
11539 LABEL_DECL
, NULL_TREE
, void_type_node
);
11541 DECL_ARTIFICIAL (lab
) = 1;
11542 DECL_IGNORED_P (lab
) = 1;
11543 DECL_CONTEXT (lab
) = current_function_decl
;
11547 /* Given a tree, try to return a useful variable name that we can use
11548 to prefix a temporary that is being assigned the value of the tree.
11549 I.E. given <temp> = &A, return A. */
11554 tree stripped_decl
;
11557 STRIP_NOPS (stripped_decl
);
11558 if (DECL_P (stripped_decl
) && DECL_NAME (stripped_decl
))
11559 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl
));
11560 else if (TREE_CODE (stripped_decl
) == SSA_NAME
)
11562 tree name
= SSA_NAME_IDENTIFIER (stripped_decl
);
11565 return IDENTIFIER_POINTER (name
);
11569 switch (TREE_CODE (stripped_decl
))
11572 return get_name (TREE_OPERAND (stripped_decl
, 0));
11579 /* Return true if TYPE has a variable argument list. */
11582 stdarg_p (const_tree fntype
)
/* Iterator state plus N, which tracks the last argument type seen.  */
11584 function_args_iterator args_iter
;
11585 tree n
= NULL_TREE
, t
;
/* NOTE(review): the extraction dropped the loop body (original lines
   11591-11593), which presumably records each argument type into N --
   confirm against the upstream file.  */
11590 FOREACH_FUNCTION_ARGS (fntype
, t
, args_iter
)
/* A prototyped non-variadic list ends in void_type_node; a variadic one
   ends in a real type.  N being neither NULL nor void means stdarg.  */
11595 return n
!= NULL_TREE
&& n
!= void_type_node
;
11598 /* Return true if TYPE has a prototype. */
11601 prototype_p (const_tree fntype
)
11605 gcc_assert (fntype
!= NULL_TREE
);
11607 t
= TYPE_ARG_TYPES (fntype
);
11608 return (t
!= NULL_TREE
);
11611 /* If BLOCK is inlined from an __attribute__((__artificial__))
11612 routine, return pointer to location from where it has been
11615 block_nonartificial_location (tree block
)
11617 location_t
*ret
= NULL
;
11619 while (block
&& TREE_CODE (block
) == BLOCK
11620 && BLOCK_ABSTRACT_ORIGIN (block
))
11622 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11624 while (TREE_CODE (ao
) == BLOCK
11625 && BLOCK_ABSTRACT_ORIGIN (ao
)
11626 && BLOCK_ABSTRACT_ORIGIN (ao
) != ao
)
11627 ao
= BLOCK_ABSTRACT_ORIGIN (ao
);
11629 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11631 /* If AO is an artificial inline, point RET to the
11632 call site locus at which it has been inlined and continue
11633 the loop, in case AO's caller is also an artificial
11635 if (DECL_DECLARED_INLINE_P (ao
)
11636 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao
)))
11637 ret
= &BLOCK_SOURCE_LOCATION (block
);
11641 else if (TREE_CODE (ao
) != BLOCK
)
11644 block
= BLOCK_SUPERCONTEXT (block
);
11650 /* If EXP is inlined from an __attribute__((__artificial__))
11651 function, return the location of the original call expression. */
11654 tree_nonartificial_location (tree exp
)
11656 location_t
*loc
= block_nonartificial_location (TREE_BLOCK (exp
));
11661 return EXPR_LOCATION (exp
);
11665 /* These are the hash table functions for the hash table of OPTIMIZATION_NODEq
11668 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11671 cl_option_hasher::hash (tree x
)
11673 const_tree
const t
= x
;
11677 hashval_t hash
= 0;
11679 if (TREE_CODE (t
) == OPTIMIZATION_NODE
)
11681 p
= (const char *)TREE_OPTIMIZATION (t
);
11682 len
= sizeof (struct cl_optimization
);
11685 else if (TREE_CODE (t
) == TARGET_OPTION_NODE
)
11686 return cl_target_option_hash (TREE_TARGET_OPTION (t
));
11689 gcc_unreachable ();
11691 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11693 for (i
= 0; i
< len
; i
++)
11695 hash
= (hash
<< 4) ^ ((i
<< 2) | p
[i
]);
11700 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11701 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11705 cl_option_hasher::equal (tree x
, tree y
)
11707 const_tree
const xt
= x
;
11708 const_tree
const yt
= y
;
11713 if (TREE_CODE (xt
) != TREE_CODE (yt
))
11716 if (TREE_CODE (xt
) == OPTIMIZATION_NODE
)
11718 xp
= (const char *)TREE_OPTIMIZATION (xt
);
11719 yp
= (const char *)TREE_OPTIMIZATION (yt
);
11720 len
= sizeof (struct cl_optimization
);
11723 else if (TREE_CODE (xt
) == TARGET_OPTION_NODE
)
11725 return cl_target_option_eq (TREE_TARGET_OPTION (xt
),
11726 TREE_TARGET_OPTION (yt
));
11730 gcc_unreachable ();
11732 return (memcmp (xp
, yp
, len
) == 0);
11735 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11738 build_optimization_node (struct gcc_options
*opts
)
11742 /* Use the cache of optimization nodes. */
11744 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node
),
11747 tree
*slot
= cl_option_hash_table
->find_slot (cl_optimization_node
, INSERT
);
11751 /* Insert this one into the hash table. */
11752 t
= cl_optimization_node
;
11755 /* Make a new node for next time round. */
11756 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
11762 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11765 build_target_option_node (struct gcc_options
*opts
)
11769 /* Use the cache of optimization nodes. */
11771 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node
),
11774 tree
*slot
= cl_option_hash_table
->find_slot (cl_target_option_node
, INSERT
);
11778 /* Insert this one into the hash table. */
11779 t
= cl_target_option_node
;
11782 /* Make a new node for next time round. */
11783 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
11789 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11790 so that they aren't saved during PCH writing. */
11793 prepare_target_option_nodes_for_pch (void)
11795 hash_table
<cl_option_hasher
>::iterator iter
= cl_option_hash_table
->begin ();
11796 for (; iter
!= cl_option_hash_table
->end (); ++iter
)
11797 if (TREE_CODE (*iter
) == TARGET_OPTION_NODE
)
11798 TREE_TARGET_GLOBALS (*iter
) = NULL
;
11801 /* Determine the "ultimate origin" of a block. The block may be an inlined
11802 instance of an inlined instance of a block which is local to an inline
11803 function, so we have to trace all of the way back through the origin chain
11804 to find out what sort of node actually served as the original seed for the
11808 block_ultimate_origin (const_tree block
)
11810 tree immediate_origin
= BLOCK_ABSTRACT_ORIGIN (block
);
11812 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11813 we're trying to output the abstract instance of this function. */
11814 if (BLOCK_ABSTRACT (block
) && immediate_origin
== block
)
11817 if (immediate_origin
== NULL_TREE
)
11822 tree lookahead
= immediate_origin
;
11826 ret_val
= lookahead
;
11827 lookahead
= (TREE_CODE (ret_val
) == BLOCK
11828 ? BLOCK_ABSTRACT_ORIGIN (ret_val
) : NULL
);
11830 while (lookahead
!= NULL
&& lookahead
!= ret_val
);
11832 /* The block's abstract origin chain may not be the *ultimate* origin of
11833 the block. It could lead to a DECL that has an abstract origin set.
11834 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11835 will give us if it has one). Note that DECL's abstract origins are
11836 supposed to be the most distant ancestor (or so decl_ultimate_origin
11837 claims), so we don't need to loop following the DECL origins. */
11838 if (DECL_P (ret_val
))
11839 return DECL_ORIGIN (ret_val
);
11845 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11849 tree_nop_conversion_p (const_tree outer_type
, const_tree inner_type
)
11851 /* Do not strip casts into or out of differing address spaces. */
11852 if (POINTER_TYPE_P (outer_type
)
11853 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type
)) != ADDR_SPACE_GENERIC
)
/* NOTE(review): the `return false;` bodies of these two address-space
   branches (original lines ~11858 and ~11866) were dropped by the
   extraction; only the conditions and comments survive.  */
11855 if (!POINTER_TYPE_P (inner_type
)
11856 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type
))
11857 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type
))))
11860 else if (POINTER_TYPE_P (inner_type
)
11861 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type
)) != ADDR_SPACE_GENERIC
)
11863 /* We already know that outer_type is not a pointer with
11864 a non-generic address space. */
11868 /* Use precision rather then machine mode when we can, which gives
11869 the correct answer even for submode (bit-field) types. */
11870 if ((INTEGRAL_TYPE_P (outer_type
)
11871 || POINTER_TYPE_P (outer_type
)
11872 || TREE_CODE (outer_type
) == OFFSET_TYPE
)
11873 && (INTEGRAL_TYPE_P (inner_type
)
11874 || POINTER_TYPE_P (inner_type
)
11875 || TREE_CODE (inner_type
) == OFFSET_TYPE
))
/* Both sides are integer-like: a conversion is a no-op exactly when
   the precisions match.  */
11876 return TYPE_PRECISION (outer_type
) == TYPE_PRECISION (inner_type
);
11878 /* Otherwise fall back on comparing machine modes (e.g. for
11879 aggregate types, floats). */
11880 return TYPE_MODE (outer_type
) == TYPE_MODE (inner_type
);
11883 /* Return true iff conversion in EXP generates no instruction. Mark
11884 it inline so that we fully inline into the stripping functions even
11885 though we have two uses of this function. */
11888 tree_nop_conversion (const_tree exp
)
11890 tree outer_type
, inner_type
;
/* Only conversion-like wrappers can possibly be no-ops; the dropped
   bodies of these two guards (not in this extraction) presumably
   `return false;`.  */
11892 if (!CONVERT_EXPR_P (exp
)
11893 && TREE_CODE (exp
) != NON_LVALUE_EXPR
)
11895 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
11898 outer_type
= TREE_TYPE (exp
);
11899 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
/* NOTE(review): original lines 11900-11903 are missing here -- likely a
   NULL check on inner_type; confirm against the upstream file.  */
11904 return tree_nop_conversion_p (outer_type
, inner_type
);
11907 /* Return true iff conversion in EXP generates no instruction. Don't
11908 consider conversions changing the signedness. */
11911 tree_sign_nop_conversion (const_tree exp
)
11913 tree outer_type
, inner_type
;
11915 if (!tree_nop_conversion (exp
))
11918 outer_type
= TREE_TYPE (exp
);
11919 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11921 return (TYPE_UNSIGNED (outer_type
) == TYPE_UNSIGNED (inner_type
)
11922 && POINTER_TYPE_P (outer_type
) == POINTER_TYPE_P (inner_type
));
11925 /* Strip conversions from EXP according to tree_nop_conversion and
11926 return the resulting expression. */
11929 tree_strip_nop_conversions (tree exp
)
11931 while (tree_nop_conversion (exp
))
11932 exp
= TREE_OPERAND (exp
, 0);
11936 /* Strip conversions from EXP according to tree_sign_nop_conversion
11937 and return the resulting expression. */
11940 tree_strip_sign_nop_conversions (tree exp
)
11942 while (tree_sign_nop_conversion (exp
))
11943 exp
= TREE_OPERAND (exp
, 0);
11947 /* Avoid any floating point extensions from EXP. */
11949 strip_float_extensions (tree exp
)
11951 tree sub
, expt
, subt
;
11953 /* For floating point constant look up the narrowest type that can hold
11954 it properly and handle it like (type)(narrowest_type)constant.
11955 This way we can optimize for instance a=a*2.0 where "a" is float
11956 but 2.0 is double constant. */
11957 if (TREE_CODE (exp
) == REAL_CST
&& !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp
)))
11959 REAL_VALUE_TYPE orig
;
11962 orig
= TREE_REAL_CST (exp
);
11963 if (TYPE_PRECISION (TREE_TYPE (exp
)) > TYPE_PRECISION (float_type_node
)
11964 && exact_real_truncate (TYPE_MODE (float_type_node
), &orig
))
11965 type
= float_type_node
;
11966 else if (TYPE_PRECISION (TREE_TYPE (exp
))
11967 > TYPE_PRECISION (double_type_node
)
11968 && exact_real_truncate (TYPE_MODE (double_type_node
), &orig
))
11969 type
= double_type_node
;
11971 return build_real_truncate (type
, orig
);
11974 if (!CONVERT_EXPR_P (exp
))
11977 sub
= TREE_OPERAND (exp
, 0);
11978 subt
= TREE_TYPE (sub
);
11979 expt
= TREE_TYPE (exp
);
11981 if (!FLOAT_TYPE_P (subt
))
11984 if (DECIMAL_FLOAT_TYPE_P (expt
) != DECIMAL_FLOAT_TYPE_P (subt
))
11987 if (TYPE_PRECISION (subt
) > TYPE_PRECISION (expt
))
11990 return strip_float_extensions (sub
);
11993 /* Strip out all handled components that produce invariant
11997 strip_invariant_refs (const_tree op
)
/* Walk down the reference chain as long as OP is a handled component;
   the dropped lines presumably return NULL for non-invariant indices.  */
11999 while (handled_component_p (op
))
12001 switch (TREE_CODE (op
))
/* NOTE(review): the ARRAY_REF label preceding ARRAY_RANGE_REF, the
   `return NULL;` / `break;` statements, the default label and the final
   `return op;` were dropped by the extraction -- fragments kept as-is.  */
12004 case ARRAY_RANGE_REF
:
/* An array access is invariant only with a constant index and no
   variable bound/element-size operands.  */
12005 if (!is_gimple_constant (TREE_OPERAND (op
, 1))
12006 || TREE_OPERAND (op
, 2) != NULL_TREE
12007 || TREE_OPERAND (op
, 3) != NULL_TREE
)
12011 case COMPONENT_REF
:
/* A field access is invariant only without a variable offset.  */
12012 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
/* Descend to the next inner reference.  */
12018 op
= TREE_OPERAND (op
, 0);
12024 static GTY(()) tree gcc_eh_personality_decl
;
12026 /* Return the GCC personality function decl. */
12029 lhd_gcc_personality (void)
12031 if (!gcc_eh_personality_decl
)
12032 gcc_eh_personality_decl
= build_personality_function ("gcc");
12033 return gcc_eh_personality_decl
;
12036 /* TARGET is a call target of GIMPLE call statement
12037 (obtained by gimple_call_fn). Return true if it is
12038 OBJ_TYPE_REF representing an virtual call of C++ method.
12039 (As opposed to OBJ_TYPE_REF representing objc calls
12040 through a cast where middle-end devirtualization machinery
12044 virtual_method_call_p (const_tree target
)
/* NOTE(review): the `return false;` / `return true;` bodies of the
   guards below were dropped by the extraction; fragments kept as-is.  */
12046 if (TREE_CODE (target
) != OBJ_TYPE_REF
)
/* T is the pointer type of the call target.  */
12048 tree t
= TREE_TYPE (target
);
12049 gcc_checking_assert (TREE_CODE (t
) == POINTER_TYPE
);
/* ObjC builds OBJ_TYPE_REF over plain FUNCTION_TYPEs; only METHOD_TYPE
   targets can be C++ virtual calls.  */
12051 if (TREE_CODE (t
) == FUNCTION_TYPE
)
12053 gcc_checking_assert (TREE_CODE (t
) == METHOD_TYPE
);
12054 /* If we do not have BINFO associated, it means that type was built
12055 without devirtualization enabled. Do not consider this a virtual
12057 if (!TYPE_BINFO (obj_type_ref_class (target
)))
12062 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12065 obj_type_ref_class (const_tree ref
)
12067 gcc_checking_assert (TREE_CODE (ref
) == OBJ_TYPE_REF
);
12068 ref
= TREE_TYPE (ref
);
12069 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
12070 ref
= TREE_TYPE (ref
);
12071 /* We look for type THIS points to. ObjC also builds
12072 OBJ_TYPE_REF with non-method calls, Their first parameter
12073 ID however also corresponds to class type. */
12074 gcc_checking_assert (TREE_CODE (ref
) == METHOD_TYPE
12075 || TREE_CODE (ref
) == FUNCTION_TYPE
);
12076 ref
= TREE_VALUE (TYPE_ARG_TYPES (ref
));
12077 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
12078 return TREE_TYPE (ref
);
12081 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12084 lookup_binfo_at_offset (tree binfo
, tree type
, HOST_WIDE_INT pos
)
12087 tree base_binfo
, b
;
/* Scan the direct bases; the dropped bodies presumably return
   BASE_BINFO on a direct match and B on a recursive one, with a final
   `return NULL_TREE;` -- verify against the upstream file.  */
12089 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
/* Direct hit: the base sits at POS and its type matches by ODR.  */
12090 if (pos
== tree_to_shwi (BINFO_OFFSET (base_binfo
))
12091 && types_same_for_odr (TREE_TYPE (base_binfo
), type
))
/* Otherwise recurse into the base's own hierarchy.  */
12093 else if ((b
= lookup_binfo_at_offset (base_binfo
, type
, pos
)) != NULL
)
12098 /* Try to find a base info of BINFO that would have its field decl at offset
12099 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12100 found, return, otherwise return NULL_TREE. */
12103 get_binfo_at_offset (tree binfo
, HOST_WIDE_INT offset
, tree expected_type
)
12105 tree type
= BINFO_TYPE (binfo
);
12109 HOST_WIDE_INT pos
, size
;
12113 if (types_same_for_odr (type
, expected_type
))
12118 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
12120 if (TREE_CODE (fld
) != FIELD_DECL
|| !DECL_ARTIFICIAL (fld
))
12123 pos
= int_bit_position (fld
);
12124 size
= tree_to_uhwi (DECL_SIZE (fld
));
12125 if (pos
<= offset
&& (pos
+ size
) > offset
)
12128 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
12131 /* Offset 0 indicates the primary base, whose vtable contents are
12132 represented in the binfo for the derived class. */
12133 else if (offset
!= 0)
12135 tree found_binfo
= NULL
, base_binfo
;
12136 /* Offsets in BINFO are in bytes relative to the whole structure
12137 while POS is in bits relative to the containing field. */
12138 int binfo_offset
= (tree_to_shwi (BINFO_OFFSET (binfo
)) + pos
12141 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
12142 if (tree_to_shwi (BINFO_OFFSET (base_binfo
)) == binfo_offset
12143 && types_same_for_odr (TREE_TYPE (base_binfo
), TREE_TYPE (fld
)))
12145 found_binfo
= base_binfo
;
12149 binfo
= found_binfo
;
12151 binfo
= lookup_binfo_at_offset (binfo
, TREE_TYPE (fld
),
12155 type
= TREE_TYPE (fld
);
12160 /* Returns true if X is a typedef decl. */
12163 is_typedef_decl (const_tree x
)
12165 return (x
&& TREE_CODE (x
) == TYPE_DECL
12166 && DECL_ORIGINAL_TYPE (x
) != NULL_TREE
);
12169 /* Returns true iff TYPE is a type variant created for a typedef. */
12172 typedef_variant_p (const_tree type
)
12174 return is_typedef_decl (TYPE_NAME (type
));
12177 /* Warn about a use of an identifier which was marked deprecated. */
12179 warn_deprecated_use (tree node
, tree attr
)
12183 if (node
== 0 || !warn_deprecated_decl
)
12189 attr
= DECL_ATTRIBUTES (node
);
12190 else if (TYPE_P (node
))
12192 tree decl
= TYPE_STUB_DECL (node
);
12194 attr
= lookup_attribute ("deprecated",
12195 TYPE_ATTRIBUTES (TREE_TYPE (decl
)));
12200 attr
= lookup_attribute ("deprecated", attr
);
12203 msg
= TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
)));
12211 w
= warning (OPT_Wdeprecated_declarations
,
12212 "%qD is deprecated: %s", node
, msg
);
12214 w
= warning (OPT_Wdeprecated_declarations
,
12215 "%qD is deprecated", node
);
12217 inform (DECL_SOURCE_LOCATION (node
), "declared here");
12219 else if (TYPE_P (node
))
12221 tree what
= NULL_TREE
;
12222 tree decl
= TYPE_STUB_DECL (node
);
12224 if (TYPE_NAME (node
))
12226 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
12227 what
= TYPE_NAME (node
);
12228 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
12229 && DECL_NAME (TYPE_NAME (node
)))
12230 what
= DECL_NAME (TYPE_NAME (node
));
12238 w
= warning (OPT_Wdeprecated_declarations
,
12239 "%qE is deprecated: %s", what
, msg
);
12241 w
= warning (OPT_Wdeprecated_declarations
,
12242 "%qE is deprecated", what
);
12247 w
= warning (OPT_Wdeprecated_declarations
,
12248 "type is deprecated: %s", msg
);
12250 w
= warning (OPT_Wdeprecated_declarations
,
12251 "type is deprecated");
12254 inform (DECL_SOURCE_LOCATION (decl
), "declared here");
12261 warning (OPT_Wdeprecated_declarations
, "%qE is deprecated: %s",
12264 warning (OPT_Wdeprecated_declarations
, "%qE is deprecated", what
);
12269 warning (OPT_Wdeprecated_declarations
, "type is deprecated: %s",
12272 warning (OPT_Wdeprecated_declarations
, "type is deprecated");
12278 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12279 somewhere in it. */
12282 contains_bitfld_component_ref_p (const_tree ref
)
12284 while (handled_component_p (ref
))
12286 if (TREE_CODE (ref
) == COMPONENT_REF
12287 && DECL_BIT_FIELD (TREE_OPERAND (ref
, 1)))
12289 ref
= TREE_OPERAND (ref
, 0);
12295 /* Try to determine whether a TRY_CATCH expression can fall through.
12296 This is a subroutine of block_may_fallthru. */
/* NOTE(review): extraction-garbled region -- statements split across lines,
   some original lines (case labels, returns, braces) dropped.  Code kept
   byte-identical; comments only added.  */
12299 try_catch_may_fallthru (const_tree stmt
)
12301 tree_stmt_iterator i
;
12303 /* If the TRY block can fall through, the whole TRY_CATCH can
12305 if (block_may_fallthru (TREE_OPERAND (stmt
, 0)))
/* Dispatch on the first statement of the handler sequence (operand 1).  */
12308 i
= tsi_start (TREE_OPERAND (stmt
, 1));
12309 switch (TREE_CODE (tsi_stmt (i
)))
/* NOTE(review): the "case CATCH_EXPR:" label appears to have been dropped
   here by the extractor.  */
12312 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12313 catch expression and a body. The whole TRY_CATCH may fall
12314 through iff any of the catch bodies falls through. */
12315 for (; !tsi_end_p (i
); tsi_next (&i
))
12317 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i
))))
12322 case EH_FILTER_EXPR
:
12323 /* The exception filter expression only matters if there is an
12324 exception. If the exception does not match EH_FILTER_TYPES,
12325 we will execute EH_FILTER_FAILURE, and we will fall through
12326 if that falls through. If the exception does match
12327 EH_FILTER_TYPES, the stack unwinder will continue up the
12328 stack, so we will not fall through. We don't know whether we
12329 will throw an exception which matches EH_FILTER_TYPES or not,
12330 so we just ignore EH_FILTER_TYPES and assume that we might
12331 throw an exception which doesn't match. */
12332 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i
)));
/* NOTE(review): a "default:" label presumably preceded this comment --
   dropped by the extractor.  */
12335 /* This case represents statements to be executed when an
12336 exception occurs. Those statements are implicitly followed
12337 by a RESX statement to resume execution after the exception.
12338 So in this case the TRY_CATCH never falls through. */
12343 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12344 need not be 100% accurate; simply be conservative and return true if we
12345 don't know. This is used only to avoid stupidly generating extra code.
12346 If we're wrong, we'll just delete the extra code later. */
/* NOTE(review): extraction-garbled region -- several case labels
   (e.g. for GOTO/RETURN-style codes, SWITCH_EXPR, COND_EXPR, BIND_EXPR,
   CALL_EXPR) were dropped; code text kept byte-identical.  */
12349 block_may_fallthru (const_tree block
)
12351 /* This CONST_CAST is okay because expr_last returns its argument
12352 unmodified and we assign it to a const_tree. */
12353 const_tree stmt
= expr_last (CONST_CAST_TREE (block
));
/* An empty block (null last statement) is treated as ERROR_MARK, i.e.
   conservatively assumed to fall through via the default path below.  */
12355 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
12359 /* Easy cases. If the last statement of the block implies
12360 control transfer, then we can't fall through. */
12364 /* If there is a default: label or case labels cover all possible
12365 SWITCH_COND values, then the SWITCH_EXPR will transfer control
12366 to some case label in all cases and all we care is whether the
12367 SWITCH_BODY falls through. */
12368 if (SWITCH_ALL_CASES_P (stmt
))
12369 return block_may_fallthru (SWITCH_BODY (stmt
));
/* NOTE(review): the COND_EXPR case label was presumably dropped here.  */
12373 if (block_may_fallthru (COND_EXPR_THEN (stmt
)))
12375 return block_may_fallthru (COND_EXPR_ELSE (stmt
));
/* NOTE(review): BIND_EXPR case label presumably dropped here.  */
12378 return block_may_fallthru (BIND_EXPR_BODY (stmt
));
12380 case TRY_CATCH_EXPR
:
12381 return try_catch_may_fallthru (stmt
);
12383 case TRY_FINALLY_EXPR
:
12384 /* The finally clause is always executed after the try clause,
12385 so if it does not fall through, then the try-finally will not
12386 fall through. Otherwise, if the try clause does not fall
12387 through, then when the finally clause falls through it will
12388 resume execution wherever the try clause was going. So the
12389 whole try-finally will only fall through if both the try
12390 clause and the finally clause fall through. */
12391 return (block_may_fallthru (TREE_OPERAND (stmt
, 0))
12392 && block_may_fallthru (TREE_OPERAND (stmt
, 1)))
;
/* NOTE(review): a MODIFY_EXPR case presumably preceded this -- a call on
   the RHS of an assignment is handled like a plain call.  */
12395 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
12396 stmt
= TREE_OPERAND (stmt
, 1);
12402 /* Functions that do not return do not fall through. */
12403 return (call_expr_flags (stmt
) & ECF_NORETURN
) == 0;
12405 case CLEANUP_POINT_EXPR
:
12406 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
/* NOTE(review): COMPOUND_EXPR-style case label presumably dropped here --
   only the second operand matters for fallthrough.  */
12409 return block_may_fallthru (TREE_OPERAND (stmt
, 1));
/* Default: defer to the front end's own notion of fallthrough.  */
12415 return lang_hooks
.block_may_fallthru (stmt
);
12419 /* True if we are using EH to handle cleanups. */
/* NOTE(review): extraction-garbled region; code kept byte-identical.  */
12420 static bool using_eh_for_cleanups_flag
= false;
12422 /* This routine is called from front ends to indicate eh should be used for
12425 using_eh_for_cleanups (void)
/* One-way latch: once a front end opts in, the flag stays set.  */
12427 using_eh_for_cleanups_flag
= true;
12430 /* Query whether EH is used for cleanups. */
/* NOTE(review): extraction-garbled region; code kept byte-identical.
   Plain accessor for using_eh_for_cleanups_flag above.  */
12432 using_eh_for_cleanups_p (void)
12434 return using_eh_for_cleanups_flag
;
12437 /* Wrapper for tree_code_name to ensure that tree code is valid */
/* NOTE(review): extraction-garbled region; code kept byte-identical.
   Out-of-range codes are meant to yield the "invalid" string, but the
   "return invalid;" line (original line ~12444) was dropped by the
   extractor -- confirm against upstream tree.c.  */
12439 get_tree_code_name (enum tree_code code
)
12441 const char *invalid
= "<invalid tree code>";
12443 if (code
>= MAX_TREE_CODES
)
/* In-range code: index the global tree_code_name table directly.  */
12446 return tree_code_name
[code
];
12449 /* Drops the TREE_OVERFLOW flag from T. */
/* NOTE(review): extraction-garbled region -- statements split across
   lines, some braces/returns dropped; code kept byte-identical.  */
12452 drop_tree_overflow (tree t
)
/* Caller contract: T must actually have TREE_OVERFLOW set.  */
12454 gcc_checking_assert (TREE_OVERFLOW (t
));
12456 /* For tree codes with a sharing machinery re-build the result. */
12457 if (TREE_CODE (t
) == INTEGER_CST
)
12458 return wide_int_to_tree (TREE_TYPE (t
), wi::to_wide (t
));
12460 /* For VECTOR_CST, remove the overflow bits from the encoded elements
12461 and canonicalize the result. */
12462 if (TREE_CODE (t
) == VECTOR_CST
)
12464 tree_vector_builder builder
;
12465 builder
.new_unary_operation (TREE_TYPE (t
), t
, true);
12466 unsigned int count
= builder
.encoded_nelts ();
/* Recurse into each encoded element, clearing its overflow flag.  */
12467 for (unsigned int i
= 0; i
< count
; ++i
)
12469 tree elt
= VECTOR_CST_ELT (t
, i
);
12470 if (TREE_OVERFLOW (elt
))
12471 elt
= drop_tree_overflow (elt
);
12472 builder
.quick_push (elt
);
12474 return builder
.build ();
12477 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12478 and drop the flag. */
/* NOTE(review): the copy_node call preceding this (original ~12479)
   appears to have been dropped by the extractor.  */
12480 TREE_OVERFLOW (t
) = 0;
12482 /* For constants that contain nested constants, drop the flag
12483 from those as well. */
12484 if (TREE_CODE (t
) == COMPLEX_CST
)
12486 if (TREE_OVERFLOW (TREE_REALPART (t
)))
12487 TREE_REALPART (t
) = drop_tree_overflow (TREE_REALPART (t
));
12488 if (TREE_OVERFLOW (TREE_IMAGPART (t
)))
12489 TREE_IMAGPART (t
) = drop_tree_overflow (TREE_IMAGPART (t
));
12495 /* Given a memory reference expression T, return its base address.
12496 The base address of a memory reference expression is the main
12497 object being referenced. For instance, the base address for
12498 'array[i].fld[j]' is 'array'. You can think of this as stripping
12499 away the offset part from a memory address.
12501 This function calls handled_component_p to strip away all the inner
12502 parts of the memory reference until it reaches the base object. */
/* NOTE(review): extraction-garbled region -- the final return statement(s)
   were dropped; code kept byte-identical.  */
12505 get_base_address (tree t
)
/* Strip component refs (COMPONENT_REF, ARRAY_REF, ...) down to the base.  */
12507 while (handled_component_p (t
))
12508 t
= TREE_OPERAND (t
, 0);
/* Look through a (TARGET_)MEM_REF whose address operand is &object.  */
12510 if ((TREE_CODE (t
) == MEM_REF
12511 || TREE_CODE (t
) == TARGET_MEM_REF
)
12512 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
)
12513 t
= TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
12515 /* ??? Either the alias oracle or all callers need to properly deal
12516 with WITH_SIZE_EXPRs before we can look through those. */
12517 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
12523 /* Return a tree of sizetype representing the size, in bytes, of the element
12524 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
/* NOTE(review): extraction-garbled region; code kept byte-identical.  */
12527 array_ref_element_size (tree exp
)
/* Operand 3 of an ARRAY_REF is an optional explicit element size.  */
12529 tree aligned_size
= TREE_OPERAND (exp
, 3);
12530 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12531 location_t loc
= EXPR_LOCATION (exp
);
12533 /* If a size was specified in the ARRAY_REF, it's the size measured
12534 in alignment units of the element type. So multiply by that value. */
12537 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12538 sizetype from another type of the same width and signedness. */
12539 if (TREE_TYPE (aligned_size
) != sizetype
)
12540 aligned_size
= fold_convert_loc (loc
, sizetype
, aligned_size
);
12541 return size_binop_loc (loc
, MULT_EXPR
, aligned_size
,
12542 size_int (TYPE_ALIGN_UNIT (elmt_type
)));
12545 /* Otherwise, take the size from that of the element type. Substitute
12546 any PLACEHOLDER_EXPR that we have. */
12548 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type
), exp
);
12551 /* Return a tree representing the lower bound of the array mentioned in
12552 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
/* NOTE(review): extraction-garbled region; code kept byte-identical.  */
12555 array_ref_low_bound (tree exp
)
12557 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12559 /* If a lower bound is specified in EXP, use it. */
12560 if (TREE_OPERAND (exp
, 2))
12561 return TREE_OPERAND (exp
, 2);
12563 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12564 substituting for a PLACEHOLDER_EXPR as needed. */
12565 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
12566 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type
), exp
);
12568 /* Otherwise, return a zero of the appropriate type. */
/* The index operand's type (operand 1) supplies the zero's type.  */
12569 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp
, 1)), 0);
12572 /* Return a tree representing the upper bound of the array mentioned in
12573 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
/* NOTE(review): extraction-garbled region; the failure return (presumably
   "return NULL_TREE;") was dropped.  Code kept byte-identical.  */
12576 array_ref_up_bound (tree exp
)
12578 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12580 /* If there is a domain type and it has an upper bound, use it, substituting
12581 for a PLACEHOLDER_EXPR as needed. */
12582 if (domain_type
&& TYPE_MAX_VALUE (domain_type
))
12583 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type
), exp
);
12585 /* Otherwise fail. */
12589 /* Returns true if REF is an array reference or a component reference
12590 to an array at the end of a structure.
12591 If this is the case, the array may be allocated larger
12592 than its upper bound implies. */
/* NOTE(review): extraction-garbled region -- many original lines (early
   returns, the else/return-false branch, parts of conditions) were
   dropped; code kept byte-identical, comments only added.  */
12595 array_at_struct_end_p (tree ref
)
/* First, peel the outermost array access and remember the array type.  */
12599 if (TREE_CODE (ref
) == ARRAY_REF
12600 || TREE_CODE (ref
) == ARRAY_RANGE_REF
)
12602 atype
= TREE_TYPE (TREE_OPERAND (ref
, 0));
12603 ref
= TREE_OPERAND (ref
, 0);
12605 else if (TREE_CODE (ref
) == COMPONENT_REF
12606 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 1))) == ARRAY_TYPE
)
12607 atype
= TREE_TYPE (TREE_OPERAND (ref
, 1));
/* String literals are never treated as extendable trailing arrays.  */
12611 if (TREE_CODE (ref
) == STRING_CST
)
/* Walk the remaining component chain outward, checking that the array is
   really the last thing in each enclosing record.  */
12614 while (handled_component_p (ref
))
12616 /* If the reference chain contains a component reference to a
12617 non-union type and there follows another field the reference
12618 is not at the end of a structure. */
12619 if (TREE_CODE (ref
) == COMPONENT_REF
)
12621 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 0))) == RECORD_TYPE
)
/* Skip non-FIELD_DECL chain entries when looking for a following field.  */
12623 tree nextf
= DECL_CHAIN (TREE_OPERAND (ref
, 1));
12624 while (nextf
&& TREE_CODE (nextf
) != FIELD_DECL
)
12625 nextf
= DECL_CHAIN (nextf
);
12630 /* If we have a multi-dimensional array we do not consider
12631 a non-innermost dimension as flex array if the whole
12632 multi-dimensional array is at struct end.
12633 Same for an array of aggregates with a trailing array
12635 else if (TREE_CODE (ref
) == ARRAY_REF
)
12637 else if (TREE_CODE (ref
) == ARRAY_RANGE_REF
)
12639 /* If we view an underlying object as sth else then what we
12640 gathered up to now is what we have to rely on. */
12641 else if (TREE_CODE (ref
) == VIEW_CONVERT_EXPR
)
12644 gcc_unreachable ();
12646 ref
= TREE_OPERAND (ref
, 0);
12649 /* The array now is at struct end. Treat flexible arrays as
12650 always subject to extend, even into just padding constrained by
12651 an underlying decl. */
12652 if (! TYPE_SIZE (atype
))
/* Look through a MEM_REF of &decl, remembering the access size so it can
   be compared with the decl's size below.  */
12657 if (TREE_CODE (ref
) == MEM_REF
12658 && TREE_CODE (TREE_OPERAND (ref
, 0)) == ADDR_EXPR
)
12660 size
= TYPE_SIZE (TREE_TYPE (ref
));
12661 ref
= TREE_OPERAND (TREE_OPERAND (ref
, 0), 0);
12664 /* If the reference is based on a declared entity, the size of the array
12665 is constrained by its given domain. (Do not trust commons PR/69368). */
12667 /* Be sure the size of MEM_REF target match. For example:
12670 struct foo *str = (struct foo *)&buf;
12672 str->trailin_array[2] = 1;
12674 is valid because BUF allocate enough space. */
/* NOTE(review): the leading "if (DECL_P (ref)"-style condition of this
   test was dropped by the extractor; only the trailing clauses remain.  */
12676 && (!size
|| (DECL_SIZE (ref
) != NULL
12677 && operand_equal_p (DECL_SIZE (ref
), size
, 0)))
12678 && !(flag_unconstrained_commons
12679 && VAR_P (ref
) && DECL_COMMON (ref
)))
12685 /* Return a tree representing the offset, in bytes, of the field referenced
12686 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
/* NOTE(review): extraction-garbled region -- part of the multiplication
   expression (the / BITS_PER_UNIT divisor line) was dropped; code kept
   byte-identical.  */
12689 component_ref_field_offset (tree exp
)
/* Operand 2 of a COMPONENT_REF is an optional explicit offset.  */
12691 tree aligned_offset
= TREE_OPERAND (exp
, 2);
12692 tree field
= TREE_OPERAND (exp
, 1);
12693 location_t loc
= EXPR_LOCATION (exp
);
12695 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12696 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12698 if (aligned_offset
)
12700 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12701 sizetype from another type of the same width and signedness. */
12702 if (TREE_TYPE (aligned_offset
) != sizetype
)
12703 aligned_offset
= fold_convert_loc (loc
, sizetype
, aligned_offset
);
12704 return size_binop_loc (loc
, MULT_EXPR
, aligned_offset
,
12705 size_int (DECL_OFFSET_ALIGN (field
)
12709 /* Otherwise, take the offset from that of the field. Substitute
12710 any PLACEHOLDER_EXPR that we have. */
12712 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field
), exp
);
12715 /* Return the machine mode of T. For vectors, returns the mode of the
12716 inner type. The main use case is to feed the result to HONOR_NANS,
12717 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
/* NOTE(review): extraction-garbled region -- the expression/decl lines
   that strip T down to a type and to its element type (presumably
   "t = TREE_TYPE (t);") were dropped; code kept byte-identical.  */
12720 element_mode (const_tree t
)
12724 if (VECTOR_TYPE_P (t
) || TREE_CODE (t
) == COMPLEX_TYPE
)
12726 return TYPE_MODE (t
);
12729 /* Vector types need to re-check the target flags each time we report
12730 the machine mode. We need to do this because attribute target can
12731 change the result of vector_mode_supported_p and have_regs_of_mode
12732 on a per-function basis. Thus the TYPE_MODE of a VECTOR_TYPE can
12733 change on a per-function basis. */
12734 /* ??? Possibly a better solution is to run through all the types
12735 referenced by a function and re-compute the TYPE_MODE once, rather
12736 than make the TYPE_MODE macro call a function. */
/* NOTE(review): extraction-garbled region -- return statements and the
   BLKmode fallback were dropped; code kept byte-identical.  */
12739 vector_type_mode (const_tree t
)
12743 gcc_assert (TREE_CODE (t
) == VECTOR_TYPE
);
/* Read the cached mode directly from the node, bypassing the TYPE_MODE
   macro (which calls back into this function for vectors).  */
12745 mode
= t
->type_common
.mode
;
/* If the target no longer supports the cached vector mode (e.g. after an
   attribute-target change), fall back to an integer mode of equal size.  */
12746 if (VECTOR_MODE_P (mode
)
12747 && (!targetm
.vector_mode_supported_p (mode
)
12748 || !have_regs_of_mode
[mode
]))
12750 scalar_int_mode innermode
;
12752 /* For integers, try mapping it to a same-sized scalar mode. */
12753 if (is_int_mode (TREE_TYPE (t
)->type_common
.mode
, &innermode
))
12755 unsigned int size
= (TYPE_VECTOR_SUBPARTS (t
)
12756 * GET_MODE_BITSIZE (innermode
));
/* Note: this inner "mode" shadows the outer variable intentionally.  */
12757 scalar_int_mode mode
;
12758 if (int_mode_for_size (size
, 0).exists (&mode
)
12759 && have_regs_of_mode
[mode
])
12769 /* Verify that basic properties of T match TV and thus T can be a variant of
12770 TV. TV should be the more specified variant (i.e. the main variant). */
/* NOTE(review): extraction-garbled region -- many original lines were
   dropped (macro body lines, braces, "return false"/"error_found"
   bookkeeping, some case labels).  Code kept byte-identical; comments
   only added/typo-fixed.  */
12773 verify_type_variant (const_tree t
, tree tv
)
12775 /* Type variant can differ by:
12777 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
12778 ENCODE_QUAL_ADDR_SPACE.
12779 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
12780 in this case some values may not be set in the variant types
12781 (see TYPE_COMPLETE_P checks).
12782 - it is possible to have TYPE_ARTIFICIAL variant of non-artificial type
12783 - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
12784 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
12785 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
12786 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
12787 this is necessary to make it possible to merge types from different TUs
12788 - arrays, pointers and references may have TREE_TYPE that is a variant
12789 of TREE_TYPE of their main variants.
12790 - aggregates may have new TYPE_FIELDS list that list variants of
12791 the main variant TYPE_FIELDS.
12792 - vector types may differ by TYPE_VECTOR_OPAQUE
12795 /* Convenience macro for matching individual fields. */
12796 #define verify_variant_match(flag) \
12798 if (flag (tv) != flag (t)) \
12800 error ("type variant differs by " #flag "."); \
12806 /* tree_base checks. */
12808 verify_variant_match (TREE_CODE
);
12809 /* FIXME: Ada builds non-artificial variants of artificial types. */
/* The "&& 0" below deliberately disables this check (see FIXME).  */
12810 if (TYPE_ARTIFICIAL (tv
) && 0)
12811 verify_variant_match (TYPE_ARTIFICIAL
);
12812 if (POINTER_TYPE_P (tv
))
12813 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL
);
12814 /* FIXME: TYPE_SIZES_GIMPLIFIED may differ for Ada build. */
12815 verify_variant_match (TYPE_UNSIGNED
);
12816 verify_variant_match (TYPE_PACKED
);
12817 if (TREE_CODE (t
) == REFERENCE_TYPE
)
12818 verify_variant_match (TYPE_REF_IS_RVALUE
);
12819 if (AGGREGATE_TYPE_P (t
))
12820 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER
);
12822 verify_variant_match (TYPE_SATURATING
);
12823 /* FIXME: This check triggers during libstdc++ build. */
/* Disabled via "&& 0", same pattern as above.  */
12824 if (RECORD_OR_UNION_TYPE_P (t
) && COMPLETE_TYPE_P (t
) && 0)
12825 verify_variant_match (TYPE_FINAL_P
);
12827 /* tree_type_common checks. */
12829 if (COMPLETE_TYPE_P (t
))
12831 verify_variant_match (TYPE_MODE
);
12832 if (TREE_CODE (TYPE_SIZE (t
)) != PLACEHOLDER_EXPR
12833 && TREE_CODE (TYPE_SIZE (tv
)) != PLACEHOLDER_EXPR
)
12834 verify_variant_match (TYPE_SIZE
);
12835 if (TREE_CODE (TYPE_SIZE_UNIT (t
)) != PLACEHOLDER_EXPR
12836 && TREE_CODE (TYPE_SIZE_UNIT (tv
)) != PLACEHOLDER_EXPR
12837 && TYPE_SIZE_UNIT (t
) != TYPE_SIZE_UNIT (tv
))
/* Pointer inequality with operand-equality would mean needless tree
   duplication; the assert documents that expectation.  */
12839 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t
),
12840 TYPE_SIZE_UNIT (tv
), 0));
12841 error ("type variant has different TYPE_SIZE_UNIT");
12843 error ("type variant's TYPE_SIZE_UNIT");
12844 debug_tree (TYPE_SIZE_UNIT (tv
));
12845 error ("type's TYPE_SIZE_UNIT");
12846 debug_tree (TYPE_SIZE_UNIT (t
));
12850 verify_variant_match (TYPE_PRECISION
);
12851 verify_variant_match (TYPE_NEEDS_CONSTRUCTING
);
12852 if (RECORD_OR_UNION_TYPE_P (t
))
12853 verify_variant_match (TYPE_TRANSPARENT_AGGR
);
12854 else if (TREE_CODE (t
) == ARRAY_TYPE
)
12855 verify_variant_match (TYPE_NONALIASED_COMPONENT
);
12856 /* During LTO we merge variant lists from different translation units
12857 that may differ BY TYPE_CONTEXT that in turn may point
12858 to TRANSLATION_UNIT_DECL.
12859 Ada also builds variants of types with different TYPE_CONTEXT. */
12860 if ((!in_lto_p
|| !TYPE_FILE_SCOPE_P (t
)) && 0)
12861 verify_variant_match (TYPE_CONTEXT
);
12862 verify_variant_match (TYPE_STRING_FLAG
);
/* Variants share their main variant's alias set; a variant with its own
   computed alias set is a verifier error.  */
12863 if (TYPE_ALIAS_SET_KNOWN_P (t
))
12865 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
12870 /* tree_type_non_common checks. */
12872 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
12873 and dangle the pointer from time to time. */
12874 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_VFIELD (t
) != TYPE_VFIELD (tv
)
12875 && (in_lto_p
|| !TYPE_VFIELD (tv
)
12876 || TREE_CODE (TYPE_VFIELD (tv
)) != TREE_LIST
))
12878 error ("type variant has different TYPE_VFIELD");
/* TYPE_MIN/MAX_VALUE only exist for scalar-ish type codes.  */
12882 if ((TREE_CODE (t
) == ENUMERAL_TYPE
&& COMPLETE_TYPE_P (t
))
12883 || TREE_CODE (t
) == INTEGER_TYPE
12884 || TREE_CODE (t
) == BOOLEAN_TYPE
12885 || TREE_CODE (t
) == REAL_TYPE
12886 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
12888 verify_variant_match (TYPE_MAX_VALUE
);
12889 verify_variant_match (TYPE_MIN_VALUE
);
12891 if (TREE_CODE (t
) == METHOD_TYPE
)
12892 verify_variant_match (TYPE_METHOD_BASETYPE
);
12893 if (TREE_CODE (t
) == OFFSET_TYPE
)
12894 verify_variant_match (TYPE_OFFSET_BASETYPE
);
12895 if (TREE_CODE (t
) == ARRAY_TYPE
)
12896 verify_variant_match (TYPE_ARRAY_MAX_SIZE
);
12897 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
12898 or even type's main variant. This is needed to make bootstrap pass
12899 and the bug seems new in GCC 5.
12900 C++ FE should be updated to make this consistent and we should check
12901 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
12902 is a match with main variant.
12904 Also disable the check for Java for now because of parser hack that builds
12905 first a dummy BINFO and then sometimes replace it by real BINFO in some
12907 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_BINFO (t
) && TYPE_BINFO (tv
)
12908 && TYPE_BINFO (t
) != TYPE_BINFO (tv
)
12909 /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
12910 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
12911 at LTO time only. */
12912 && (in_lto_p
&& odr_type_p (t
)))
12914 error ("type variant has different TYPE_BINFO");
12916 error ("type variant's TYPE_BINFO");
12917 debug_tree (TYPE_BINFO (tv
));
12918 error ("type's TYPE_BINFO");
12919 debug_tree (TYPE_BINFO (t
));
12923 /* Check various uses of TYPE_VALUES_RAW. */
12924 if (TREE_CODE (t
) == ENUMERAL_TYPE
)
12925 verify_variant_match (TYPE_VALUES
);
12926 else if (TREE_CODE (t
) == ARRAY_TYPE
)
12927 verify_variant_match (TYPE_DOMAIN
);
12928 /* Permit incomplete variants of complete type. While FEs may complete
12929 all variants, this does not happen for C++ templates in all cases. */
12930 else if (RECORD_OR_UNION_TYPE_P (t
)
12931 && COMPLETE_TYPE_P (t
)
12932 && TYPE_FIELDS (t
) != TYPE_FIELDS (tv
))
12936 /* Fortran builds qualified variants as new records with items of
12937 qualified type. Verify that they looks same. */
/* NOTE(review): the f1/f2 declarations and the loop condition line were
   dropped by the extractor.  */
12938 for (f1
= TYPE_FIELDS (t
), f2
= TYPE_FIELDS (tv
);
12940 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
12941 if (TREE_CODE (f1
) != FIELD_DECL
|| TREE_CODE (f2
) != FIELD_DECL
12942 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1
))
12943 != TYPE_MAIN_VARIANT (TREE_TYPE (f2
))
12944 /* FIXME: gfc_nonrestricted_type builds all types as variants
12945 with exception of pointer types. It deeply copies the type
12946 which means that we may end up with a variant type
12947 referring non-variant pointer. We may change it to
12948 produce types as variants, too, like
12949 objc_get_protocol_qualified_type does. */
12950 && !POINTER_TYPE_P (TREE_TYPE (f1
)))
12951 || DECL_FIELD_OFFSET (f1
) != DECL_FIELD_OFFSET (f2
)
12952 || DECL_FIELD_BIT_OFFSET (f1
) != DECL_FIELD_BIT_OFFSET (f2
))
12956 error ("type variant has different TYPE_FIELDS");
12958 error ("first mismatch is field");
12960 error ("and field");
12965 else if ((TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
))
12966 verify_variant_match (TYPE_ARG_TYPES
);
12967 /* For C++ the qualified variant of array type is really an array type
12968 of qualified TREE_TYPE.
12969 objc builds variants of pointer where pointer to type is a variant, too
12970 in objc_get_protocol_qualified_type. */
12971 if (TREE_TYPE (t
) != TREE_TYPE (tv
)
12972 && ((TREE_CODE (t
) != ARRAY_TYPE
12973 && !POINTER_TYPE_P (t
))
12974 || TYPE_MAIN_VARIANT (TREE_TYPE (t
))
12975 != TYPE_MAIN_VARIANT (TREE_TYPE (tv
))))
12977 error ("type variant has different TREE_TYPE");
12979 error ("type variant's TREE_TYPE");
12980 debug_tree (TREE_TYPE (tv
));
12981 error ("type's TREE_TYPE");
12982 debug_tree (TREE_TYPE (t
));
/* Finally, a structural compatibility check (alias-set relevant types
   only), not trusting TYPE_CANONICAL.  */
12985 if (type_with_alias_set_p (t
)
12986 && !gimple_canonical_types_compatible_p (t
, tv
, false))
12988 error ("type is not compatible with its variant");
12990 error ("type variant's TREE_TYPE");
12991 debug_tree (TREE_TYPE (tv
));
12992 error ("type's TREE_TYPE");
12993 debug_tree (TREE_TYPE (t
));
12997 #undef verify_variant_match
13001 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13002 the middle-end types_compatible_p function. It needs to avoid
13003 claiming types are different for types that should be treated
13004 the same with respect to TBAA. Canonical types are also used
13005 for IL consistency checks via the useless_type_conversion_p
13006 predicate which does not handle all type kinds itself but falls
13007 back to pointer-comparison of TYPE_CANONICAL for aggregates
13010 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13011 type calculation because we need to allow inter-operability between signed
13012 and unsigned variants. */
/* NOTE(review): extraction-garbled region -- a middle line of the return
   expression (presumably the "== INTEGER_TYPE" comparison) was dropped;
   code kept byte-identical.  */
13015 type_with_interoperable_signedness (const_tree type
)
13017 /* Fortran standard requires C_SIGNED_CHAR to be interoperable with both
13018 signed char and unsigned char. Similarly fortran FE builds
13019 C_SIZE_T as signed type, while C defines it unsigned. */
13021 return tree_code_for_canonical_type_merging (TREE_CODE (type
))
13023 && (TYPE_PRECISION (type
) == TYPE_PRECISION (signed_char_type_node
)
13024 || TYPE_PRECISION (type
) == TYPE_PRECISION (size_type_node
));
13027 /* Return true iff T1 and T2 are structurally identical for what
13029 This function is used both by lto.c canonical type merging and by the
13030 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13031 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13032 only for LTO because only in these cases TYPE_CANONICAL equivalence
13033 correspond to one defined by gimple_canonical_types_compatible_p. */
/* NOTE(review): extraction-garbled region -- many original lines were
   dropped (returns, braces, some case labels, loop conditions).  Code
   kept byte-identical; comments only added/typo-fixed.  */
13036 gimple_canonical_types_compatible_p (const_tree t1
, const_tree t2
,
13037 bool trust_type_canonical
)
13039 /* Type variants should be same as the main variant. When not doing sanity
13040 checking to verify this fact, go to main variants and save some work. */
13041 if (trust_type_canonical
)
13043 t1
= TYPE_MAIN_VARIANT (t1
);
13044 t2
= TYPE_MAIN_VARIANT (t2
);
13047 /* Check first for the obvious case of pointer identity. */
13051 /* Check that we have two types to compare. */
13052 if (t1
== NULL_TREE
|| t2
== NULL_TREE
)
13055 /* We consider complete types always compatible with incomplete type.
13056 This does not make sense for canonical type calculation and thus we
13057 need to ensure that we are never called on it.
13059 FIXME: For more correctness the function probably should have three modes
13060 1) mode assuming that types are complete matching their structure
13061 2) mode allowing incomplete types but producing equivalence classes
13062 and thus ignoring all info from complete types
13063 3) mode allowing incomplete types to match complete but checking
13064 compatibility between complete types.
13066 1 and 2 can be used for canonical type calculation. 3 is the real
13067 definition of type compatibility that can be used i.e. for warnings during
13068 declaration merging. */
13070 gcc_assert (!trust_type_canonical
13071 || (type_with_alias_set_p (t1
) && type_with_alias_set_p (t2
)));
13072 /* If the types have been previously registered and found equal
13075 if (TYPE_CANONICAL (t1
) && TYPE_CANONICAL (t2
)
13076 && trust_type_canonical
)
13078 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13079 they are always NULL, but they are set to non-NULL for types
13080 constructed by build_pointer_type and variants. In this case the
13081 TYPE_CANONICAL is more fine grained than the equivalence we test (where
13082 all pointers are considered equal. Be sure to not return false
13084 gcc_checking_assert (canonical_type_used_p (t1
)
13085 && canonical_type_used_p (t2
));
13086 return TYPE_CANONICAL (t1
) == TYPE_CANONICAL (t2
);
13089 /* Can't be the same type if the types don't have the same code. */
13090 enum tree_code code
= tree_code_for_canonical_type_merging (TREE_CODE (t1
));
13091 if (code
!= tree_code_for_canonical_type_merging (TREE_CODE (t2
)))
13094 /* Qualifiers do not matter for canonical type comparison purposes. */
13096 /* Void types and nullptr types are always the same. */
13097 if (TREE_CODE (t1
) == VOID_TYPE
13098 || TREE_CODE (t1
) == NULLPTR_TYPE
)
13101 /* Can't be the same type if they have different mode. */
13102 if (TYPE_MODE (t1
) != TYPE_MODE (t2
))
13105 /* Non-aggregate types can be handled cheaply. */
13106 if (INTEGRAL_TYPE_P (t1
)
13107 || SCALAR_FLOAT_TYPE_P (t1
)
13108 || FIXED_POINT_TYPE_P (t1
)
13109 || TREE_CODE (t1
) == VECTOR_TYPE
13110 || TREE_CODE (t1
) == COMPLEX_TYPE
13111 || TREE_CODE (t1
) == OFFSET_TYPE
13112 || POINTER_TYPE_P (t1
))
13114 /* Can't be the same type if they have different precision. */
13115 if (TYPE_PRECISION (t1
) != TYPE_PRECISION (t2
))
13118 /* In some cases the signed and unsigned types are required to be
13120 if (TYPE_UNSIGNED (t1
) != TYPE_UNSIGNED (t2
)
13121 && !type_with_interoperable_signedness (t1
))
13124 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13125 interoperable with "signed char". Unless all frontends are revisited
13126 to agree on these types, we must ignore the flag completely. */
13128 /* Fortran standard defines C_PTR type that is compatible with every
13129 C pointer. For this reason we need to glob all pointers into one.
13130 Still pointers in different address spaces are not compatible. */
13131 if (POINTER_TYPE_P (t1
))
13133 if (TYPE_ADDR_SPACE (TREE_TYPE (t1
))
13134 != TYPE_ADDR_SPACE (TREE_TYPE (t2
)))
13138 /* Tail-recurse to components. */
13139 if (TREE_CODE (t1
) == VECTOR_TYPE
13140 || TREE_CODE (t1
) == COMPLEX_TYPE
)
13141 return gimple_canonical_types_compatible_p (TREE_TYPE (t1
),
13143 trust_type_canonical
);
13148 /* Do type-specific comparisons. */
13149 switch (TREE_CODE (t1
))
/* NOTE(review): the "case ARRAY_TYPE:" label was dropped here.  */
13152 /* Array types are the same if the element types are the same and
13153 the number of elements are the same. */
13154 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
13155 trust_type_canonical
)
13156 || TYPE_STRING_FLAG (t1
) != TYPE_STRING_FLAG (t2
)
13157 || TYPE_REVERSE_STORAGE_ORDER (t1
) != TYPE_REVERSE_STORAGE_ORDER (t2
)
13158 || TYPE_NONALIASED_COMPONENT (t1
) != TYPE_NONALIASED_COMPONENT (t2
))
13162 tree i1
= TYPE_DOMAIN (t1
);
13163 tree i2
= TYPE_DOMAIN (t2
);
13165 /* For an incomplete external array, the type domain can be
13166 NULL_TREE. Check this condition also. */
13167 if (i1
== NULL_TREE
&& i2
== NULL_TREE
)
13169 else if (i1
== NULL_TREE
|| i2
== NULL_TREE
)
13173 tree min1
= TYPE_MIN_VALUE (i1
);
13174 tree min2
= TYPE_MIN_VALUE (i2
);
13175 tree max1
= TYPE_MAX_VALUE (i1
);
13176 tree max2
= TYPE_MAX_VALUE (i2
);
13178 /* The minimum/maximum values have to be the same. */
/* NOTE(review): the leading null-checks of this condition were dropped;
   PLACEHOLDER_EXPR bounds are treated as pairwise equal.  */
13181 && ((TREE_CODE (min1
) == PLACEHOLDER_EXPR
13182 && TREE_CODE (min2
) == PLACEHOLDER_EXPR
)
13183 || operand_equal_p (min1
, min2
, 0))))
13186 && ((TREE_CODE (max1
) == PLACEHOLDER_EXPR
13187 && TREE_CODE (max2
) == PLACEHOLDER_EXPR
)
13188 || operand_equal_p (max1
, max2
, 0)))))
13196 case FUNCTION_TYPE
:
13197 /* Function types are the same if the return type and arguments types
13199 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
),
13200 trust_type_canonical
))
13203 if (TYPE_ARG_TYPES (t1
) == TYPE_ARG_TYPES (t2
))
13207 tree parms1
, parms2
;
/* Walk both argument TREE_LISTs in lockstep, comparing TREE_VALUEs.  */
13209 for (parms1
= TYPE_ARG_TYPES (t1
), parms2
= TYPE_ARG_TYPES (t2
);
13211 parms1
= TREE_CHAIN (parms1
), parms2
= TREE_CHAIN (parms2
))
13213 if (!gimple_canonical_types_compatible_p
13214 (TREE_VALUE (parms1
), TREE_VALUE (parms2
),
13215 trust_type_canonical
))
/* Unequal argument-list lengths: one list still has entries left.  */
13219 if (parms1
|| parms2
)
13227 case QUAL_UNION_TYPE
:
13231 /* Don't try to compare variants of an incomplete type, before
13232 TYPE_FIELDS has been copied around. */
13233 if (!COMPLETE_TYPE_P (t1
) && !COMPLETE_TYPE_P (t2
))
13237 if (TYPE_REVERSE_STORAGE_ORDER (t1
) != TYPE_REVERSE_STORAGE_ORDER (t2
))
13240 /* For aggregate types, all the fields must be the same. */
13241 for (f1
= TYPE_FIELDS (t1
), f2
= TYPE_FIELDS (t2
);
13243 f1
= TREE_CHAIN (f1
), f2
= TREE_CHAIN (f2
))
13245 /* Skip non-fields and zero-sized fields. */
13246 while (f1
&& (TREE_CODE (f1
) != FIELD_DECL
13248 && integer_zerop (DECL_SIZE (f1
)))))
13249 f1
= TREE_CHAIN (f1
);
13250 while (f2
&& (TREE_CODE (f2
) != FIELD_DECL
13252 && integer_zerop (DECL_SIZE (f2
)))))
13253 f2
= TREE_CHAIN (f2
);
13256 /* The fields must have the same name, offset and type. */
13257 if (DECL_NONADDRESSABLE_P (f1
) != DECL_NONADDRESSABLE_P (f2
)
13258 || !gimple_compare_field_offset (f1
, f2
)
13259 || !gimple_canonical_types_compatible_p
13260 (TREE_TYPE (f1
), TREE_TYPE (f2
),
13261 trust_type_canonical
))
13265 /* If one aggregate has more fields than the other, they
13266 are not the same. */
13274 /* Consider all types with language specific trees in them mutually
13275 compatible. This is executed only from verify_type and false
13276 positives can be tolerated. */
13277 gcc_assert (!in_lto_p
);
13282 /* Verify type T. */
13285 verify_type (const_tree t
)
13287 bool error_found
= false;
13288 tree mv
= TYPE_MAIN_VARIANT (t
);
13291 error ("Main variant is not defined");
13292 error_found
= true;
13294 else if (mv
!= TYPE_MAIN_VARIANT (mv
))
13296 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13298 error_found
= true;
13300 else if (t
!= mv
&& !verify_type_variant (t
, mv
))
13301 error_found
= true;
13303 tree ct
= TYPE_CANONICAL (t
);
13306 else if (TYPE_CANONICAL (t
) != ct
)
13308 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13310 error_found
= true;
13312 /* Method and function types can not be used to address memory and thus
13313 TYPE_CANONICAL really matters only for determining useless conversions.
13315 FIXME: C++ FE produce declarations of builtin functions that are not
13316 compatible with main variants. */
13317 else if (TREE_CODE (t
) == FUNCTION_TYPE
)
13320 /* FIXME: gimple_canonical_types_compatible_p can not compare types
13321 with variably sized arrays because their sizes possibly
13322 gimplified to different variables. */
13323 && !variably_modified_type_p (ct
, NULL
)
13324 && !gimple_canonical_types_compatible_p (t
, ct
, false))
13326 error ("TYPE_CANONICAL is not compatible");
13328 error_found
= true;
13331 if (COMPLETE_TYPE_P (t
) && TYPE_CANONICAL (t
)
13332 && TYPE_MODE (t
) != TYPE_MODE (TYPE_CANONICAL (t
)))
13334 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13336 error_found
= true;
13338 if (TYPE_MAIN_VARIANT (t
) == t
&& ct
&& TYPE_MAIN_VARIANT (ct
) != ct
)
13340 error ("TYPE_CANONICAL of main variant is not main variant");
13342 debug_tree (TYPE_MAIN_VARIANT (ct
));
13343 error_found
= true;
13347 /* Check various uses of TYPE_MIN_VALUE_RAW. */
13348 if (RECORD_OR_UNION_TYPE_P (t
))
13350 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13351 and danagle the pointer from time to time. */
13352 if (TYPE_VFIELD (t
)
13353 && TREE_CODE (TYPE_VFIELD (t
)) != FIELD_DECL
13354 && TREE_CODE (TYPE_VFIELD (t
)) != TREE_LIST
)
13356 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13357 debug_tree (TYPE_VFIELD (t
));
13358 error_found
= true;
13361 else if (TREE_CODE (t
) == POINTER_TYPE
)
13363 if (TYPE_NEXT_PTR_TO (t
)
13364 && TREE_CODE (TYPE_NEXT_PTR_TO (t
)) != POINTER_TYPE
)
13366 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13367 debug_tree (TYPE_NEXT_PTR_TO (t
));
13368 error_found
= true;
13371 else if (TREE_CODE (t
) == REFERENCE_TYPE
)
13373 if (TYPE_NEXT_REF_TO (t
)
13374 && TREE_CODE (TYPE_NEXT_REF_TO (t
)) != REFERENCE_TYPE
)
13376 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13377 debug_tree (TYPE_NEXT_REF_TO (t
));
13378 error_found
= true;
13381 else if (INTEGRAL_TYPE_P (t
) || TREE_CODE (t
) == REAL_TYPE
13382 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13384 /* FIXME: The following check should pass:
13385 useless_type_conversion_p (const_cast <tree> (t),
13386 TREE_TYPE (TYPE_MIN_VALUE (t))
13387 but does not for C sizetypes in LTO. */
13390 /* Check various uses of TYPE_MAXVAL_RAW. */
13391 if (RECORD_OR_UNION_TYPE_P (t
))
13393 if (!TYPE_BINFO (t
))
13395 else if (TREE_CODE (TYPE_BINFO (t
)) != TREE_BINFO
)
13397 error ("TYPE_BINFO is not TREE_BINFO");
13398 debug_tree (TYPE_BINFO (t
));
13399 error_found
= true;
13401 else if (TREE_TYPE (TYPE_BINFO (t
)) != TYPE_MAIN_VARIANT (t
))
13403 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13404 debug_tree (TREE_TYPE (TYPE_BINFO (t
)));
13405 error_found
= true;
13408 else if (TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
)
13410 if (TYPE_METHOD_BASETYPE (t
)
13411 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != RECORD_TYPE
13412 && TREE_CODE (TYPE_METHOD_BASETYPE (t
)) != UNION_TYPE
)
13414 error ("TYPE_METHOD_BASETYPE is not record nor union");
13415 debug_tree (TYPE_METHOD_BASETYPE (t
));
13416 error_found
= true;
13419 else if (TREE_CODE (t
) == OFFSET_TYPE
)
13421 if (TYPE_OFFSET_BASETYPE (t
)
13422 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != RECORD_TYPE
13423 && TREE_CODE (TYPE_OFFSET_BASETYPE (t
)) != UNION_TYPE
)
13425 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13426 debug_tree (TYPE_OFFSET_BASETYPE (t
));
13427 error_found
= true;
13430 else if (INTEGRAL_TYPE_P (t
) || TREE_CODE (t
) == REAL_TYPE
13431 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13433 /* FIXME: The following check should pass:
13434 useless_type_conversion_p (const_cast <tree> (t),
13435 TREE_TYPE (TYPE_MAX_VALUE (t))
13436 but does not for C sizetypes in LTO. */
13438 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13440 if (TYPE_ARRAY_MAX_SIZE (t
)
13441 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t
)) != INTEGER_CST
)
13443 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13444 debug_tree (TYPE_ARRAY_MAX_SIZE (t
));
13445 error_found
= true;
13448 else if (TYPE_MAX_VALUE_RAW (t
))
13450 error ("TYPE_MAX_VALUE_RAW non-NULL");
13451 debug_tree (TYPE_MAX_VALUE_RAW (t
));
13452 error_found
= true;
13455 if (TYPE_LANG_SLOT_1 (t
) && in_lto_p
)
13457 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13458 debug_tree (TYPE_LANG_SLOT_1 (t
));
13459 error_found
= true;
13462 /* Check various uses of TYPE_VALUES_RAW. */
13463 if (TREE_CODE (t
) == ENUMERAL_TYPE
)
13464 for (tree l
= TYPE_VALUES (t
); l
; l
= TREE_CHAIN (l
))
13466 tree value
= TREE_VALUE (l
);
13467 tree name
= TREE_PURPOSE (l
);
13469 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13470 CONST_DECL of ENUMERAL TYPE. */
13471 if (TREE_CODE (value
) != INTEGER_CST
&& TREE_CODE (value
) != CONST_DECL
)
13473 error ("Enum value is not CONST_DECL or INTEGER_CST");
13474 debug_tree (value
);
13476 error_found
= true;
13478 if (TREE_CODE (TREE_TYPE (value
)) != INTEGER_TYPE
13479 && !useless_type_conversion_p (const_cast <tree
> (t
), TREE_TYPE (value
)))
13481 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13482 debug_tree (value
);
13484 error_found
= true;
13486 if (TREE_CODE (name
) != IDENTIFIER_NODE
)
13488 error ("Enum value name is not IDENTIFIER_NODE");
13489 debug_tree (value
);
13491 error_found
= true;
13494 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13496 if (TYPE_DOMAIN (t
) && TREE_CODE (TYPE_DOMAIN (t
)) != INTEGER_TYPE
)
13498 error ("Array TYPE_DOMAIN is not integer type");
13499 debug_tree (TYPE_DOMAIN (t
));
13500 error_found
= true;
13503 else if (RECORD_OR_UNION_TYPE_P (t
))
13505 if (TYPE_FIELDS (t
) && !COMPLETE_TYPE_P (t
) && in_lto_p
)
13507 error ("TYPE_FIELDS defined in incomplete type");
13508 error_found
= true;
13510 for (tree fld
= TYPE_FIELDS (t
); fld
; fld
= TREE_CHAIN (fld
))
13512 /* TODO: verify properties of decls. */
13513 if (TREE_CODE (fld
) == FIELD_DECL
)
13515 else if (TREE_CODE (fld
) == TYPE_DECL
)
13517 else if (TREE_CODE (fld
) == CONST_DECL
)
13519 else if (VAR_P (fld
))
13521 else if (TREE_CODE (fld
) == TEMPLATE_DECL
)
13523 else if (TREE_CODE (fld
) == USING_DECL
)
13525 else if (TREE_CODE (fld
) == FUNCTION_DECL
)
13529 error ("Wrong tree in TYPE_FIELDS list");
13531 error_found
= true;
13535 else if (TREE_CODE (t
) == INTEGER_TYPE
13536 || TREE_CODE (t
) == BOOLEAN_TYPE
13537 || TREE_CODE (t
) == OFFSET_TYPE
13538 || TREE_CODE (t
) == REFERENCE_TYPE
13539 || TREE_CODE (t
) == NULLPTR_TYPE
13540 || TREE_CODE (t
) == POINTER_TYPE
)
13542 if (TYPE_CACHED_VALUES_P (t
) != (TYPE_CACHED_VALUES (t
) != NULL
))
13544 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13545 TYPE_CACHED_VALUES_P (t
), (void *)TYPE_CACHED_VALUES (t
));
13546 error_found
= true;
13548 else if (TYPE_CACHED_VALUES_P (t
) && TREE_CODE (TYPE_CACHED_VALUES (t
)) != TREE_VEC
)
13550 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13551 debug_tree (TYPE_CACHED_VALUES (t
));
13552 error_found
= true;
13554 /* Verify just enough of cache to ensure that no one copied it to new type.
13555 All copying should go by copy_node that should clear it. */
13556 else if (TYPE_CACHED_VALUES_P (t
))
13559 for (i
= 0; i
< TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t
)); i
++)
13560 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)
13561 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
)) != t
)
13563 error ("wrong TYPE_CACHED_VALUES entry");
13564 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t
), i
));
13565 error_found
= true;
13570 else if (TREE_CODE (t
) == FUNCTION_TYPE
|| TREE_CODE (t
) == METHOD_TYPE
)
13571 for (tree l
= TYPE_ARG_TYPES (t
); l
; l
= TREE_CHAIN (l
))
13573 /* C++ FE uses TREE_PURPOSE to store initial values. */
13574 if (TREE_PURPOSE (l
) && in_lto_p
)
13576 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13578 error_found
= true;
13580 if (!TYPE_P (TREE_VALUE (l
)))
13582 error ("Wrong entry in TYPE_ARG_TYPES list");
13584 error_found
= true;
13587 else if (!is_lang_specific (t
) && TYPE_VALUES_RAW (t
))
13589 error ("TYPE_VALUES_RAW field is non-NULL");
13590 debug_tree (TYPE_VALUES_RAW (t
));
13591 error_found
= true;
13593 if (TREE_CODE (t
) != INTEGER_TYPE
13594 && TREE_CODE (t
) != BOOLEAN_TYPE
13595 && TREE_CODE (t
) != OFFSET_TYPE
13596 && TREE_CODE (t
) != REFERENCE_TYPE
13597 && TREE_CODE (t
) != NULLPTR_TYPE
13598 && TREE_CODE (t
) != POINTER_TYPE
13599 && TYPE_CACHED_VALUES_P (t
))
13601 error ("TYPE_CACHED_VALUES_P is set while it should not");
13602 error_found
= true;
13604 if (TYPE_STRING_FLAG (t
)
13605 && TREE_CODE (t
) != ARRAY_TYPE
&& TREE_CODE (t
) != INTEGER_TYPE
)
13607 error ("TYPE_STRING_FLAG is set on wrong type code");
13608 error_found
= true;
13611 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13612 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
13614 if (TREE_CODE (t
) == METHOD_TYPE
13615 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t
)) != TYPE_METHOD_BASETYPE (t
))
13617 error ("TYPE_METHOD_BASETYPE is not main variant");
13618 error_found
= true;
13623 debug_tree (const_cast <tree
> (t
));
13624 internal_error ("verify_type failed");
13629 /* Return 1 if ARG interpreted as signed in its precision is known to be
13630 always positive or 2 if ARG is known to be always negative, or 3 if
13631 ARG may be positive or negative. */
13634 get_range_pos_neg (tree arg
)
13636 if (arg
== error_mark_node
)
13639 int prec
= TYPE_PRECISION (TREE_TYPE (arg
));
13641 if (TREE_CODE (arg
) == INTEGER_CST
)
13643 wide_int w
= wi::sext (wi::to_wide (arg
), prec
);
13649 while (CONVERT_EXPR_P (arg
)
13650 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg
, 0)))
13651 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg
, 0))) <= prec
)
13653 arg
= TREE_OPERAND (arg
, 0);
13654 /* Narrower value zero extended into wider type
13655 will always result in positive values. */
13656 if (TYPE_UNSIGNED (TREE_TYPE (arg
))
13657 && TYPE_PRECISION (TREE_TYPE (arg
)) < prec
)
13659 prec
= TYPE_PRECISION (TREE_TYPE (arg
));
13664 if (TREE_CODE (arg
) != SSA_NAME
)
13666 wide_int arg_min
, arg_max
;
13667 while (get_range_info (arg
, &arg_min
, &arg_max
) != VR_RANGE
)
13669 gimple
*g
= SSA_NAME_DEF_STMT (arg
);
13670 if (is_gimple_assign (g
)
13671 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g
)))
13673 tree t
= gimple_assign_rhs1 (g
);
13674 if (INTEGRAL_TYPE_P (TREE_TYPE (t
))
13675 && TYPE_PRECISION (TREE_TYPE (t
)) <= prec
)
13677 if (TYPE_UNSIGNED (TREE_TYPE (t
))
13678 && TYPE_PRECISION (TREE_TYPE (t
)) < prec
)
13680 prec
= TYPE_PRECISION (TREE_TYPE (t
));
13689 if (TYPE_UNSIGNED (TREE_TYPE (arg
)))
13691 /* For unsigned values, the "positive" range comes
13692 below the "negative" range. */
13693 if (!wi::neg_p (wi::sext (arg_max
, prec
), SIGNED
))
13695 if (wi::neg_p (wi::sext (arg_min
, prec
), SIGNED
))
13700 if (!wi::neg_p (wi::sext (arg_min
, prec
), SIGNED
))
13702 if (wi::neg_p (wi::sext (arg_max
, prec
), SIGNED
))
13711 /* Return true if ARG is marked with the nonnull attribute in the
13712 current function signature. */
13715 nonnull_arg_p (const_tree arg
)
13717 tree t
, attrs
, fntype
;
13718 unsigned HOST_WIDE_INT arg_num
;
13720 gcc_assert (TREE_CODE (arg
) == PARM_DECL
13721 && (POINTER_TYPE_P (TREE_TYPE (arg
))
13722 || TREE_CODE (TREE_TYPE (arg
)) == OFFSET_TYPE
));
13724 /* The static chain decl is always non null. */
13725 if (arg
== cfun
->static_chain_decl
)
13728 /* THIS argument of method is always non-NULL. */
13729 if (TREE_CODE (TREE_TYPE (cfun
->decl
)) == METHOD_TYPE
13730 && arg
== DECL_ARGUMENTS (cfun
->decl
)
13731 && flag_delete_null_pointer_checks
)
13734 /* Values passed by reference are always non-NULL. */
13735 if (TREE_CODE (TREE_TYPE (arg
)) == REFERENCE_TYPE
13736 && flag_delete_null_pointer_checks
)
13739 fntype
= TREE_TYPE (cfun
->decl
);
13740 for (attrs
= TYPE_ATTRIBUTES (fntype
); attrs
; attrs
= TREE_CHAIN (attrs
))
13742 attrs
= lookup_attribute ("nonnull", attrs
);
13744 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13745 if (attrs
== NULL_TREE
)
13748 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13749 if (TREE_VALUE (attrs
) == NULL_TREE
)
13752 /* Get the position number for ARG in the function signature. */
13753 for (arg_num
= 1, t
= DECL_ARGUMENTS (cfun
->decl
);
13755 t
= DECL_CHAIN (t
), arg_num
++)
13761 gcc_assert (t
== arg
);
13763 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13764 for (t
= TREE_VALUE (attrs
); t
; t
= TREE_CHAIN (t
))
13766 if (compare_tree_int (TREE_VALUE (t
), arg_num
) == 0)
13774 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
13778 set_block (location_t loc
, tree block
)
13780 location_t pure_loc
= get_pure_location (loc
);
13781 source_range src_range
= get_range_from_loc (line_table
, loc
);
13782 return COMBINE_LOCATION_DATA (line_table
, pure_loc
, src_range
, block
);
13786 set_source_range (tree expr
, location_t start
, location_t finish
)
13788 source_range src_range
;
13789 src_range
.m_start
= start
;
13790 src_range
.m_finish
= finish
;
13791 return set_source_range (expr
, src_range
);
13795 set_source_range (tree expr
, source_range src_range
)
13797 if (!EXPR_P (expr
))
13798 return UNKNOWN_LOCATION
;
13800 location_t pure_loc
= get_pure_location (EXPR_LOCATION (expr
));
13801 location_t adhoc
= COMBINE_LOCATION_DATA (line_table
,
13805 SET_EXPR_LOCATION (expr
, adhoc
);
13809 /* Return the name of combined function FN, for debugging purposes. */
13812 combined_fn_name (combined_fn fn
)
13814 if (builtin_fn_p (fn
))
13816 tree fndecl
= builtin_decl_explicit (as_builtin_fn (fn
));
13817 return IDENTIFIER_POINTER (DECL_NAME (fndecl
));
13820 return internal_fn_name (as_internal_fn (fn
));
13823 /* Return a bitmap with a bit set corresponding to each argument in
13824 a function call type FNTYPE declared with attribute nonnull,
13825 or null if none of the function's argument are nonnull. The caller
13826 must free the bitmap. */
13829 get_nonnull_args (const_tree fntype
)
13831 if (fntype
== NULL_TREE
)
13834 tree attrs
= TYPE_ATTRIBUTES (fntype
);
13838 bitmap argmap
= NULL
;
13840 /* A function declaration can specify multiple attribute nonnull,
13841 each with zero or more arguments. The loop below creates a bitmap
13842 representing a union of all the arguments. An empty (but non-null)
13843 bitmap means that all arguments have been declaraed nonnull. */
13844 for ( ; attrs
; attrs
= TREE_CHAIN (attrs
))
13846 attrs
= lookup_attribute ("nonnull", attrs
);
13851 argmap
= BITMAP_ALLOC (NULL
);
13853 if (!TREE_VALUE (attrs
))
13855 /* Clear the bitmap in case a previous attribute nonnull
13856 set it and this one overrides it for all arguments. */
13857 bitmap_clear (argmap
);
13861 /* Iterate over the indices of the format arguments declared nonnull
13862 and set a bit for each. */
13863 for (tree idx
= TREE_VALUE (attrs
); idx
; idx
= TREE_CHAIN (idx
))
13865 unsigned int val
= TREE_INT_CST_LOW (TREE_VALUE (idx
)) - 1;
13866 bitmap_set_bit (argmap
, val
);
13873 /* Returns true if TYPE is a type where it and all of its subobjects
13874 (recursively) are of structure, union, or array type. */
13877 default_is_empty_type (tree type
)
13879 if (RECORD_OR_UNION_TYPE_P (type
))
13881 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
13882 if (TREE_CODE (field
) == FIELD_DECL
13883 && !DECL_PADDING_P (field
)
13884 && !default_is_empty_type (TREE_TYPE (field
)))
13888 else if (TREE_CODE (type
) == ARRAY_TYPE
)
13889 return (integer_minus_onep (array_type_nelts (type
))
13890 || TYPE_DOMAIN (type
) == NULL_TREE
13891 || default_is_empty_type (TREE_TYPE (type
)));
13895 /* Implement TARGET_EMPTY_RECORD_P. Return true if TYPE is an empty type
13896 that shouldn't be passed via stack. */
13899 default_is_empty_record (const_tree type
)
13901 if (!abi_version_at_least (12))
13904 if (type
== error_mark_node
)
13907 if (TREE_ADDRESSABLE (type
))
13910 return default_is_empty_type (TYPE_MAIN_VARIANT (type
));
13913 /* Like int_size_in_bytes, but handle empty records specially. */
13916 arg_int_size_in_bytes (const_tree type
)
13918 return TYPE_EMPTY_P (type
) ? 0 : int_size_in_bytes (type
);
13921 /* Like size_in_bytes, but handle empty records specially. */
13924 arg_size_in_bytes (const_tree type
)
13926 return TYPE_EMPTY_P (type
) ? size_zero_node
: size_in_bytes (type
);
13929 /* Return true if an expression with CODE has to have the same result type as
13930 its first operand. */
13933 expr_type_first_operand_type_p (tree_code code
)
13946 case TRUNC_DIV_EXPR
:
13947 case CEIL_DIV_EXPR
:
13948 case FLOOR_DIV_EXPR
:
13949 case ROUND_DIV_EXPR
:
13950 case TRUNC_MOD_EXPR
:
13951 case CEIL_MOD_EXPR
:
13952 case FLOOR_MOD_EXPR
:
13953 case ROUND_MOD_EXPR
:
13955 case EXACT_DIV_EXPR
:
13973 /* List of pointer types used to declare builtins before we have seen their
13976 Keep the size up to date in tree.h ! */
13977 const builtin_structptr_type builtin_structptr_types
[6] =
13979 { fileptr_type_node
, ptr_type_node
, "FILE" },
13980 { const_tm_ptr_type_node
, const_ptr_type_node
, "tm" },
13981 { fenv_t_ptr_type_node
, ptr_type_node
, "fenv_t" },
13982 { const_fenv_t_ptr_type_node
, const_ptr_type_node
, "fenv_t" },
13983 { fexcept_t_ptr_type_node
, ptr_type_node
, "fexcept_t" },
13984 { const_fexcept_t_ptr_type_node
, const_ptr_type_node
, "fexcept_t" }
13989 namespace selftest
{
13991 /* Selftests for tree. */
13993 /* Verify that integer constants are sane. */
13996 test_integer_constants ()
13998 ASSERT_TRUE (integer_type_node
!= NULL
);
13999 ASSERT_TRUE (build_int_cst (integer_type_node
, 0) != NULL
);
14001 tree type
= integer_type_node
;
14003 tree zero
= build_zero_cst (type
);
14004 ASSERT_EQ (INTEGER_CST
, TREE_CODE (zero
));
14005 ASSERT_EQ (type
, TREE_TYPE (zero
));
14007 tree one
= build_int_cst (type
, 1);
14008 ASSERT_EQ (INTEGER_CST
, TREE_CODE (one
));
14009 ASSERT_EQ (type
, TREE_TYPE (zero
));
14012 /* Verify identifiers. */
14015 test_identifiers ()
14017 tree identifier
= get_identifier ("foo");
14018 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier
));
14019 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier
));
14022 /* Verify LABEL_DECL. */
14027 tree identifier
= get_identifier ("err");
14028 tree label_decl
= build_decl (UNKNOWN_LOCATION
, LABEL_DECL
,
14029 identifier
, void_type_node
);
14030 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl
));
14031 ASSERT_FALSE (FORCED_LABEL (label_decl
));
14034 /* Return a new VECTOR_CST node whose type is TYPE and whose values
14035 are given by VALS. */
14038 build_vector (tree type
, vec
<tree
> vals MEM_STAT_DECL
)
14040 gcc_assert (vals
.length () == TYPE_VECTOR_SUBPARTS (type
));
14041 tree_vector_builder
builder (type
, vals
.length (), 1);
14042 builder
.splice (vals
);
14043 return builder
.build ();
14046 /* Check that VECTOR_CST ACTUAL contains the elements in EXPECTED. */
14049 check_vector_cst (vec
<tree
> expected
, tree actual
)
14051 ASSERT_EQ (expected
.length (), TYPE_VECTOR_SUBPARTS (TREE_TYPE (actual
)));
14052 for (unsigned int i
= 0; i
< expected
.length (); ++i
)
14053 ASSERT_EQ (wi::to_wide (expected
[i
]),
14054 wi::to_wide (vector_cst_elt (actual
, i
)));
14057 /* Check that VECTOR_CST ACTUAL contains NPATTERNS duplicated elements,
14058 and that its elements match EXPECTED. */
14061 check_vector_cst_duplicate (vec
<tree
> expected
, tree actual
,
14062 unsigned int npatterns
)
14064 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
14065 ASSERT_EQ (1, VECTOR_CST_NELTS_PER_PATTERN (actual
));
14066 ASSERT_EQ (npatterns
, vector_cst_encoded_nelts (actual
));
14067 ASSERT_TRUE (VECTOR_CST_DUPLICATE_P (actual
));
14068 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual
));
14069 check_vector_cst (expected
, actual
);
14072 /* Check that VECTOR_CST ACTUAL contains NPATTERNS foreground elements
14073 and NPATTERNS background elements, and that its elements match
14077 check_vector_cst_fill (vec
<tree
> expected
, tree actual
,
14078 unsigned int npatterns
)
14080 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
14081 ASSERT_EQ (2, VECTOR_CST_NELTS_PER_PATTERN (actual
));
14082 ASSERT_EQ (2 * npatterns
, vector_cst_encoded_nelts (actual
));
14083 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual
));
14084 ASSERT_FALSE (VECTOR_CST_STEPPED_P (actual
));
14085 check_vector_cst (expected
, actual
);
14088 /* Check that VECTOR_CST ACTUAL contains NPATTERNS stepped patterns,
14089 and that its elements match EXPECTED. */
14092 check_vector_cst_stepped (vec
<tree
> expected
, tree actual
,
14093 unsigned int npatterns
)
14095 ASSERT_EQ (npatterns
, VECTOR_CST_NPATTERNS (actual
));
14096 ASSERT_EQ (3, VECTOR_CST_NELTS_PER_PATTERN (actual
));
14097 ASSERT_EQ (3 * npatterns
, vector_cst_encoded_nelts (actual
));
14098 ASSERT_FALSE (VECTOR_CST_DUPLICATE_P (actual
));
14099 ASSERT_TRUE (VECTOR_CST_STEPPED_P (actual
));
14100 check_vector_cst (expected
, actual
);
14103 /* Test the creation of VECTOR_CSTs. */
14106 test_vector_cst_patterns ()
14108 auto_vec
<tree
, 8> elements (8);
14109 elements
.quick_grow (8);
14110 tree element_type
= build_nonstandard_integer_type (16, true);
14111 tree vector_type
= build_vector_type (element_type
, 8);
14113 /* Test a simple linear series with a base of 0 and a step of 1:
14114 { 0, 1, 2, 3, 4, 5, 6, 7 }. */
14115 for (unsigned int i
= 0; i
< 8; ++i
)
14116 elements
[i
] = build_int_cst (element_type
, i
);
14117 check_vector_cst_stepped (elements
, build_vector (vector_type
, elements
), 1);
14119 /* Try the same with the first element replaced by 100:
14120 { 100, 1, 2, 3, 4, 5, 6, 7 }. */
14121 elements
[0] = build_int_cst (element_type
, 100);
14122 check_vector_cst_stepped (elements
, build_vector (vector_type
, elements
), 1);
14124 /* Try a series that wraps around.
14125 { 100, 65531, 65532, 65533, 65534, 65535, 0, 1 }. */
14126 for (unsigned int i
= 1; i
< 8; ++i
)
14127 elements
[i
] = build_int_cst (element_type
, (65530 + i
) & 0xffff);
14128 check_vector_cst_stepped (elements
, build_vector (vector_type
, elements
), 1);
14130 /* Try a downward series:
14131 { 100, 79, 78, 77, 76, 75, 75, 73 }. */
14132 for (unsigned int i
= 1; i
< 8; ++i
)
14133 elements
[i
] = build_int_cst (element_type
, 80 - i
);
14134 check_vector_cst_stepped (elements
, build_vector (vector_type
, elements
), 1);
14136 /* Try two interleaved series with different bases and steps:
14137 { 100, 53, 66, 206, 62, 212, 58, 218 }. */
14138 elements
[1] = build_int_cst (element_type
, 53);
14139 for (unsigned int i
= 2; i
< 8; i
+= 2)
14141 elements
[i
] = build_int_cst (element_type
, 70 - i
* 2);
14142 elements
[i
+ 1] = build_int_cst (element_type
, 200 + i
* 3);
14144 check_vector_cst_stepped (elements
, build_vector (vector_type
, elements
), 2);
14146 /* Try a duplicated value:
14147 { 100, 100, 100, 100, 100, 100, 100, 100 }. */
14148 for (unsigned int i
= 1; i
< 8; ++i
)
14149 elements
[i
] = elements
[0];
14150 check_vector_cst_duplicate (elements
,
14151 build_vector (vector_type
, elements
), 1);
14153 /* Try an interleaved duplicated value:
14154 { 100, 55, 100, 55, 100, 55, 100, 55 }. */
14155 elements
[1] = build_int_cst (element_type
, 55);
14156 for (unsigned int i
= 2; i
< 8; ++i
)
14157 elements
[i
] = elements
[i
- 2];
14158 check_vector_cst_duplicate (elements
,
14159 build_vector (vector_type
, elements
), 2);
14161 /* Try a duplicated value with 2 exceptions
14162 { 41, 97, 100, 55, 100, 55, 100, 55 }. */
14163 elements
[0] = build_int_cst (element_type
, 41);
14164 elements
[1] = build_int_cst (element_type
, 97);
14165 check_vector_cst_fill (elements
, build_vector (vector_type
, elements
), 2);
14167 /* Try with and without a step
14168 { 41, 97, 100, 21, 100, 35, 100, 49 }. */
14169 for (unsigned int i
= 3; i
< 8; i
+= 2)
14170 elements
[i
] = build_int_cst (element_type
, i
* 7);
14171 check_vector_cst_stepped (elements
, build_vector (vector_type
, elements
), 2);
14173 /* Try a fully-general constant:
14174 { 41, 97, 100, 21, 100, 9990, 100, 49 }. */
14175 elements
[5] = build_int_cst (element_type
, 9990);
14176 check_vector_cst_fill (elements
, build_vector (vector_type
, elements
), 4);
14179 /* Run all of the selftests within this file. */
14184 test_integer_constants ();
14185 test_identifiers ();
14187 test_vector_cst_patterns ();
14190 } // namespace selftest
14192 #endif /* CHECKING_P */
14194 #include "gt-tree.h"