/* Language-independent node constructors for parse phase of GNU compiler.
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains the low-level primitives for operating on tree nodes,
   including allocation, list operations, interning of identifiers,
   construction of data type nodes and statement nodes,
   and construction of type conversion nodes.  It also contains
   tables indexed by tree code that describe how to take apart
   nodes of that code.

   It is intended to be language-independent, but it occasionally
   calls language-dependent routines.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "toplev.h"  /* get_random_seed */
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "langhooks-def.h"
#include "tree-diagnostic.h"
#include "print-tree.h"
#include "ipa-utils.h"
/* Tree code classes.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
/* Names of tree components.
   Used for printing out the tree and error messages.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
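/* Illustrative sketch, not part of the original sources: how the three
   per-code tables above are meant to be consulted (PLUS_EXPR is just an
   arbitrary tree code used as an example):

     gcc_assert (tree_code_type[PLUS_EXPR] == tcc_binary);
     gcc_assert (tree_code_length[PLUS_EXPR] == 2);

   and tree_code_name[PLUS_EXPR] is the string "plus_expr" used by debug
   dumps and diagnostics.  */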
/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries.  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};
/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);
/* Statistics-gathering stuff.  */

static int tree_code_counts[MAX_TREE_CODES];
int tree_node_counts[(int) all_kinds];
int tree_node_sizes[(int) all_kinds];
/* Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  */
static GTY(()) int next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;
/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000
struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  static int
  keep_cache_entry (type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};
/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatedly.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
/* General tree->tree mapping structure for use in hash tables.  */

static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  static int
  keep_cache_entry (tree_vec_map *&m)
  {
    return ggc_marked_p (m->base.from);
  }
};

static GTY ((cache))
     hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);
static void type_hash_list (const_tree, inchash::hash &);
static void attribute_hash_list (const_tree, inchash::hash &);
tree global_trees[TI_MAX];
tree integer_types[itk_none];

bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees[NUM_INT_N_ENTS];

unsigned char tree_contains_struct[MAX_TREE_CODES][64];
/* Number of operands for each OpenMP clause.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  5, /* OMP_CLAUSE_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  2, /* OMP_CLAUSE_ALIGNED  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_UNIFORM  */
  1, /* OMP_CLAUSE_TO_DECLARE  */
  1, /* OMP_CLAUSE_LINK  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  1, /* OMP_CLAUSE_USE_DEVICE_PTR  */
  1, /* OMP_CLAUSE_IS_DEVICE_PTR  */
  2, /* OMP_CLAUSE__CACHE_  */
  1, /* OMP_CLAUSE_DEVICE_RESIDENT  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  1, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE_PRIORITY  */
  1, /* OMP_CLAUSE_GRAINSIZE  */
  1, /* OMP_CLAUSE_NUM_TASKS  */
  0, /* OMP_CLAUSE_NOGROUP  */
  0, /* OMP_CLAUSE_THREADS  */
  0, /* OMP_CLAUSE_SIMD  */
  1, /* OMP_CLAUSE_HINT  */
  0, /* OMP_CLAUSE_DEFAULTMAP  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  1, /* OMP_CLAUSE__CILK_FOR_COUNT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
  1, /* OMP_CLAUSE_TILE  */
  2, /* OMP_CLAUSE__GRIDDIM_  */
};
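/* Illustrative sketch, not part of the original sources: the table above is
   indexed by an OMP_CLAUSE_CODE, e.g.

     gcc_assert (omp_clause_num_ops[OMP_CLAUSE_MAP] == 2);

   and tree_size () below relies on the same table to size OMP_CLAUSE
   nodes.  */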
const char * const omp_clause_code_name[] =
/* Return the tree node structure used by tree code CODE.  */

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      {
        switch (code)
          {
          case FIELD_DECL:
            return TS_FIELD_DECL;
          case PARM_DECL:
            return TS_PARM_DECL;
          case VAR_DECL:
            return TS_VAR_DECL;
          case LABEL_DECL:
            return TS_LABEL_DECL;
          case RESULT_DECL:
            return TS_RESULT_DECL;
          case DEBUG_EXPR_DECL:
            return TS_DECL_WRTL;
          case CONST_DECL:
            return TS_CONST_DECL;
          case TYPE_DECL:
            return TS_TYPE_DECL;
          case FUNCTION_DECL:
            return TS_FUNCTION_DECL;
          case TRANSLATION_UNIT_DECL:
            return TS_TRANSLATION_UNIT_DECL;
          default:
            return TS_DECL_NON_COMMON;
          }
      }
    case tcc_type:
      return TS_TYPE_NON_COMMON;
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
    case tcc_statement:
    case tcc_vl_exp:
      return TS_EXP;
    default:  /* tcc_constant and tcc_exceptional */
      break;
    }
  switch (code)
    {
      /* tcc_constant cases.  */
    case VOID_CST:              return TS_TYPED;
    case INTEGER_CST:           return TS_INT_CST;
    case REAL_CST:              return TS_REAL_CST;
    case FIXED_CST:             return TS_FIXED_CST;
    case COMPLEX_CST:           return TS_COMPLEX;
    case VECTOR_CST:            return TS_VECTOR;
    case STRING_CST:            return TS_STRING;
      /* tcc_exceptional cases.  */
    case ERROR_MARK:            return TS_COMMON;
    case IDENTIFIER_NODE:       return TS_IDENTIFIER;
    case TREE_LIST:             return TS_LIST;
    case TREE_VEC:              return TS_VEC;
    case SSA_NAME:              return TS_SSA_NAME;
    case PLACEHOLDER_EXPR:      return TS_COMMON;
    case STATEMENT_LIST:        return TS_STATEMENT_LIST;
    case BLOCK:                 return TS_BLOCK;
    case CONSTRUCTOR:           return TS_CONSTRUCTOR;
    case TREE_BINFO:            return TS_BINFO;
    case OMP_CLAUSE:            return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE:     return TS_OPTIMIZATION;
    case TARGET_OPTION_NODE:    return TS_TARGET_OPTION;
    default:
      gcc_unreachable ();
    }
}
486 /* Initialize tree_contains_struct to describe the hierarchy of tree
490 initialize_tree_contains_struct (void)
494 for (i
= ERROR_MARK
; i
< LAST_AND_UNUSED_TREE_CODE
; i
++)
497 enum tree_node_structure_enum ts_code
;
499 code
= (enum tree_code
) i
;
500 ts_code
= tree_node_structure_for_code (code
);
502 /* Mark the TS structure itself. */
503 tree_contains_struct
[code
][ts_code
] = 1;
505 /* Mark all the structures that TS is derived from. */
523 case TS_STATEMENT_LIST
:
524 MARK_TS_TYPED (code
);
528 case TS_DECL_MINIMAL
:
534 case TS_OPTIMIZATION
:
535 case TS_TARGET_OPTION
:
536 MARK_TS_COMMON (code
);
539 case TS_TYPE_WITH_LANG_SPECIFIC
:
540 MARK_TS_TYPE_COMMON (code
);
543 case TS_TYPE_NON_COMMON
:
544 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code
);
548 MARK_TS_DECL_MINIMAL (code
);
553 MARK_TS_DECL_COMMON (code
);
556 case TS_DECL_NON_COMMON
:
557 MARK_TS_DECL_WITH_VIS (code
);
560 case TS_DECL_WITH_VIS
:
564 MARK_TS_DECL_WRTL (code
);
568 MARK_TS_DECL_COMMON (code
);
572 MARK_TS_DECL_WITH_VIS (code
);
576 case TS_FUNCTION_DECL
:
577 MARK_TS_DECL_NON_COMMON (code
);
580 case TS_TRANSLATION_UNIT_DECL
:
581 MARK_TS_DECL_COMMON (code
);
  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}
/* The name of the object as the assembler will see it (but before any
   translations made by ASM_OUTPUT_LABELREF).  Often this is the same
   as DECL_NAME.  It is an IDENTIFIER_NODE.  */

tree
decl_assembler_name (tree decl)
{
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    lang_hooks.set_decl_assembler_name (decl);
  return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
}
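/* Illustrative sketch, not part of the original sources: given some
   FUNCTION_DECL fndecl (a hypothetical decl supplied by a front end),
   the mangled name can be inspected as

     tree id = decl_assembler_name (fndecl);
     const char *name = IDENTIFIER_POINTER (id);  */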
/* When the target supports COMDAT groups, this indicates which group the
   DECL is associated with.  This can be either an IDENTIFIER_NODE or a
   decl, in which case its DECL_ASSEMBLER_NAME identifies the group.  */

tree
decl_comdat_group (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_comdat_group ();
}

/* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE.  */

tree
decl_comdat_group_id (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_comdat_group_id ();
}
/* When the target supports named sections, return the section name as an
   IDENTIFIER_NODE or NULL if it is in no section.  */

const char *
decl_section_name (const_tree node)
{
  struct symtab_node *snode = symtab_node::get (node);
  if (!snode)
    return NULL;
  return snode->get_section ();
}

/* Set section name of NODE to VALUE (that is expected to be an
   identifier).  */

void
set_decl_section_name (tree node, const char *value)
{
  struct symtab_node *snode;

  if (value == NULL)
    {
      snode = symtab_node::get (node);
      if (!snode)
        return;
    }
  else if (TREE_CODE (node) == VAR_DECL)
    snode = varpool_node::get_create (node);
  else
    snode = cgraph_node::get_create (node);
  snode->set_section (value);
}
/* Return TLS model of a variable NODE.  */

enum tls_model
decl_tls_model (const_tree node)
{
  struct varpool_node *snode = varpool_node::get (node);
  if (!snode)
    return TLS_MODEL_NONE;
  return snode->tls_model;
}

/* Set TLS model of variable NODE to MODEL.  */

void
set_decl_tls_model (tree node, enum tls_model model)
{
  struct varpool_node *vnode;

  if (model == TLS_MODEL_NONE)
    {
      vnode = varpool_node::get (node);
      if (!vnode)
        return;
    }
  else
    vnode = varpool_node::get_create (node);
  vnode->tls_model = model;
}
/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      {
        switch (code)
          {
          case FIELD_DECL:
            return sizeof (struct tree_field_decl);
          case PARM_DECL:
            return sizeof (struct tree_parm_decl);
          case VAR_DECL:
            return sizeof (struct tree_var_decl);
          case LABEL_DECL:
            return sizeof (struct tree_label_decl);
          case RESULT_DECL:
            return sizeof (struct tree_result_decl);
          case CONST_DECL:
            return sizeof (struct tree_const_decl);
          case TYPE_DECL:
            return sizeof (struct tree_type_decl);
          case FUNCTION_DECL:
            return sizeof (struct tree_function_decl);
          case DEBUG_EXPR_DECL:
            return sizeof (struct tree_decl_with_rtl);
          case TRANSLATION_UNIT_DECL:
            return sizeof (struct tree_translation_unit_decl);
          case NAMELIST_DECL:
          case IMPORTED_DECL:
            return sizeof (struct tree_decl_non_common);
          default:
            return lang_hooks.tree_size (code);
          }
      }

    case tcc_type:  /* a type node */
      return sizeof (struct tree_type_non_common);

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      return (sizeof (struct tree_exp)
              + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
        {
        case VOID_CST:          return sizeof (struct tree_typed);
        case INTEGER_CST:       gcc_unreachable ();
        case REAL_CST:          return sizeof (struct tree_real_cst);
        case FIXED_CST:         return sizeof (struct tree_fixed_cst);
        case COMPLEX_CST:       return sizeof (struct tree_complex);
        case VECTOR_CST:        return sizeof (struct tree_vector);
        case STRING_CST:        gcc_unreachable ();
        default:
          return lang_hooks.tree_size (code);
        }

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
        {
        case IDENTIFIER_NODE:   return lang_hooks.identifier_size;
        case TREE_LIST:         return sizeof (struct tree_list);

        case ERROR_MARK:
        case PLACEHOLDER_EXPR:  return sizeof (struct tree_common);

        case TREE_VEC:
        case OMP_CLAUSE:        gcc_unreachable ();

        case SSA_NAME:          return sizeof (struct tree_ssa_name);

        case STATEMENT_LIST:    return sizeof (struct tree_statement_list);
        case BLOCK:             return sizeof (struct tree_block);
        case CONSTRUCTOR:       return sizeof (struct tree_constructor);
        case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
        case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);

        default:
          return lang_hooks.tree_size (code);
        }

    default:
      gcc_unreachable ();
    }
}
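/* Illustrative sketch, not part of the original sources: for fixed-size
   codes the size depends only on the code, e.g.

     gcc_assert (tree_code_size (REAL_CST) == sizeof (struct tree_real_cst));

   whereas variable-sized codes (TREE_VEC, INTEGER_CST, STRING_CST,
   CALL_EXPR) must go through tree_size () on a concrete node instead.  */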
/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes.  */
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      return (sizeof (struct tree_int_cst)
              + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      return (offsetof (struct tree_binfo, base_binfos)
              + vec<tree, va_gc>
                  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
              + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      return (sizeof (struct tree_vector)
              + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));

    case STRING_CST:
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
              + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
                * sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
        return (sizeof (struct tree_exp)
                + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
        return tree_code_size (code);
    }
}
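/* Illustrative sketch, not part of the original sources: on a concrete
   node the two routines agree for fixed-size codes,

     tree t = build_real (double_type_node, dconst0);
     gcc_assert (tree_size (t) == tree_code_size (REAL_CST));

   but only tree_size () is valid for, say, a STRING_CST.  */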
892 /* Record interesting allocation statistics for a tree node with CODE
896 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED
,
897 size_t length ATTRIBUTE_UNUSED
)
899 enum tree_code_class type
= TREE_CODE_CLASS (code
);
902 if (!GATHER_STATISTICS
)
907 case tcc_declaration
: /* A decl node */
911 case tcc_type
: /* a type node */
915 case tcc_statement
: /* an expression with side effects */
919 case tcc_reference
: /* a reference */
923 case tcc_expression
: /* an expression */
924 case tcc_comparison
: /* a comparison expression */
925 case tcc_unary
: /* a unary arithmetic expression */
926 case tcc_binary
: /* a binary arithmetic expression */
930 case tcc_constant
: /* a constant */
934 case tcc_exceptional
: /* something random, like an identifier. */
937 case IDENTIFIER_NODE
:
950 kind
= ssa_name_kind
;
962 kind
= omp_clause_kind
;
979 tree_code_counts
[(int) code
]++;
980 tree_node_counts
[(int) kind
]++;
981 tree_node_sizes
[(int) kind
] += length
;
/* Allocate and return a new UID from the DECL_UID namespace.  */

int
allocate_decl_uid (void)
{
  return next_decl_uid++;
}

/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node_stat (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
        {
          if (code == FUNCTION_DECL)
            {
              DECL_ALIGN (t) = FUNCTION_BOUNDARY;
              DECL_MODE (t) = FUNCTION_MODE;
            }
          else
            DECL_ALIGN (t) = 1;
        }
      DECL_SOURCE_LOCATION (t) = input_location;
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
        DECL_UID (t) = --next_debug_decl_uid;
      else
        {
          DECL_UID (t) = allocate_decl_uid ();
          SET_DECL_PT_UID (t, -1);
        }
      if (TREE_CODE (t) == LABEL_DECL)
        LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      TYPE_ALIGN (t) = BITS_PER_UNIT;
      TYPE_USER_ALIGN (t) = 0;
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
        {
        case PREDECREMENT_EXPR:
        case PREINCREMENT_EXPR:
        case POSTDECREMENT_EXPR:
        case POSTINCREMENT_EXPR:
          /* All of these have side-effects, no matter what their
             operands are.  */
          TREE_SIDE_EFFECTS (t) = 1;
          break;

        default:
          break;
        }
      break;

    case tcc_exceptional:
      switch (code)
        {
        case TARGET_OPTION_NODE:
          TREE_TARGET_OPTION (t)
            = ggc_cleared_alloc<struct cl_target_option> ();
          break;

        case OPTIMIZATION_NODE:
          TREE_OPTIMIZATION (t)
            = ggc_cleared_alloc<struct cl_optimization> ();
          break;

        default:
          break;
        }
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}
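/* Illustrative sketch, not part of the original sources: a bare node fresh
   from make_node () is zero-initialized apart from the per-class setup
   above, e.g.

     tree b = make_node (BLOCK);
     gcc_assert (BLOCK_VARS (b) == NULL_TREE && !TREE_USED (b));

   (BLOCK is a convenient example because it needs no extra operands).  */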
/* Free tree node.  */

void
free_node (tree node)
{
  enum tree_code code = TREE_CODE (node);
  if (GATHER_STATISTICS)
    {
      tree_code_counts[(int) TREE_CODE (node)]--;
      tree_node_counts[(int) t_kind]--;
      tree_node_sizes[(int) t_kind] -= tree_code_size (TREE_CODE (node));
    }
  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    vec_free (CONSTRUCTOR_ELTS (node));
  else if (code == BLOCK)
    vec_free (BLOCK_NONLOCALIZED_VARS (node));
  else if (code == TREE_BINFO)
    vec_free (BINFO_BASE_ACCESSES (node));
  ggc_free (node);
}
/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node_stat (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      if (code == DEBUG_EXPR_DECL)
        DECL_UID (t) = --next_debug_decl_uid;
      else
        {
          DECL_UID (t) = allocate_decl_uid ();
          if (DECL_PT_UID_SET_P (node))
            SET_DECL_PT_UID (t, DECL_PT_UID (node));
        }
      if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
          && DECL_HAS_VALUE_EXPR_P (node))
        {
          SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
          DECL_HAS_VALUE_EXPR_P (t) = 1;
        }
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (TREE_CODE (node) == VAR_DECL)
        {
          DECL_HAS_DEBUG_EXPR_P (t) = 0;
          t->decl_with_vis.symtab_node = NULL;
        }
      if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
        {
          SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
          DECL_HAS_INIT_PRIORITY_P (t) = 1;
        }
      if (TREE_CODE (node) == FUNCTION_DECL)
        {
          DECL_STRUCT_FUNCTION (t) = NULL;
          t->decl_with_vis.symtab_node = NULL;
        }
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
         the copy is different from the original type.
         The two statements usually duplicate each other
         (because they clear fields of the same union),
         but the optimizer should catch that.  */
      TYPE_SYMTAB_POINTER (t) = 0;
      TYPE_SYMTAB_ADDRESS (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
        {
          TYPE_CACHED_VALUES_P (t) = 0;
          TYPE_CACHED_VALUES (t) = NULL_TREE;
        }
    }
  else if (code == TARGET_OPTION_NODE)
    {
      TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
      memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
              sizeof (struct cl_target_option));
    }
  else if (code == OPTIMIZATION_NODE)
    {
      TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
      memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
              sizeof (struct cl_optimization));
    }

  return t;
}
/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  */

tree
copy_list (tree list)
{
  tree head;
  tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  next = TREE_CHAIN (list);
  while (next)
    {
      TREE_CHAIN (prev) = copy_node (next);
      prev = TREE_CHAIN (prev);
      next = TREE_CHAIN (next);
    }
  return head;
}
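/* Illustrative sketch, not part of the original sources:

     tree orig = tree_cons (NULL_TREE, integer_zero_node,
                            tree_cons (NULL_TREE, integer_one_node, NULL_TREE));
     tree dup = copy_list (orig);

   dup is a fresh two-element TREE_LIST chain; only the list cells are
   copied, the TREE_VALUEs themselves remain shared.  */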
/* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
   INTEGER_CST with value CST and type TYPE.  */

static unsigned int
get_int_cst_ext_nunits (tree type, const wide_int &cst)
{
  gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
  /* We need extra HWIs if CST is an unsigned integer with its
     upper bit set.  */
  if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
    return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
  return cst.get_len ();
}

/* Return a new INTEGER_CST with value CST and type TYPE.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
        = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
        TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
           && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      len--;
      TREE_INT_CST_ELT (nt, len)
        = zext_hwi (cst.elt (len),
                    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}
/* Create an INT_CST node with a LOW value sign extended to TYPE.  */

tree
build_int_cst (tree type, HOST_WIDE_INT low)
{
  /* Support legacy code.  */
  if (!type)
    type = integer_type_node;

  return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
}
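/* Illustrative sketch, not part of the original sources: small values come
   back as shared, cached nodes, so pointer equality holds for them:

     tree a = build_int_cst (integer_type_node, 42);
     tree b = build_int_cst (integer_type_node, 42);
     gcc_assert (a == b && tree_to_shwi (a) == 42);  */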
tree
build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
{
  return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
}

/* Create an INT_CST node with a LOW value sign extended to TYPE.  */

tree
build_int_cst_type (tree type, HOST_WIDE_INT low)
{
  gcc_assert (type);
  return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
}

/* Constructs a tree in type TYPE with the value given by CST.  Signedness
   of CST is assumed to be the same as the signedness of TYPE.  */

tree
double_int_to_tree (tree type, double_int cst)
{
  return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
}
/* We force the wide_int CST to the range of the type TYPE by sign or
   zero extending it.  OVERFLOWABLE indicates if we are interested in
   overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs,
	or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended wide_int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type (tree type, const wide_int_ref &cst,
                int overflowable, bool overflowed)
{
  signop sign = TYPE_SIGN (type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || !wi::fits_to_tree_p (cst, type))
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign == SIGNED))
        {
          wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
          tree t = build_new_int_cst (type, tmp);
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return wide_int_to_tree (type, cst);
}
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

hashval_t
int_cst_hasher::hash (tree x)
{
  const_tree const t = x;
  hashval_t code = TYPE_UID (TREE_TYPE (t));
  int i;

  for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
    code = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), code);

  return code;
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST tree node.  */

bool
int_cst_hasher::equal (tree x, tree y)
{
  const_tree const xt = x;
  const_tree const yt = y;

  if (TREE_TYPE (xt) != TREE_TYPE (yt)
      || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
      || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
    return false;

  for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
    if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
      return false;

  return true;
}
1401 /* Create an INT_CST node of TYPE and value CST.
1402 The returned node is always shared. For small integers we use a
1403 per-type vector cache, for larger ones we use a single hash table.
1404 The value is extended from its precision according to the sign of
1405 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1406 the upper bits and ensures that hashing and value equality based
1407 upon the underlying HOST_WIDE_INTs works without masking. */
1410 wide_int_to_tree (tree type
, const wide_int_ref
&pcst
)
1417 unsigned int prec
= TYPE_PRECISION (type
);
1418 signop sgn
= TYPE_SIGN (type
);
1420 /* Verify that everything is canonical. */
1421 int l
= pcst
.get_len ();
1424 if (pcst
.elt (l
- 1) == 0)
1425 gcc_checking_assert (pcst
.elt (l
- 2) < 0);
1426 if (pcst
.elt (l
- 1) == (HOST_WIDE_INT
) -1)
1427 gcc_checking_assert (pcst
.elt (l
- 2) >= 0);
1430 wide_int cst
= wide_int::from (pcst
, prec
, sgn
);
1431 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1435 /* We just need to store a single HOST_WIDE_INT. */
1437 if (TYPE_UNSIGNED (type
))
1438 hwi
= cst
.to_uhwi ();
1440 hwi
= cst
.to_shwi ();
1442 switch (TREE_CODE (type
))
1445 gcc_assert (hwi
== 0);
1449 case REFERENCE_TYPE
:
1450 case POINTER_BOUNDS_TYPE
:
1451 /* Cache NULL pointer and zero bounds. */
1460 /* Cache false or true. */
1462 if (IN_RANGE (hwi
, 0, 1))
1468 if (TYPE_SIGN (type
) == UNSIGNED
)
1471 limit
= INTEGER_SHARE_LIMIT
;
1472 if (IN_RANGE (hwi
, 0, INTEGER_SHARE_LIMIT
- 1))
1477 /* Cache [-1, N). */
1478 limit
= INTEGER_SHARE_LIMIT
+ 1;
1479 if (IN_RANGE (hwi
, -1, INTEGER_SHARE_LIMIT
- 1))
1493 /* Look for it in the type's vector of small shared ints. */
1494 if (!TYPE_CACHED_VALUES_P (type
))
1496 TYPE_CACHED_VALUES_P (type
) = 1;
1497 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1500 t
= TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
);
1502 /* Make sure no one is clobbering the shared constant. */
1503 gcc_checking_assert (TREE_TYPE (t
) == type
1504 && TREE_INT_CST_NUNITS (t
) == 1
1505 && TREE_INT_CST_OFFSET_NUNITS (t
) == 1
1506 && TREE_INT_CST_EXT_NUNITS (t
) == 1
1507 && TREE_INT_CST_ELT (t
, 0) == hwi
);
1510 /* Create a new shared int. */
1511 t
= build_new_int_cst (type
, cst
);
1512 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1517 /* Use the cache of larger shared ints, using int_cst_node as
1520 TREE_INT_CST_ELT (int_cst_node
, 0) = hwi
;
1521 TREE_TYPE (int_cst_node
) = type
;
1523 tree
*slot
= int_cst_hash_table
->find_slot (int_cst_node
, INSERT
);
1527 /* Insert this one into the hash table. */
1530 /* Make a new node for next time round. */
1531 int_cst_node
= make_int_cst (1, 1);
1537 /* The value either hashes properly or we drop it on the floor
1538 for the gc to take care of. There will not be enough of them
1541 tree nt
= build_new_int_cst (type
, cst
);
1542 tree
*slot
= int_cst_hash_table
->find_slot (nt
, INSERT
);
1546 /* Insert this one into the hash table. */
1556 cache_integer_cst (tree t
)
1558 tree type
= TREE_TYPE (t
);
1561 int prec
= TYPE_PRECISION (type
);
1563 gcc_assert (!TREE_OVERFLOW (t
));
1565 switch (TREE_CODE (type
))
1568 gcc_assert (integer_zerop (t
));
1572 case REFERENCE_TYPE
:
1573 /* Cache NULL pointer. */
1574 if (integer_zerop (t
))
1582 /* Cache false or true. */
1584 if (wi::ltu_p (t
, 2))
1585 ix
= TREE_INT_CST_ELT (t
, 0);
1590 if (TYPE_UNSIGNED (type
))
1593 limit
= INTEGER_SHARE_LIMIT
;
1595 /* This is a little hokie, but if the prec is smaller than
1596 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1597 obvious test will not get the correct answer. */
1598 if (prec
< HOST_BITS_PER_WIDE_INT
)
1600 if (tree_to_uhwi (t
) < (unsigned HOST_WIDE_INT
) INTEGER_SHARE_LIMIT
)
1601 ix
= tree_to_uhwi (t
);
1603 else if (wi::ltu_p (t
, INTEGER_SHARE_LIMIT
))
1604 ix
= tree_to_uhwi (t
);
1609 limit
= INTEGER_SHARE_LIMIT
+ 1;
1611 if (integer_minus_onep (t
))
1613 else if (!wi::neg_p (t
))
1615 if (prec
< HOST_BITS_PER_WIDE_INT
)
1617 if (tree_to_shwi (t
) < INTEGER_SHARE_LIMIT
)
1618 ix
= tree_to_shwi (t
) + 1;
1620 else if (wi::ltu_p (t
, INTEGER_SHARE_LIMIT
))
1621 ix
= tree_to_shwi (t
) + 1;
1635 /* Look for it in the type's vector of small shared ints. */
1636 if (!TYPE_CACHED_VALUES_P (type
))
1638 TYPE_CACHED_VALUES_P (type
) = 1;
1639 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1642 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) == NULL_TREE
);
1643 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1647 /* Use the cache of larger shared ints. */
1648 tree
*slot
= int_cst_hash_table
->find_slot (t
, INSERT
);
1649 /* If there is already an entry for the number verify it's the
1652 gcc_assert (wi::eq_p (tree (*slot
), t
));
1654 /* Otherwise insert this one into the hash table. */
/* Builds an integer constant in TYPE such that the lowest BITS bits are ones
   and the rest are zeros.  */

tree
build_low_bits_mask (tree type, unsigned bits)
{
  gcc_assert (bits <= TYPE_PRECISION (type));

  return wide_int_to_tree (type, wi::mask (bits, false,
                                           TYPE_PRECISION (type)));
}
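/* Illustrative sketch, not part of the original sources:

     tree mask = build_low_bits_mask (unsigned_type_node, 8);

   yields the constant 0xff in unsigned int, i.e. wi::eq_p (mask, 0xff).  */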
/* Checks that X is an integer constant that can be expressed in (unsigned)
   HOST_WIDE_INT without loss of precision.  */

bool
cst_and_fits_in_hwi (const_tree x)
{
  if (TREE_CODE (x) != INTEGER_CST)
    return false;

  if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
    return false;

  return TREE_INT_CST_NUNITS (x) == 1;
}
1687 /* Build a newly constructed VECTOR_CST node of length LEN. */
1690 make_vector_stat (unsigned len MEM_STAT_DECL
)
1693 unsigned length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vector
);
1695 record_node_allocation_statistics (VECTOR_CST
, length
);
1697 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1699 TREE_SET_CODE (t
, VECTOR_CST
);
1700 TREE_CONSTANT (t
) = 1;
1705 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1706 are in a list pointed to by VALS. */
1709 build_vector_stat (tree type
, tree
*vals MEM_STAT_DECL
)
1713 tree v
= make_vector (TYPE_VECTOR_SUBPARTS (type
));
1714 TREE_TYPE (v
) = type
;
1716 /* Iterate through elements and check for overflow. */
1717 for (cnt
= 0; cnt
< TYPE_VECTOR_SUBPARTS (type
); ++cnt
)
1719 tree value
= vals
[cnt
];
1721 VECTOR_CST_ELT (v
, cnt
) = value
;
1723 /* Don't crash if we get an address constant. */
1724 if (!CONSTANT_CLASS_P (value
))
1727 over
|= TREE_OVERFLOW (value
);
1730 TREE_OVERFLOW (v
) = over
;
1734 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1735 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1738 build_vector_from_ctor (tree type
, vec
<constructor_elt
, va_gc
> *v
)
1740 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
1741 unsigned HOST_WIDE_INT idx
, pos
= 0;
1744 FOR_EACH_CONSTRUCTOR_VALUE (v
, idx
, value
)
1746 if (TREE_CODE (value
) == VECTOR_CST
)
1747 for (unsigned i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
1748 vec
[pos
++] = VECTOR_CST_ELT (value
, i
);
1752 for (; idx
< TYPE_VECTOR_SUBPARTS (type
); ++idx
)
1753 vec
[pos
++] = build_zero_cst (TREE_TYPE (type
));
1755 return build_vector (type
, vec
);
/* Build a vector of type VECTYPE where all the elements are SCs.  */

tree
build_vector_from_val (tree vectype, tree sc)
{
  int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
                                           TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      tree *v = XALLOCAVEC (tree, nunits);
      for (i = 0; i < nunits; ++i)
        v[i] = sc;
      return build_vector (vectype, v);
    }
  else
    {
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
        CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}
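/* Illustrative sketch, not part of the original sources: splatting a
   constant produces a VECTOR_CST, while a non-constant falls back to a
   CONSTRUCTOR.  With a hypothetical 4 x int vector type v4si_type:

     tree splat = build_vector_from_val (v4si_type, integer_one_node);
     gcc_assert (TREE_CODE (splat) == VECTOR_CST);  */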
1793 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1794 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1797 recompute_constructor_flags (tree c
)
1801 bool constant_p
= true;
1802 bool side_effects_p
= false;
1803 vec
<constructor_elt
, va_gc
> *vals
= CONSTRUCTOR_ELTS (c
);
1805 FOR_EACH_CONSTRUCTOR_VALUE (vals
, i
, val
)
1807 /* Mostly ctors will have elts that don't have side-effects, so
1808 the usual case is to scan all the elements. Hence a single
1809 loop for both const and side effects, rather than one loop
1810 each (with early outs). */
1811 if (!TREE_CONSTANT (val
))
1813 if (TREE_SIDE_EFFECTS (val
))
1814 side_effects_p
= true;
1817 TREE_SIDE_EFFECTS (c
) = side_effects_p
;
1818 TREE_CONSTANT (c
) = constant_p
;
1821 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1825 verify_constructor_flags (tree c
)
1829 bool constant_p
= TREE_CONSTANT (c
);
1830 bool side_effects_p
= TREE_SIDE_EFFECTS (c
);
1831 vec
<constructor_elt
, va_gc
> *vals
= CONSTRUCTOR_ELTS (c
);
1833 FOR_EACH_CONSTRUCTOR_VALUE (vals
, i
, val
)
1835 if (constant_p
&& !TREE_CONSTANT (val
))
1836 internal_error ("non-constant element in constant CONSTRUCTOR");
1837 if (!side_effects_p
&& TREE_SIDE_EFFECTS (val
))
1838 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
1842 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1843 are in the vec pointed to by VALS. */
1845 build_constructor (tree type
, vec
<constructor_elt
, va_gc
> *vals
)
1847 tree c
= make_node (CONSTRUCTOR
);
1849 TREE_TYPE (c
) = type
;
1850 CONSTRUCTOR_ELTS (c
) = vals
;
1852 recompute_constructor_flags (c
);
1857 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1860 build_constructor_single (tree type
, tree index
, tree value
)
1862 vec
<constructor_elt
, va_gc
> *v
;
1863 constructor_elt elt
= {index
, value
};
1866 v
->quick_push (elt
);
1868 return build_constructor (type
, v
);
1872 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1873 are in a list pointed to by VALS. */
1875 build_constructor_from_list (tree type
, tree vals
)
1878 vec
<constructor_elt
, va_gc
> *v
= NULL
;
1882 vec_alloc (v
, list_length (vals
));
1883 for (t
= vals
; t
; t
= TREE_CHAIN (t
))
1884 CONSTRUCTOR_APPEND_ELT (v
, TREE_PURPOSE (t
), TREE_VALUE (t
));
1887 return build_constructor (type
, v
);
/* Return a new CONSTRUCTOR node whose type is TYPE.  NELTS is the number
   of elements, provided as index/value pairs.  */

tree
build_constructor_va (tree type, int nelts, ...)
{
  vec<constructor_elt, va_gc> *v = NULL;
  va_list p;

  va_start (p, nelts);
  vec_alloc (v, nelts);
  while (nelts--)
    {
      tree index = va_arg (p, tree);
      tree value = va_arg (p, tree);
      CONSTRUCTOR_APPEND_ELT (v, index, value);
    }
  va_end (p);
  return build_constructor (type, v);
}
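/* Illustrative sketch, not part of the original sources: building a
   two-element array initializer { [0] = 0, [1] = 1 } for a hypothetical
   array type atype:

     tree ctor = build_constructor_va (atype, 2,
                                       size_int (0), integer_zero_node,
                                       size_int (1), integer_one_node);  */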
1911 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1914 build_fixed (tree type
, FIXED_VALUE_TYPE f
)
1917 FIXED_VALUE_TYPE
*fp
;
1919 v
= make_node (FIXED_CST
);
1920 fp
= ggc_alloc
<fixed_value
> ();
1921 memcpy (fp
, &f
, sizeof (FIXED_VALUE_TYPE
));
1923 TREE_TYPE (v
) = type
;
1924 TREE_FIXED_CST_PTR (v
) = fp
;
1928 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1931 build_real (tree type
, REAL_VALUE_TYPE d
)
1934 REAL_VALUE_TYPE
*dp
;
1937 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1938 Consider doing it via real_convert now. */
1940 v
= make_node (REAL_CST
);
1941 dp
= ggc_alloc
<real_value
> ();
1942 memcpy (dp
, &d
, sizeof (REAL_VALUE_TYPE
));
1944 TREE_TYPE (v
) = type
;
1945 TREE_REAL_CST_PTR (v
) = dp
;
1946 TREE_OVERFLOW (v
) = overflow
;
1950 /* Like build_real, but first truncate D to the type. */
1953 build_real_truncate (tree type
, REAL_VALUE_TYPE d
)
1955 return build_real (type
, real_value_truncate (TYPE_MODE (type
), d
));
1958 /* Return a new REAL_CST node whose type is TYPE
1959 and whose value is the integer value of the INTEGER_CST node I. */
1962 real_value_from_int_cst (const_tree type
, const_tree i
)
1966 /* Clear all bits of the real value type so that we can later do
1967 bitwise comparisons to see if two values are the same. */
1968 memset (&d
, 0, sizeof d
);
1970 real_from_integer (&d
, type
? TYPE_MODE (type
) : VOIDmode
, i
,
1971 TYPE_SIGN (TREE_TYPE (i
)));
1975 /* Given a tree representing an integer constant I, return a tree
1976 representing the same value as a floating-point constant of type TYPE. */
1979 build_real_from_int_cst (tree type
, const_tree i
)
1982 int overflow
= TREE_OVERFLOW (i
);
1984 v
= build_real (type
, real_value_from_int_cst (type
, i
));
1986 TREE_OVERFLOW (v
) |= overflow
;
/* Return a newly constructed STRING_CST node whose value is
   the LEN characters at STR.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (int len, const char *str)
{
  tree s;
  size_t length;

  /* Do not waste bytes provided by padding of struct tree_string.  */
  length = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, length);

  s = (tree) ggc_internal_alloc (length);

  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  memcpy (s->string.str, str, len);
  s->string.str[len] = '\0';

  return s;
}
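/* Illustrative sketch, not part of the original sources: LEN counts the
   trailing NUL for C literals, and the caller still owes the node a type:

     tree s = build_string (6, "hello");   (5 characters plus the NUL)
     gcc_assert (TREE_STRING_LENGTH (s) == 6);

   TREE_TYPE (s) must then be set by the caller, e.g. to an array type.  */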
/* Return a newly constructed COMPLEX_CST node whose value is
   specified by the real and imaginary parts REAL and IMAG.
   Both REAL and IMAG should be constant nodes.  TYPE, if specified,
   will be the type of the COMPLEX_CST; otherwise a new type will be made.  */

tree
build_complex (tree type, tree real, tree imag)
{
  tree t = make_node (COMPLEX_CST);

  TREE_REALPART (t) = real;
  TREE_IMAGPART (t) = imag;
  TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
  TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
  return t;
}
2035 /* Build a complex (inf +- 0i), such as for the result of cproj.
2036 TYPE is the complex tree type of the result. If NEG is true, the
2037 imaginary zero is negative. */
2040 build_complex_inf (tree type
, bool neg
)
2042 REAL_VALUE_TYPE rinf
, rzero
= dconst0
;
2046 return build_complex (type
, build_real (TREE_TYPE (type
), rinf
),
2047 build_real (TREE_TYPE (type
), rzero
));
/* Return the constant 1 in type TYPE.  If TYPE has several elements, each
   element is set to 1.  In particular, this is 1 + i for complex types.  */

tree
build_each_one_cst (tree type)
{
  if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      tree scalar = build_one_cst (TREE_TYPE (type));
      return build_complex (type, scalar, scalar);
    }
  else
    return build_one_cst (type);
}

/* Return a constant of arithmetic type TYPE which is the
   multiplicative identity of the set TYPE.  */

tree
build_one_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      return build_int_cst (type, 1);

    case REAL_TYPE:
      return build_real (type, dconst1);

    case FIXED_POINT_TYPE:
      /* We can only generate 1 for accum types.  */
      gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
      return build_fixed (type, FCONST1 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
        tree scalar = build_one_cst (TREE_TYPE (type));

        return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      return build_complex (type,
                            build_one_cst (TREE_TYPE (type)),
                            build_zero_cst (TREE_TYPE (type)));

    default:
      gcc_unreachable ();
    }
}
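/* Illustrative sketch, not part of the original sources:

     gcc_assert (integer_onep (build_one_cst (integer_type_node)));
     gcc_assert (real_onep (build_one_cst (double_type_node)));

   and for COMPLEX_TYPE the result is 1 + 0i, matching the comment above.  */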
2103 /* Return an integer of type TYPE containing all 1's in as much precision as
2104 it contains, or a complex or vector whose subparts are such integers. */
2107 build_all_ones_cst (tree type
)
2109 if (TREE_CODE (type
) == COMPLEX_TYPE
)
2111 tree scalar
= build_all_ones_cst (TREE_TYPE (type
));
2112 return build_complex (type
, scalar
, scalar
);
2115 return build_minus_one_cst (type
);
2118 /* Return a constant of arithmetic type TYPE which is the
2119 opposite of the multiplicative identity of the set TYPE. */
2122 build_minus_one_cst (tree type
)
2124 switch (TREE_CODE (type
))
2126 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2127 case POINTER_TYPE
: case REFERENCE_TYPE
:
2129 return build_int_cst (type
, -1);
2132 return build_real (type
, dconstm1
);
2134 case FIXED_POINT_TYPE
:
2135 /* We can only generate 1 for accum types. */
2136 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
2137 return build_fixed (type
, fixed_from_double_int (double_int_minus_one
,
2142 tree scalar
= build_minus_one_cst (TREE_TYPE (type
));
2144 return build_vector_from_val (type
, scalar
);
2148 return build_complex (type
,
2149 build_minus_one_cst (TREE_TYPE (type
)),
2150 build_zero_cst (TREE_TYPE (type
)));
2157 /* Build 0 constant of type TYPE. This is used by constructor folding
2158 and thus the constant should be represented in memory by
2162 build_zero_cst (tree type
)
2164 switch (TREE_CODE (type
))
2166 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
2167 case POINTER_TYPE
: case REFERENCE_TYPE
:
2168 case OFFSET_TYPE
: case NULLPTR_TYPE
:
2169 return build_int_cst (type
, 0);
2172 return build_real (type
, dconst0
);
2174 case FIXED_POINT_TYPE
:
2175 return build_fixed (type
, FCONST0 (TYPE_MODE (type
)));
2179 tree scalar
= build_zero_cst (TREE_TYPE (type
));
2181 return build_vector_from_val (type
, scalar
);
2186 tree zero
= build_zero_cst (TREE_TYPE (type
));
2188 return build_complex (type
, zero
, zero
);
2192 if (!AGGREGATE_TYPE_P (type
))
2193 return fold_convert (type
, integer_zero_node
);
2194 return build_constructor (type
, NULL
);
2199 /* Build a BINFO with LEN language slots. */
2202 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL
)
2205 size_t length
= (offsetof (struct tree_binfo
, base_binfos
)
2206 + vec
<tree
, va_gc
>::embedded_size (base_binfos
));
2208 record_node_allocation_statistics (TREE_BINFO
, length
);
2210 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
2212 memset (t
, 0, offsetof (struct tree_binfo
, base_binfos
));
2214 TREE_SET_CODE (t
, TREE_BINFO
);
2216 BINFO_BASE_BINFOS (t
)->embedded_init (base_binfos
);
2221 /* Create a CASE_LABEL_EXPR tree node and return it. */
2224 build_case_label (tree low_value
, tree high_value
, tree label_decl
)
2226 tree t
= make_node (CASE_LABEL_EXPR
);
2228 TREE_TYPE (t
) = void_type_node
;
2229 SET_EXPR_LOCATION (t
, DECL_SOURCE_LOCATION (label_decl
));
2231 CASE_LOW (t
) = low_value
;
2232 CASE_HIGH (t
) = high_value
;
2233 CASE_LABEL (t
) = label_decl
;
2234 CASE_CHAIN (t
) = NULL_TREE
;
2239 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2240 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2241 The latter determines the length of the HOST_WIDE_INT vector. */
2244 make_int_cst_stat (int len
, int ext_len MEM_STAT_DECL
)
2247 int length
= ((ext_len
- 1) * sizeof (HOST_WIDE_INT
)
2248 + sizeof (struct tree_int_cst
));
2251 record_node_allocation_statistics (INTEGER_CST
, length
);
2253 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2255 TREE_SET_CODE (t
, INTEGER_CST
);
2256 TREE_INT_CST_NUNITS (t
) = len
;
2257 TREE_INT_CST_EXT_NUNITS (t
) = ext_len
;
2258 /* to_offset can only be applied to trees that are offset_int-sized
2259 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2260 must be exactly the precision of offset_int and so LEN is correct. */
2261 if (ext_len
<= OFFSET_INT_ELTS
)
2262 TREE_INT_CST_OFFSET_NUNITS (t
) = ext_len
;
2264 TREE_INT_CST_OFFSET_NUNITS (t
) = len
;
2266 TREE_CONSTANT (t
) = 1;
2271 /* Build a newly constructed TREE_VEC node of length LEN. */
2274 make_tree_vec_stat (int len MEM_STAT_DECL
)
2277 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2279 record_node_allocation_statistics (TREE_VEC
, length
);
2281 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2283 TREE_SET_CODE (t
, TREE_VEC
);
2284 TREE_VEC_LENGTH (t
) = len
;
2289 /* Grow a TREE_VEC node to new length LEN. */
2292 grow_tree_vec_stat (tree v
, int len MEM_STAT_DECL
)
2294 gcc_assert (TREE_CODE (v
) == TREE_VEC
);
2296 int oldlen
= TREE_VEC_LENGTH (v
);
2297 gcc_assert (len
> oldlen
);
2299 int oldlength
= (oldlen
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2300 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2302 record_node_allocation_statistics (TREE_VEC
, length
- oldlength
);
2304 v
= (tree
) ggc_realloc (v
, length PASS_MEM_STAT
);
2306 TREE_VEC_LENGTH (v
) = len
;
/* Return 1 if EXPR is the constant zero, whether it is integral, float or
   fixed, and scalar, complex or vector.  */

bool
zerop (const_tree expr)
{
  return (integer_zerop (expr)
          || real_zerop (expr)
          || fixed_zerop (expr));
}
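/* Illustrative sketch, not part of the original sources:

     gcc_assert (zerop (integer_zero_node));
     gcc_assert (zerop (build_real (float_type_node, dconst0)));
     gcc_assert (!zerop (integer_one_node));  */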
/* Return 1 if EXPR is the integer constant zero or a complex constant
   of zero.  */

bool
integer_zerop (const_tree expr)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return wi::eq_p (expr, 0);
    case COMPLEX_CST:
      return (integer_zerop (TREE_REALPART (expr))
              && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      {
        unsigned i;
        for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
          if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
            return false;
        return true;
      }
    default:
      return false;
    }
}
2348 /* Return 1 if EXPR is the integer constant one or the corresponding
2349 complex constant. */
2352 integer_onep (const_tree expr
)
2354 switch (TREE_CODE (expr
))
2357 return wi::eq_p (wi::to_widest (expr
), 1);
2359 return (integer_onep (TREE_REALPART (expr
))
2360 && integer_zerop (TREE_IMAGPART (expr
)));
2364 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2365 if (!integer_onep (VECTOR_CST_ELT (expr
, i
)))
2374 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2375 return 1 if every piece is the integer constant one. */
2378 integer_each_onep (const_tree expr
)
2380 if (TREE_CODE (expr
) == COMPLEX_CST
)
2381 return (integer_onep (TREE_REALPART (expr
))
2382 && integer_onep (TREE_IMAGPART (expr
)));
2384 return integer_onep (expr
);
2387 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2388 it contains, or a complex or vector whose subparts are such integers. */
2391 integer_all_onesp (const_tree expr
)
2393 if (TREE_CODE (expr
) == COMPLEX_CST
2394 && integer_all_onesp (TREE_REALPART (expr
))
2395 && integer_all_onesp (TREE_IMAGPART (expr
)))
2398 else if (TREE_CODE (expr
) == VECTOR_CST
)
2401 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2402 if (!integer_all_onesp (VECTOR_CST_ELT (expr
, i
)))
2407 else if (TREE_CODE (expr
) != INTEGER_CST
)
2410 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr
)), UNSIGNED
) == expr
;
2413 /* Return 1 if EXPR is the integer constant minus one. */
2416 integer_minus_onep (const_tree expr
)
2418 if (TREE_CODE (expr
) == COMPLEX_CST
)
2419 return (integer_all_onesp (TREE_REALPART (expr
))
2420 && integer_zerop (TREE_IMAGPART (expr
)));
2422 return integer_all_onesp (expr
);
2425 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2429 integer_pow2p (const_tree expr
)
2431 if (TREE_CODE (expr
) == COMPLEX_CST
2432 && integer_pow2p (TREE_REALPART (expr
))
2433 && integer_zerop (TREE_IMAGPART (expr
)))
2436 if (TREE_CODE (expr
) != INTEGER_CST
)
2439 return wi::popcount (expr
) == 1;
2442 /* Return 1 if EXPR is an integer constant other than zero or a
2443 complex constant other than zero. */
2446 integer_nonzerop (const_tree expr
)
2448 return ((TREE_CODE (expr
) == INTEGER_CST
2449 && !wi::eq_p (expr
, 0))
2450 || (TREE_CODE (expr
) == COMPLEX_CST
2451 && (integer_nonzerop (TREE_REALPART (expr
))
2452 || integer_nonzerop (TREE_IMAGPART (expr
)))));
2455 /* Return 1 if EXPR is the integer constant one. For vector,
2456 return 1 if every piece is the integer constant minus one
2457 (representing the value TRUE). */
2460 integer_truep (const_tree expr
)
2462 if (TREE_CODE (expr
) == VECTOR_CST
)
2463 return integer_all_onesp (expr
);
2464 return integer_onep (expr
);
2467 /* Return 1 if EXPR is the fixed-point constant zero. */
2470 fixed_zerop (const_tree expr
)
2472 return (TREE_CODE (expr
) == FIXED_CST
2473 && TREE_FIXED_CST (expr
).data
.is_zero ());
/* Return the power of two represented by a tree node known to be a
   power of two.  */

int
tree_log2 (const_tree expr)
{
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::exact_log2 (expr);
}

/* Similar, but return the largest integer Y such that 2 ** Y is less
   than or equal to EXPR.  */

int
tree_floor_log2 (const_tree expr)
{
  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::floor_log2 (expr);
}
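
/* Worked example (illustrative only): for an INTEGER_CST with value 8,
   tree_log2 returns 3 because 2**3 == 8; for the value 10, which is not
   a power of two, tree_floor_log2 returns 3 as well, since 2**3 <= 10 < 2**4:

     tree eight = build_int_cst (integer_type_node, 8);
     tree ten   = build_int_cst (integer_type_node, 10);
     gcc_checking_assert (tree_log2 (eight) == 3);
     gcc_checking_assert (tree_floor_log2 (ten) == 3);  */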
/* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of its type.  */

unsigned int
tree_ctz (const_tree expr)
{
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (expr);
      return MIN (ret1, prec);
    case SSA_NAME:
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
        return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
         wider than pointer's precision.  Make sure we never
         return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
          && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
        {
          ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
          return MIN (ret1 + ret2, prec);
        }
      return ret1;
    case RSHIFT_EXPR:
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
          && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
        {
          ret1 = tree_ctz (TREE_OPERAND (expr, 0));
          ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
          if (ret1 > ret2)
            return ret1 - ret2;
        }
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
          && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
        {
          int l = tree_log2 (TREE_OPERAND (expr, 1));
          if (l >= 0)
            {
              ret1 = tree_ctz (TREE_OPERAND (expr, 0));
              ret2 = l;
              if (ret1 > ret2)
                return ret1 - ret2;
            }
        }
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
        ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
        return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
        {
          ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
          return MIN (ret1, prec);
        }
      return 0;
    default:
      return 0;
    }
}
/* Return 1 if EXPR is the real constant zero.  Trailing zeroes matter for
   decimal float constants, so don't return 1 for them.  */

int
real_zerop (const_tree expr)
{
  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconst0)
             && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_zerop (TREE_REALPART (expr))
             && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      {
        unsigned i;
        for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
          if (!real_zerop (VECTOR_CST_ELT (expr, i)))
            return false;
        return true;
      }
    default:
      return false;
    }
}
/* Return 1 if EXPR is the real constant one in real or complex form.
   Trailing zeroes matter for decimal float constants, so don't return
   1 for them.  */

int
real_onep (const_tree expr)
{
  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconst1)
             && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_onep (TREE_REALPART (expr))
             && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      {
        unsigned i;
        for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
          if (!real_onep (VECTOR_CST_ELT (expr, i)))
            return false;
        return true;
      }
    default:
      return false;
    }
}
/* Return 1 if EXPR is the real constant minus one.  Trailing zeroes
   matter for decimal float constants, so don't return 1 for them.  */

int
real_minus_onep (const_tree expr)
{
  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return real_equal (&TREE_REAL_CST (expr), &dconstm1)
             && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_minus_onep (TREE_REALPART (expr))
             && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      {
        unsigned i;
        for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
          if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
            return false;
        return true;
      }
    default:
      return false;
    }
}
/* Nonzero if EXP is a constant or a cast of a constant.  */

int
really_constant_p (const_tree exp)
{
  /* This is not quite the same as STRIP_NOPS.  It does more.  */
  while (CONVERT_EXPR_P (exp)
         || TREE_CODE (exp) == NON_LVALUE_EXPR)
    exp = TREE_OPERAND (exp, 0);
  return TREE_CONSTANT (exp);
}
/* Return first list element whose TREE_VALUE is ELEM.
   Return 0 if ELEM is not in LIST.  */

tree
value_member (tree elem, tree list)
{
  while (list)
    {
      if (elem == TREE_VALUE (list))
        return list;
      list = TREE_CHAIN (list);
    }
  return NULL_TREE;
}

/* Return first list element whose TREE_PURPOSE is ELEM.
   Return 0 if ELEM is not in LIST.  */

tree
purpose_member (const_tree elem, tree list)
{
  while (list)
    {
      if (elem == TREE_PURPOSE (list))
        return list;
      list = TREE_CHAIN (list);
    }
  return NULL_TREE;
}
/* Return true if ELEM is in V.  */

bool
vec_member (const_tree elem, vec<tree, va_gc> *v)
{
  unsigned ix;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (v, ix, t)
    if (elem == t)
      return true;
  return false;
}

/* Returns element number IDX (zero-origin) of chain CHAIN, or
   NULL_TREE.  */

tree
chain_index (int idx, tree chain)
{
  for (; chain && idx > 0; --idx)
    chain = TREE_CHAIN (chain);
  return chain;
}
/* Return nonzero if ELEM is part of the chain CHAIN.  */

int
chain_member (const_tree elem, const_tree chain)
{
  while (chain)
    {
      if (elem == chain)
        return 1;
      chain = DECL_CHAIN (chain);
    }
  return 0;
}

/* Return the length of a chain of nodes chained through TREE_CHAIN.
   We expect a null pointer to mark the end of the chain.
   This is the Lisp primitive `length'.  */

int
list_length (const_tree t)
{
  const_tree p = t;
#ifdef ENABLE_TREE_CHECKING
  const_tree q = t;
#endif
  int len = 0;

  while (p)
    {
      p = TREE_CHAIN (p);
#ifdef ENABLE_TREE_CHECKING
      if (len % 2)
        q = TREE_CHAIN (q);
      gcc_assert (p != q);
#endif
      len++;
    }

  return len;
}
/* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
   UNION_TYPE TYPE, or NULL_TREE if none.  */

tree
first_field (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  while (t && TREE_CODE (t) != FIELD_DECL)
    t = TREE_CHAIN (t);
  return t;
}
/* Concatenate two chains of nodes (chained through TREE_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.
   This is the Lisp primitive `nconc'.  */

tree
chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
    continue;
  TREE_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}

/* Return the last node in a chain of nodes (chained through TREE_CHAIN).  */

tree
tree_last (tree chain)
{
  tree next;
  if (chain)
    while ((next = TREE_CHAIN (chain)))
      chain = next;
  return chain;
}
/* Reverse the order of elements in the chain T,
   and return the new head of the chain (old last element).  */

tree
nreverse (tree t)
{
  tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      /* We shouldn't be using this function to reverse BLOCK chains; we
         have blocks_nreverse for that.  */
      gcc_checking_assert (TREE_CODE (decl) != BLOCK);
      next = TREE_CHAIN (decl);
      TREE_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PARM and VALUE.  */

tree
build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
{
  tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
  TREE_PURPOSE (t) = parm;
  TREE_VALUE (t) = value;
  return t;
}

/* Build a chain of TREE_LIST nodes from a vector.  */

tree
build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
{
  tree ret = NULL_TREE;
  tree *pp = &ret;
  unsigned int i;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (vec, i, t)
    {
      *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
      pp = &TREE_CHAIN (*pp);
    }
  return ret;
}
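
/* Usage sketch (illustrative only): TREE_LIST chains built here combine
   with the chain utilities above, e.g.:

     tree l1 = build_tree_list (NULL_TREE, integer_zero_node);
     tree l2 = build_tree_list (NULL_TREE, integer_one_node);
     tree l  = chainon (l1, l2);
     gcc_checking_assert (list_length (l) == 2);
     gcc_checking_assert (tree_last (l) == l2);  */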
/* Return a newly created TREE_LIST node whose
   purpose and value fields are PURPOSE and VALUE
   and whose TREE_CHAIN is CHAIN.  */

tree
tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
  tree node;

  node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
  memset (node, 0, sizeof (struct tree_common));

  record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));

  TREE_SET_CODE (node, TREE_LIST);
  TREE_CHAIN (node) = chain;
  TREE_PURPOSE (node) = purpose;
  TREE_VALUE (node) = value;
  return node;
}
/* Return the values of the elements of a CONSTRUCTOR as a vector of
   trees.  */

vec<tree, va_gc> *
ctor_to_vec (tree ctor)
{
  vec<tree, va_gc> *vec;
  vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
  unsigned int ix;
  tree val;

  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
    vec->quick_push (val);

  return vec;
}
/* Return the size nominally occupied by an object of type TYPE
   when it resides in memory.  The value is measured in units of bytes,
   and its data type is that normally used for type sizes
   (which is the first type created by make_signed_type or
   make_unsigned_type).  */

tree
size_in_bytes (const_tree type)
{
  tree t;

  if (type == error_mark_node)
    return integer_zero_node;

  type = TYPE_MAIN_VARIANT (type);
  t = TYPE_SIZE_UNIT (type);

  if (t == 0)
    {
      lang_hooks.types.incomplete_type_error (NULL_TREE, type);
      return size_zero_node;
    }

  return t;
}

/* Return the size of TYPE (in bytes) as a wide integer
   or return -1 if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_size_in_bytes (const_tree type)
{
  tree t;

  if (type == error_mark_node)
    return 0;

  type = TYPE_MAIN_VARIANT (type);
  t = TYPE_SIZE_UNIT (type);

  if (t && tree_fits_uhwi_p (t))
    return TREE_INT_CST_LOW (t);
  else
    return -1;
}
/* Return the maximum size of TYPE (in bytes) as a wide integer
   or return -1 if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
max_int_size_in_bytes (const_tree type)
{
  HOST_WIDE_INT size = -1;
  tree size_tree;

  /* If this is an array type, check for a possible MAX_SIZE attached.  */

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      size_tree = TYPE_ARRAY_MAX_SIZE (type);

      if (size_tree && tree_fits_uhwi_p (size_tree))
        size = tree_to_uhwi (size_tree);
    }

  /* If we still haven't been able to get a size, see if the language
     can compute a maximum size.  */

  if (size == -1)
    {
      size_tree = lang_hooks.types.max_size (type);

      if (size_tree && tree_fits_uhwi_p (size_tree))
        size = tree_to_uhwi (size_tree);
    }

  return size;
}
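
/* Usage sketch (illustrative only): for a complete scalar type the two
   size queries agree; on a target where int is 32 bits,

     tree sz = size_in_bytes (integer_type_node);               -- INTEGER_CST 4
     HOST_WIDE_INT isz = int_size_in_bytes (integer_type_node); -- 4

   whereas for a variable-sized or incomplete type int_size_in_bytes
   returns -1.  */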
/* Return the bit position of FIELD, in bits from the start of the record.
   This is a tree of type bitsizetype.  */

tree
bit_position (const_tree field)
{
  return bit_from_pos (DECL_FIELD_OFFSET (field),
                       DECL_FIELD_BIT_OFFSET (field));
}

/* Return the byte position of FIELD, in bytes from the start of the record.
   This is a tree of type sizetype.  */

tree
byte_position (const_tree field)
{
  return byte_from_pos (DECL_FIELD_OFFSET (field),
                        DECL_FIELD_BIT_OFFSET (field));
}

/* Likewise, but return as an integer.  It must be representable in
   that way (since it could be a signed value, we don't have the
   option of returning -1 like int_size_in_bytes can).  */

HOST_WIDE_INT
int_byte_position (const_tree field)
{
  return tree_to_shwi (byte_position (field));
}
/* Return the strictest alignment, in bits, that T is known to have.  */

unsigned int
expr_align (const_tree t)
{
  unsigned int align0, align1;

  switch (TREE_CODE (t))
    {
    CASE_CONVERT:  case NON_LVALUE_EXPR:
      /* If we have conversions, we know that the alignment of the
         object must meet each of the alignments of the types.  */
      align0 = expr_align (TREE_OPERAND (t, 0));
      align1 = TYPE_ALIGN (TREE_TYPE (t));
      return MAX (align0, align1);

    case SAVE_EXPR:         case COMPOUND_EXPR:       case MODIFY_EXPR:
    case INIT_EXPR:         case TARGET_EXPR:         case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
      /* These don't change the alignment of an object.  */
      return expr_align (TREE_OPERAND (t, 0));

    case COND_EXPR:
      /* The best we can do is say that the alignment is the least aligned
         of the two arms.  */
      align0 = expr_align (TREE_OPERAND (t, 1));
      align1 = expr_align (TREE_OPERAND (t, 2));
      return MIN (align0, align1);

      /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
         meaningfully, it's always 1.  */
    case LABEL_DECL:     case CONST_DECL:
    case VAR_DECL:       case PARM_DECL:   case RESULT_DECL:
    case FUNCTION_DECL:
      gcc_assert (DECL_ALIGN (t) != 0);
      return DECL_ALIGN (t);

    default:
      break;
    }

  /* Otherwise take the alignment from that of the type.  */
  return TYPE_ALIGN (TREE_TYPE (t));
}
/* Return, as a tree node, the number of elements for TYPE (which is an
   ARRAY_TYPE) minus one.  This counts only elements of the top array.  */

tree
array_type_nelts (const_tree type)
{
  tree index_type, min, max;

  /* If they did it with unspecified bounds, then we should have already
     given an error about it before we got here.  */
  if (! TYPE_DOMAIN (type))
    return error_mark_node;

  index_type = TYPE_DOMAIN (type);
  min = TYPE_MIN_VALUE (index_type);
  max = TYPE_MAX_VALUE (index_type);

  /* TYPE_MAX_VALUE may not be set if the array has unknown length.  */
  if (!max)
    return error_mark_node;

  return (integer_zerop (min)
          ? max
          : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
}
/* If arg is static -- a reference to an object in static storage -- then
   return the object.  This is not the same as the C meaning of `static'.
   If arg isn't static, return NULL.  */

tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
         involve a trampoline as we unnest the nested function and create
         the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
              && ! DECL_THREAD_LOCAL_P (arg)
              && ! DECL_DLLIMPORT_P (arg)
              ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
              ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
         something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
         ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
        return NULL;

      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
        return staticp (TREE_OPERAND (arg, 0));
      else
        return NULL;

    case COMPOUND_LITERAL_EXPR:
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}
/* Return whether OP is a DECL whose address is function-invariant.  */

bool
decl_address_invariant_p (const_tree op)
{
  /* The conditions below are slightly less strict than the one in
     staticp.  */

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
      return true;

    case VAR_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
          || DECL_THREAD_LOCAL_P (op)
          || DECL_CONTEXT (op) == current_function_decl
          || decl_function_context (op) == current_function_decl)
        return true;
      break;

    case CONST_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
          || decl_function_context (op) == current_function_decl)
        return true;
      break;

    default:
      break;
    }

  return false;
}

/* Return whether OP is a DECL whose address is interprocedural-invariant.  */

bool
decl_address_ip_invariant_p (const_tree op)
{
  /* The conditions below are slightly less strict than the one in
     staticp.  */

  switch (TREE_CODE (op))
    {
    case LABEL_DECL:
    case FUNCTION_DECL:
    case STRING_CST:
      return true;

    case VAR_DECL:
      if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
           && !DECL_DLLIMPORT_P (op))
          || DECL_THREAD_LOCAL_P (op))
        return true;
      break;

    case CONST_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
        return true;
      break;

    default:
      break;
    }

  return false;
}
/* Return true if T is function-invariant (internal function, does
   not handle arithmetic; that's handled in skip_simple_arithmetic and
   tree_invariant_p).  */

static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      return true;

    case ADDR_EXPR:
      op = TREE_OPERAND (t, 0);
      while (handled_component_p (op))
        {
          switch (TREE_CODE (op))
            {
            case ARRAY_REF:
            case ARRAY_RANGE_REF:
              if (!tree_invariant_p (TREE_OPERAND (op, 1))
                  || TREE_OPERAND (op, 2) != NULL_TREE
                  || TREE_OPERAND (op, 3) != NULL_TREE)
                return false;
              break;

            case COMPONENT_REF:
              if (TREE_OPERAND (op, 2) != NULL_TREE)
                return false;
              break;

            default:;
            }
          op = TREE_OPERAND (op, 0);
        }

      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}

/* Return true if T is function-invariant.  */

bool
tree_invariant_p (tree t)
{
  tree inner = skip_simple_arithmetic (t);
  return tree_invariant_p_1 (inner);
}
/* Wrap a SAVE_EXPR around EXPR, if appropriate.
   Do this to any expression which may be used in more than one place,
   but must be evaluated only once.

   Normally, expand_expr would reevaluate the expression each time.
   Calling save_expr produces something that is evaluated and recorded
   the first time expand_expr is called on it.  Subsequent calls to
   expand_expr just reuse the recorded value.

   The call to expand_expr that generates code that actually computes
   the value is the first call *at compile time*.  Subsequent calls
   *at compile time* generate code to use the saved value.
   This produces correct result provided that *at run time* control
   always flows through the insns made by the first expand_expr
   before reaching the other places where the save_expr was evaluated.
   You, the caller of save_expr, must make sure this is so.

   Constants, and certain read-only nodes, are returned with no
   SAVE_EXPR because that is safe.  Expressions containing placeholders
   are not touched; see tree.def for an explanation of what these
   are used for.  */

tree
save_expr (tree expr)
{
  tree t = fold (expr);
  tree inner;

  /* If the tree evaluates to a constant, then we don't want to hide that
     fact (i.e. this allows further folding, and direct checks for constants).
     However, a read-only object that has side effects cannot be bypassed.
     Since it is no problem to reevaluate literals, we just return the
     literal node.  */
  inner = skip_simple_arithmetic (t);
  if (TREE_CODE (inner) == ERROR_MARK)
    return inner;

  if (tree_invariant_p_1 (inner))
    return t;

  /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
     it means that the size or offset of some field of an object depends on
     the value within another field.

     Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
     and some variable since it would then need to be both evaluated once and
     evaluated more than once.  Front-ends must assure this case cannot
     happen by surrounding any such subexpressions in their own SAVE_EXPR
     and forcing evaluation at the proper time.  */
  if (contains_placeholder_p (inner))
    return t;

  t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
  SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));

  /* This expression might be placed ahead of a jump to ensure that the
     value was computed on both sides of the jump.  So make sure it isn't
     eliminated as dead.  */
  TREE_SIDE_EFFECTS (t) = 1;
  return t;
}
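
/* Usage sketch (illustrative only): a front end that needs to reference
   an operand twice without evaluating it twice wraps it once, e.g. to
   build an ABS-like GENERIC expression by hand for some TYPE and EXPR:

     tree op = save_expr (expr);
     tree result = build3 (COND_EXPR, type,
                           build2 (LT_EXPR, boolean_type_node, op,
                                   build_int_cst (type, 0)),
                           build1 (NEGATE_EXPR, type, op),
                           op);

   Both uses of OP then expand to the single evaluation recorded by the
   SAVE_EXPR.  */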
/* Look inside EXPR into any simple arithmetic operations.  Return the
   outermost non-arithmetic or non-invariant node.  */

tree
skip_simple_arithmetic (tree expr)
{
  /* We don't care about whether this can be used as an lvalue in this
     context.  */
  while (TREE_CODE (expr) == NON_LVALUE_EXPR)
    expr = TREE_OPERAND (expr, 0);

  /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
     a constant, it will be more efficient to not make another SAVE_EXPR since
     it will allow better simplification and GCSE will be able to merge the
     computations if they actually occur.  */
  while (true)
    {
      if (UNARY_CLASS_P (expr))
        expr = TREE_OPERAND (expr, 0);
      else if (BINARY_CLASS_P (expr))
        {
          if (tree_invariant_p (TREE_OPERAND (expr, 1)))
            expr = TREE_OPERAND (expr, 0);
          else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
            expr = TREE_OPERAND (expr, 1);
          else
            break;
        }
      else
        break;
    }

  return expr;
}

/* Look inside EXPR into simple arithmetic operations involving constants.
   Return the outermost non-arithmetic or non-constant node.  */

tree
skip_simple_constant_arithmetic (tree expr)
{
  while (TREE_CODE (expr) == NON_LVALUE_EXPR)
    expr = TREE_OPERAND (expr, 0);

  while (true)
    {
      if (UNARY_CLASS_P (expr))
        expr = TREE_OPERAND (expr, 0);
      else if (BINARY_CLASS_P (expr))
        {
          if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
            expr = TREE_OPERAND (expr, 0);
          else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
            expr = TREE_OPERAND (expr, 1);
          else
            break;
        }
      else
        break;
    }

  return expr;
}
/* Return which tree structure is used by T.  */

enum tree_node_structure_enum
tree_node_structure (const_tree t)
{
  const enum tree_code code = TREE_CODE (t);
  return tree_node_structure_for_code (code);
}

/* Set various status flags when building a CALL_EXPR object T.  */

static void
process_call_operands (tree t)
{
  bool side_effects = TREE_SIDE_EFFECTS (t);
  bool read_only = false;
  int i = call_expr_flags (t);

  /* Calls have side-effects, except those to const or pure functions.  */
  if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
    side_effects = true;
  /* Propagate TREE_READONLY of arguments for const functions.  */
  if (i & ECF_CONST)
    read_only = true;

  if (!side_effects || read_only)
    for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
      {
        tree op = TREE_OPERAND (t, i);
        if (op && TREE_SIDE_EFFECTS (op))
          side_effects = true;
        if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
          read_only = false;
      }

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_READONLY (t) = read_only;
}
3486 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3487 size or offset that depends on a field within a record. */
3490 contains_placeholder_p (const_tree exp
)
3492 enum tree_code code
;
3497 code
= TREE_CODE (exp
);
3498 if (code
== PLACEHOLDER_EXPR
)
3501 switch (TREE_CODE_CLASS (code
))
3504 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3505 position computations since they will be converted into a
3506 WITH_RECORD_EXPR involving the reference, which will assume
3507 here will be valid. */
3508 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3510 case tcc_exceptional
:
3511 if (code
== TREE_LIST
)
3512 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp
))
3513 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp
)));
3518 case tcc_comparison
:
3519 case tcc_expression
:
3523 /* Ignoring the first operand isn't quite right, but works best. */
3524 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1));
3527 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
3528 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1))
3529 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 2)));
3532 /* The save_expr function never wraps anything containing
3533 a PLACEHOLDER_EXPR. */
3540 switch (TREE_CODE_LENGTH (code
))
3543 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3545 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
3546 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1)));
3557 const_call_expr_arg_iterator iter
;
3558 FOR_EACH_CONST_CALL_EXPR_ARG (arg
, iter
, exp
)
3559 if (CONTAINS_PLACEHOLDER_P (arg
))
3573 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3574 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3578 type_contains_placeholder_1 (const_tree type
)
3580 /* If the size contains a placeholder or the parent type (component type in
3581 the case of arrays) type involves a placeholder, this type does. */
3582 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type
))
3583 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type
))
3584 || (!POINTER_TYPE_P (type
)
3586 && type_contains_placeholder_p (TREE_TYPE (type
))))
3589 /* Now do type-specific checks. Note that the last part of the check above
3590 greatly limits what we have to do below. */
3591 switch (TREE_CODE (type
))
3594 case POINTER_BOUNDS_TYPE
:
3600 case REFERENCE_TYPE
:
3609 case FIXED_POINT_TYPE
:
3610 /* Here we just check the bounds. */
3611 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type
))
3612 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type
)));
3615 /* We have already checked the component type above, so just check
3616 the domain type. Flexible array members have a null domain. */
3617 return TYPE_DOMAIN (type
) ?
3618 type_contains_placeholder_p (TYPE_DOMAIN (type
)) : false;
3622 case QUAL_UNION_TYPE
:
3626 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
3627 if (TREE_CODE (field
) == FIELD_DECL
3628 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field
))
3629 || (TREE_CODE (type
) == QUAL_UNION_TYPE
3630 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field
)))
3631 || type_contains_placeholder_p (TREE_TYPE (field
))))
3642 /* Wrapper around above function used to cache its result. */
3645 type_contains_placeholder_p (tree type
)
3649 /* If the contains_placeholder_bits field has been initialized,
3650 then we know the answer. */
3651 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) > 0)
3652 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) - 1;
3654 /* Indicate that we've seen this type node, and the answer is false.
3655 This is what we want to return if we run into recursion via fields. */
3656 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = 1;
3658 /* Compute the real value. */
3659 result
= type_contains_placeholder_1 (type
);
3661 /* Store the real value. */
3662 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = result
+ 1;
3667 /* Push tree EXP onto vector QUEUE if it is not already present. */
3670 push_without_duplicates (tree exp
, vec
<tree
> *queue
)
3675 FOR_EACH_VEC_ELT (*queue
, i
, iter
)
3676 if (simple_cst_equal (iter
, exp
) == 1)
3680 queue
->safe_push (exp
);
3683 /* Given a tree EXP, find all occurrences of references to fields
3684 in a PLACEHOLDER_EXPR and place them in vector REFS without
3685 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3686 we assume here that EXP contains only arithmetic expressions
3687 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3691 find_placeholder_in_expr (tree exp
, vec
<tree
> *refs
)
3693 enum tree_code code
= TREE_CODE (exp
);
3697 /* We handle TREE_LIST and COMPONENT_REF separately. */
3698 if (code
== TREE_LIST
)
3700 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), refs
);
3701 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), refs
);
3703 else if (code
== COMPONENT_REF
)
3705 for (inner
= TREE_OPERAND (exp
, 0);
3706 REFERENCE_CLASS_P (inner
);
3707 inner
= TREE_OPERAND (inner
, 0))
3710 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3711 push_without_duplicates (exp
, refs
);
3713 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), refs
);
3716 switch (TREE_CODE_CLASS (code
))
3721 case tcc_declaration
:
3722 /* Variables allocated to static storage can stay. */
3723 if (!TREE_STATIC (exp
))
3724 push_without_duplicates (exp
, refs
);
3727 case tcc_expression
:
3728 /* This is the pattern built in ada/make_aligning_type. */
3729 if (code
== ADDR_EXPR
3730 && TREE_CODE (TREE_OPERAND (exp
, 0)) == PLACEHOLDER_EXPR
)
3732 push_without_duplicates (exp
, refs
);
3736 /* Fall through... */
3738 case tcc_exceptional
:
3741 case tcc_comparison
:
3743 for (i
= 0; i
< TREE_CODE_LENGTH (code
); i
++)
3744 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
3748 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3749 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
3757 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3758 return a tree with all occurrences of references to F in a
3759 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3760 CONST_DECLs. Note that we assume here that EXP contains only
3761 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3762 occurring only in their argument list. */
3765 substitute_in_expr (tree exp
, tree f
, tree r
)
3767 enum tree_code code
= TREE_CODE (exp
);
3768 tree op0
, op1
, op2
, op3
;
3771 /* We handle TREE_LIST and COMPONENT_REF separately. */
3772 if (code
== TREE_LIST
)
3774 op0
= SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp
), f
, r
);
3775 op1
= SUBSTITUTE_IN_EXPR (TREE_VALUE (exp
), f
, r
);
3776 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
3779 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
3781 else if (code
== COMPONENT_REF
)
3785 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3786 and it is the right field, replace it with R. */
3787 for (inner
= TREE_OPERAND (exp
, 0);
3788 REFERENCE_CLASS_P (inner
);
3789 inner
= TREE_OPERAND (inner
, 0))
3793 op1
= TREE_OPERAND (exp
, 1);
3795 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& op1
== f
)
3798 /* If this expression hasn't been completed let, leave it alone. */
3799 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& !TREE_TYPE (inner
))
3802 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3803 if (op0
== TREE_OPERAND (exp
, 0))
3807 = fold_build3 (COMPONENT_REF
, TREE_TYPE (exp
), op0
, op1
, NULL_TREE
);
3810 switch (TREE_CODE_CLASS (code
))
3815 case tcc_declaration
:
3821 case tcc_expression
:
3825 /* Fall through... */
3827 case tcc_exceptional
:
3830 case tcc_comparison
:
3832 switch (TREE_CODE_LENGTH (code
))
3838 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3839 if (op0
== TREE_OPERAND (exp
, 0))
3842 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
3846 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3847 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3849 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
3852 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
3856 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3857 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3858 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
3860 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3861 && op2
== TREE_OPERAND (exp
, 2))
3864 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
3868 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3869 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3870 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
3871 op3
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 3), f
, r
);
3873 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3874 && op2
== TREE_OPERAND (exp
, 2)
3875 && op3
== TREE_OPERAND (exp
, 3))
3879 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
3891 new_tree
= NULL_TREE
;
3893 /* If we are trying to replace F with a constant, inline back
3894 functions which do nothing else than computing a value from
3895 the arguments they are passed. This makes it possible to
3896 fold partially or entirely the replacement expression. */
3897 if (CONSTANT_CLASS_P (r
) && code
== CALL_EXPR
)
3899 tree t
= maybe_inline_call_in_expr (exp
);
3901 return SUBSTITUTE_IN_EXPR (t
, f
, r
);
3904 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3906 tree op
= TREE_OPERAND (exp
, i
);
3907 tree new_op
= SUBSTITUTE_IN_EXPR (op
, f
, r
);
3911 new_tree
= copy_node (exp
);
3912 TREE_OPERAND (new_tree
, i
) = new_op
;
3918 new_tree
= fold (new_tree
);
3919 if (TREE_CODE (new_tree
) == CALL_EXPR
)
3920 process_call_operands (new_tree
);
3931 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
3933 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
3934 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
3939 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3940 for it within OBJ, a tree that is an object or a chain of references. */
3943 substitute_placeholder_in_expr (tree exp
, tree obj
)
3945 enum tree_code code
= TREE_CODE (exp
);
3946 tree op0
, op1
, op2
, op3
;
3949 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3950 in the chain of OBJ. */
3951 if (code
== PLACEHOLDER_EXPR
)
3953 tree need_type
= TYPE_MAIN_VARIANT (TREE_TYPE (exp
));
3956 for (elt
= obj
; elt
!= 0;
3957 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
3958 || TREE_CODE (elt
) == COND_EXPR
)
3959 ? TREE_OPERAND (elt
, 1)
3960 : (REFERENCE_CLASS_P (elt
)
3961 || UNARY_CLASS_P (elt
)
3962 || BINARY_CLASS_P (elt
)
3963 || VL_EXP_CLASS_P (elt
)
3964 || EXPRESSION_CLASS_P (elt
))
3965 ? TREE_OPERAND (elt
, 0) : 0))
3966 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
3969 for (elt
= obj
; elt
!= 0;
3970 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
3971 || TREE_CODE (elt
) == COND_EXPR
)
3972 ? TREE_OPERAND (elt
, 1)
3973 : (REFERENCE_CLASS_P (elt
)
3974 || UNARY_CLASS_P (elt
)
3975 || BINARY_CLASS_P (elt
)
3976 || VL_EXP_CLASS_P (elt
)
3977 || EXPRESSION_CLASS_P (elt
))
3978 ? TREE_OPERAND (elt
, 0) : 0))
3979 if (POINTER_TYPE_P (TREE_TYPE (elt
))
3980 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
3982 return fold_build1 (INDIRECT_REF
, need_type
, elt
);
3984 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3985 survives until RTL generation, there will be an error. */
3989 /* TREE_LIST is special because we need to look at TREE_VALUE
3990 and TREE_CHAIN, not TREE_OPERANDS. */
3991 else if (code
== TREE_LIST
)
3993 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), obj
);
3994 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), obj
);
3995 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
3998 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
4001 switch (TREE_CODE_CLASS (code
))
4004 case tcc_declaration
:
4007 case tcc_exceptional
:
4010 case tcc_comparison
:
4011 case tcc_expression
:
4014 switch (TREE_CODE_LENGTH (code
))
4020 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4021 if (op0
== TREE_OPERAND (exp
, 0))
4024 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
4028 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4029 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4031 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
4034 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
4038 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4039 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4040 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4042 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4043 && op2
== TREE_OPERAND (exp
, 2))
4046 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
4050 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
4051 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
4052 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
4053 op3
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 3), obj
);
4055 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
4056 && op2
== TREE_OPERAND (exp
, 2)
4057 && op3
== TREE_OPERAND (exp
, 3))
4061 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
4073 new_tree
= NULL_TREE
;
4075 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
4077 tree op
= TREE_OPERAND (exp
, i
);
4078 tree new_op
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (op
, obj
);
4082 new_tree
= copy_node (exp
);
4083 TREE_OPERAND (new_tree
, i
) = new_op
;
4089 new_tree
= fold (new_tree
);
4090 if (TREE_CODE (new_tree
) == CALL_EXPR
)
4091 process_call_operands (new_tree
);
4102 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
4104 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
4105 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
4111 /* Subroutine of stabilize_reference; this is called for subtrees of
4112 references. Any expression with side-effects must be put in a SAVE_EXPR
4113 to ensure that it is only evaluated once.
4115 We don't put SAVE_EXPR nodes around everything, because assigning very
4116 simple expressions to temporaries causes us to miss good opportunities
4117 for optimizations. Among other things, the opportunity to fold in the
4118 addition of a constant into an addressing mode often gets lost, e.g.
4119 "y[i+1] += x;". In general, we take the approach that we should not make
4120 an assignment unless we are forced into it - i.e., that any non-side effect
4121 operator should be allowed, and that cse should take care of coalescing
4122 multiple utterances of the same expression should that prove fruitful. */
4125 stabilize_reference_1 (tree e
)
4128 enum tree_code code
= TREE_CODE (e
);
4130 /* We cannot ignore const expressions because it might be a reference
4131 to a const array but whose index contains side-effects. But we can
4132 ignore things that are actual constant or that already have been
4133 handled by this function. */
4135 if (tree_invariant_p (e
))
4138 switch (TREE_CODE_CLASS (code
))
4140 case tcc_exceptional
:
4142 case tcc_declaration
:
4143 case tcc_comparison
:
4145 case tcc_expression
:
4148 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4149 so that it will only be evaluated once. */
4150 /* The reference (r) and comparison (<) classes could be handled as
4151 below, but it is generally faster to only evaluate them once. */
4152 if (TREE_SIDE_EFFECTS (e
))
4153 return save_expr (e
);
4157 /* Constants need no processing. In fact, we should never reach
4162 /* Division is slow and tends to be compiled with jumps,
4163 especially the division by powers of 2 that is often
4164 found inside of an array reference. So do it just once. */
4165 if (code
== TRUNC_DIV_EXPR
|| code
== TRUNC_MOD_EXPR
4166 || code
== FLOOR_DIV_EXPR
|| code
== FLOOR_MOD_EXPR
4167 || code
== CEIL_DIV_EXPR
|| code
== CEIL_MOD_EXPR
4168 || code
== ROUND_DIV_EXPR
|| code
== ROUND_MOD_EXPR
)
4169 return save_expr (e
);
4170 /* Recursively stabilize each operand. */
4171 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)),
4172 stabilize_reference_1 (TREE_OPERAND (e
, 1)));
4176 /* Recursively stabilize each operand. */
4177 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)));
4184 TREE_TYPE (result
) = TREE_TYPE (e
);
4185 TREE_READONLY (result
) = TREE_READONLY (e
);
4186 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (e
);
4187 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (e
);
4192 /* Stabilize a reference so that we can use it any number of times
4193 without causing its operands to be evaluated more than once.
4194 Returns the stabilized reference. This works by means of save_expr,
4195 so see the caveats in the comments about save_expr.
4197 Also allows conversion expressions whose operands are references.
4198 Any other kind of expression is returned unchanged. */
4201 stabilize_reference (tree ref
)
4204 enum tree_code code
= TREE_CODE (ref
);
4211 /* No action is needed in this case. */
4216 case FIX_TRUNC_EXPR
:
4217 result
= build_nt (code
, stabilize_reference (TREE_OPERAND (ref
, 0)));
4221 result
= build_nt (INDIRECT_REF
,
4222 stabilize_reference_1 (TREE_OPERAND (ref
, 0)));
4226 result
= build_nt (COMPONENT_REF
,
4227 stabilize_reference (TREE_OPERAND (ref
, 0)),
4228 TREE_OPERAND (ref
, 1), NULL_TREE
);
4232 result
= build_nt (BIT_FIELD_REF
,
4233 stabilize_reference (TREE_OPERAND (ref
, 0)),
4234 TREE_OPERAND (ref
, 1), TREE_OPERAND (ref
, 2));
4235 REF_REVERSE_STORAGE_ORDER (result
) = REF_REVERSE_STORAGE_ORDER (ref
);
4239 result
= build_nt (ARRAY_REF
,
4240 stabilize_reference (TREE_OPERAND (ref
, 0)),
4241 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4242 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4245 case ARRAY_RANGE_REF
:
4246 result
= build_nt (ARRAY_RANGE_REF
,
4247 stabilize_reference (TREE_OPERAND (ref
, 0)),
4248 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4249 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4253 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4254 it wouldn't be ignored. This matters when dealing with
4256 return stabilize_reference_1 (ref
);
4258 /* If arg isn't a kind of lvalue we recognize, make no change.
4259 Caller should recognize the error for an invalid lvalue. */
4264 return error_mark_node
;
4267 TREE_TYPE (result
) = TREE_TYPE (ref
);
4268 TREE_READONLY (result
) = TREE_READONLY (ref
);
4269 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (ref
);
4270 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (ref
);
4275 /* Low-level constructors for expressions. */
4277 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4278 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4281 recompute_tree_invariant_for_addr_expr (tree t
)
4284 bool tc
= true, se
= false;
4286 gcc_assert (TREE_CODE (t
) == ADDR_EXPR
);
4288 /* We started out assuming this address is both invariant and constant, but
4289 does not have side effects. Now go down any handled components and see if
4290 any of them involve offsets that are either non-constant or non-invariant.
4291 Also check for side-effects.
4293 ??? Note that this code makes no attempt to deal with the case where
4294 taking the address of something causes a copy due to misalignment. */
4296 #define UPDATE_FLAGS(NODE) \
4297 do { tree _node = (NODE); \
4298 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4299 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4301 for (node
= TREE_OPERAND (t
, 0); handled_component_p (node
);
4302 node
= TREE_OPERAND (node
, 0))
4304 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4305 array reference (probably made temporarily by the G++ front end),
4306 so ignore all the operands. */
4307 if ((TREE_CODE (node
) == ARRAY_REF
4308 || TREE_CODE (node
) == ARRAY_RANGE_REF
)
4309 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node
, 0))) == ARRAY_TYPE
)
4311 UPDATE_FLAGS (TREE_OPERAND (node
, 1));
4312 if (TREE_OPERAND (node
, 2))
4313 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4314 if (TREE_OPERAND (node
, 3))
4315 UPDATE_FLAGS (TREE_OPERAND (node
, 3));
4317 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4318 FIELD_DECL, apparently. The G++ front end can put something else
4319 there, at least temporarily. */
4320 else if (TREE_CODE (node
) == COMPONENT_REF
4321 && TREE_CODE (TREE_OPERAND (node
, 1)) == FIELD_DECL
)
4323 if (TREE_OPERAND (node
, 2))
4324 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4328 node
= lang_hooks
.expr_to_decl (node
, &tc
, &se
);
4330 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4331 the address, since &(*a)->b is a form of addition. If it's a constant, the
4332 address is constant too. If it's a decl, its address is constant if the
4333 decl is static. Everything else is not constant and, furthermore,
4334 taking the address of a volatile variable is not volatile. */
4335 if (TREE_CODE (node
) == INDIRECT_REF
4336 || TREE_CODE (node
) == MEM_REF
)
4337 UPDATE_FLAGS (TREE_OPERAND (node
, 0));
4338 else if (CONSTANT_CLASS_P (node
))
4340 else if (DECL_P (node
))
4341 tc
&= (staticp (node
) != NULL_TREE
);
4345 se
|= TREE_SIDE_EFFECTS (node
);
4349 TREE_CONSTANT (t
) = tc
;
4350 TREE_SIDE_EFFECTS (t
) = se
;
4354 /* Build an expression of code CODE, data type TYPE, and operands as
4355 specified. Expressions and reference nodes can be created this way.
4356 Constants, decls, types and misc nodes cannot be.
4358 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4359 enough for all extant tree codes. */
4362 build0_stat (enum tree_code code
, tree tt MEM_STAT_DECL
)
4366 gcc_assert (TREE_CODE_LENGTH (code
) == 0);
4368 t
= make_node_stat (code PASS_MEM_STAT
);
4375 build1_stat (enum tree_code code
, tree type
, tree node MEM_STAT_DECL
)
4377 int length
= sizeof (struct tree_exp
);
4380 record_node_allocation_statistics (code
, length
);
4382 gcc_assert (TREE_CODE_LENGTH (code
) == 1);
4384 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
4386 memset (t
, 0, sizeof (struct tree_common
));
4388 TREE_SET_CODE (t
, code
);
4390 TREE_TYPE (t
) = type
;
4391 SET_EXPR_LOCATION (t
, UNKNOWN_LOCATION
);
4392 TREE_OPERAND (t
, 0) = node
;
4393 if (node
&& !TYPE_P (node
))
4395 TREE_SIDE_EFFECTS (t
) = TREE_SIDE_EFFECTS (node
);
4396 TREE_READONLY (t
) = TREE_READONLY (node
);
4399 if (TREE_CODE_CLASS (code
) == tcc_statement
)
4400 TREE_SIDE_EFFECTS (t
) = 1;
4404 /* All of these have side-effects, no matter what their
4406 TREE_SIDE_EFFECTS (t
) = 1;
4407 TREE_READONLY (t
) = 0;
4411 /* Whether a dereference is readonly has nothing to do with whether
4412 its operand is readonly. */
4413 TREE_READONLY (t
) = 0;
4418 recompute_tree_invariant_for_addr_expr (t
);
4422 if ((TREE_CODE_CLASS (code
) == tcc_unary
|| code
== VIEW_CONVERT_EXPR
)
4423 && node
&& !TYPE_P (node
)
4424 && TREE_CONSTANT (node
))
4425 TREE_CONSTANT (t
) = 1;
4426 if (TREE_CODE_CLASS (code
) == tcc_reference
4427 && node
&& TREE_THIS_VOLATILE (node
))
4428 TREE_THIS_VOLATILE (t
) = 1;
4435 #define PROCESS_ARG(N) \
4437 TREE_OPERAND (t, N) = arg##N; \
4438 if (arg##N &&!TYPE_P (arg##N)) \
4440 if (TREE_SIDE_EFFECTS (arg##N)) \
4442 if (!TREE_READONLY (arg##N) \
4443 && !CONSTANT_CLASS_P (arg##N)) \
4444 (void) (read_only = 0); \
4445 if (!TREE_CONSTANT (arg##N)) \
4446 (void) (constant = 0); \
4451 build2_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1 MEM_STAT_DECL
)
4453 bool constant
, read_only
, side_effects
;
4456 gcc_assert (TREE_CODE_LENGTH (code
) == 2);
4458 if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
|| code
== MULT_EXPR
)
4459 && arg0
&& arg1
&& tt
&& POINTER_TYPE_P (tt
)
4460 /* When sizetype precision doesn't match that of pointers
4461 we need to be able to build explicit extensions or truncations
4462 of the offset argument. */
4463 && TYPE_PRECISION (sizetype
) == TYPE_PRECISION (tt
))
4464 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
4465 && TREE_CODE (arg1
) == INTEGER_CST
);
4467 if (code
== POINTER_PLUS_EXPR
&& arg0
&& arg1
&& tt
)
4468 gcc_assert (POINTER_TYPE_P (tt
) && POINTER_TYPE_P (TREE_TYPE (arg0
))
4469 && ptrofftype_p (TREE_TYPE (arg1
)));
4471 t
= make_node_stat (code PASS_MEM_STAT
);
4474 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4475 result based on those same flags for the arguments. But if the
4476 arguments aren't really even `tree' expressions, we shouldn't be trying
4479 /* Expressions without side effects may be constant if their
4480 arguments are as well. */
4481 constant
= (TREE_CODE_CLASS (code
) == tcc_comparison
4482 || TREE_CODE_CLASS (code
) == tcc_binary
);
4484 side_effects
= TREE_SIDE_EFFECTS (t
);
4489 TREE_SIDE_EFFECTS (t
) = side_effects
;
4490 if (code
== MEM_REF
)
4492 if (arg0
&& TREE_CODE (arg0
) == ADDR_EXPR
)
4494 tree o
= TREE_OPERAND (arg0
, 0);
4495 TREE_READONLY (t
) = TREE_READONLY (o
);
4496 TREE_THIS_VOLATILE (t
) = TREE_THIS_VOLATILE (o
);
4501 TREE_READONLY (t
) = read_only
;
4502 TREE_CONSTANT (t
) = constant
;
4503 TREE_THIS_VOLATILE (t
)
4504 = (TREE_CODE_CLASS (code
) == tcc_reference
4505 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4513 build3_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4514 tree arg2 MEM_STAT_DECL
)
4516 bool constant
, read_only
, side_effects
;
4519 gcc_assert (TREE_CODE_LENGTH (code
) == 3);
4520 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
4522 t
= make_node_stat (code PASS_MEM_STAT
);
4527 /* As a special exception, if COND_EXPR has NULL branches, we
4528 assume that it is a gimple statement and always consider
4529 it to have side effects. */
4530 if (code
== COND_EXPR
4531 && tt
== void_type_node
4532 && arg1
== NULL_TREE
4533 && arg2
== NULL_TREE
)
4534 side_effects
= true;
4536 side_effects
= TREE_SIDE_EFFECTS (t
);
4542 if (code
== COND_EXPR
)
4543 TREE_READONLY (t
) = read_only
;
4545 TREE_SIDE_EFFECTS (t
) = side_effects
;
4546 TREE_THIS_VOLATILE (t
)
4547 = (TREE_CODE_CLASS (code
) == tcc_reference
4548 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4554 build4_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4555 tree arg2
, tree arg3 MEM_STAT_DECL
)
4557 bool constant
, read_only
, side_effects
;
4560 gcc_assert (TREE_CODE_LENGTH (code
) == 4);
4562 t
= make_node_stat (code PASS_MEM_STAT
);
4565 side_effects
= TREE_SIDE_EFFECTS (t
);
4572 TREE_SIDE_EFFECTS (t
) = side_effects
;
4573 TREE_THIS_VOLATILE (t
)
4574 = (TREE_CODE_CLASS (code
) == tcc_reference
4575 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4581 build5_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4582 tree arg2
, tree arg3
, tree arg4 MEM_STAT_DECL
)
4584 bool constant
, read_only
, side_effects
;
4587 gcc_assert (TREE_CODE_LENGTH (code
) == 5);
4589 t
= make_node_stat (code PASS_MEM_STAT
);
4592 side_effects
= TREE_SIDE_EFFECTS (t
);
4600 TREE_SIDE_EFFECTS (t
) = side_effects
;
4601 if (code
== TARGET_MEM_REF
)
4603 if (arg0
&& TREE_CODE (arg0
) == ADDR_EXPR
)
4605 tree o
= TREE_OPERAND (arg0
, 0);
4606 TREE_READONLY (t
) = TREE_READONLY (o
);
4607 TREE_THIS_VOLATILE (t
) = TREE_THIS_VOLATILE (o
);
4611 TREE_THIS_VOLATILE (t
)
4612 = (TREE_CODE_CLASS (code
) == tcc_reference
4613 && arg0
&& TREE_THIS_VOLATILE (arg0
));
/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  HOST_WIDE_INT offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
          || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
                ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}
/* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T.  */

offset_int
mem_ref_offset (const_tree t)
{
  return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
}

/* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
   offsetted by OFFSET units.  */

tree
build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
{
  tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
                          build_fold_addr_expr (base),
                          build_int_cst (ptr_type_node, offset));
  tree addr = build1 (ADDR_EXPR, type, ref);
  recompute_tree_invariant_for_addr_expr (addr);
  return addr;
}
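
/* Usage sketch (illustrative only): given a static VAR_DECL BASE of an
   aggregate type, an invariant address of the byte at offset 4 can be
   built as

     tree addr = build_invariant_address (ptr_type_node, base, 4);

   The helper folds the inner MEM_REF and then recomputes TREE_CONSTANT
   and TREE_SIDE_EFFECTS on the resulting ADDR_EXPR.  */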
/* Similar except don't specify the TREE_TYPE
   and leave the TREE_SIDE_EFFECTS as 0.
   It is permissible for arguments to be null,
   or even garbage if their values do not matter.  */

tree
build_nt (enum tree_code code, ...)
{
  tree t;
  int length;
  int i;
  va_list p;

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  va_start (p, code);

  t = make_node (code);
  length = TREE_CODE_LENGTH (code);

  for (i = 0; i < length; i++)
    TREE_OPERAND (t, i) = va_arg (p, tree);

  va_end (p);
  return t;
}

/* Similar to build_nt, but for creating a CALL_EXPR object with a
   tree VEC.  */

tree
build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
{
  tree ret, t;
  unsigned int ix;

  ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
  CALL_EXPR_FN (ret) = fn;
  CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
  FOR_EACH_VEC_SAFE_ELT (args, ix, t)
    CALL_EXPR_ARG (ret, ix) = t;
  return ret;
}
/* Create a DECL_... node of code CODE, name NAME and data type TYPE.
   We do NOT enter this node in any sort of symbol table.

   LOC is the location of the decl.

   layout_decl is used to set up the decl's storage layout.
   Other slots are initialized to 0 or null pointers.  */

tree
build_decl_stat (location_t loc, enum tree_code code, tree name,
                 tree type MEM_STAT_DECL)
{
  tree t;

  t = make_node_stat (code PASS_MEM_STAT);
  DECL_SOURCE_LOCATION (t) = loc;

/*  if (type == error_mark_node)
    type = integer_type_node; */
/* That is not done, deliberately, so that having error_mark_node
   as the type can suppress useless errors in the use of this variable.  */

  DECL_NAME (t) = name;
  TREE_TYPE (t) = type;

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    layout_decl (t, 0);

  return t;
}

/* Builds and returns function declaration with NAME and TYPE.  */

tree
build_fn_decl (const char *name, tree type)
{
  tree id = get_identifier (name);
  tree decl = build_decl (input_location, FUNCTION_DECL, id, type);

  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;

  return decl;
}
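
/* Usage sketch (illustrative only): build_fn_decl is handy for synthesizing
   declarations of runtime support routines, e.g. an `int f (void)' style
   helper (the name below is made up for the example):

     tree fntype = build_function_type_list (integer_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__example_runtime_hook", fntype);

   The result is marked DECL_EXTERNAL, TREE_PUBLIC, DECL_ARTIFICIAL and
   TREE_NOTHROW, as set above.  */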
vec<tree, va_gc> *all_translation_units;

/* Builds a new translation-unit decl with name NAME, queues it in the
   global list of translation-unit decls and returns it.  */

tree
build_translation_unit_decl (tree name)
{
  tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
                        name, NULL_TREE);
  TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
  vec_safe_push (all_translation_units, tu);
  return tu;
}

/* BLOCK nodes are used to represent the structure of binding contours
   and declarations, once those contours have been exited and their contents
   compiled.  This information is used for outputting debugging info.  */

tree
build_block (tree vars, tree subblocks, tree supercontext, tree chain)
{
  tree block = make_node (BLOCK);

  BLOCK_VARS (block) = vars;
  BLOCK_SUBBLOCKS (block) = subblocks;
  BLOCK_SUPERCONTEXT (block) = supercontext;
  BLOCK_CHAIN (block) = chain;
  return block;
}
/* Like SET_EXPR_LOCATION, but make sure the tree can have a location.

   LOC is the location to use in tree T.  */

void
protected_set_expr_location (tree t, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, loc);
}

/* Return a declaration like DDECL except that its DECL_ATTRIBUTES
   is ATTRIBUTE.  */

tree
build_decl_attribute_variant (tree ddecl, tree attribute)
{
  DECL_ATTRIBUTES (ddecl) = attribute;
  return ddecl;
}
4811 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4812 is ATTRIBUTE and its qualifiers are QUALS.
4814 Record such modified types already made so we don't make duplicates. */
4817 build_type_attribute_qual_variant (tree ttype
, tree attribute
, int quals
)
4819 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype
), attribute
))
4821 inchash::hash hstate
;
4825 enum tree_code code
= TREE_CODE (ttype
);
4827 /* Building a distinct copy of a tagged type is inappropriate; it
4828 causes breakage in code that expects there to be a one-to-one
4829 relationship between a struct and its fields.
4830 build_duplicate_type is another solution (as used in
4831 handle_transparent_union_attribute), but that doesn't play well
4832 with the stronger C++ type identity model. */
4833 if (TREE_CODE (ttype
) == RECORD_TYPE
4834 || TREE_CODE (ttype
) == UNION_TYPE
4835 || TREE_CODE (ttype
) == QUAL_UNION_TYPE
4836 || TREE_CODE (ttype
) == ENUMERAL_TYPE
)
4838 warning (OPT_Wattributes
,
4839 "ignoring attributes applied to %qT after definition",
4840 TYPE_MAIN_VARIANT (ttype
));
4841 return build_qualified_type (ttype
, quals
);
4844 ttype
= build_qualified_type (ttype
, TYPE_UNQUALIFIED
);
4845 ntype
= build_distinct_type_copy (ttype
);
4847 TYPE_ATTRIBUTES (ntype
) = attribute
;
4849 hstate
.add_int (code
);
4850 if (TREE_TYPE (ntype
))
4851 hstate
.add_object (TYPE_HASH (TREE_TYPE (ntype
)));
4852 attribute_hash_list (attribute
, hstate
);
4854 switch (TREE_CODE (ntype
))
4857 type_hash_list (TYPE_ARG_TYPES (ntype
), hstate
);
4860 if (TYPE_DOMAIN (ntype
))
4861 hstate
.add_object (TYPE_HASH (TYPE_DOMAIN (ntype
)));
4864 t
= TYPE_MAX_VALUE (ntype
);
4865 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
4866 hstate
.add_object (TREE_INT_CST_ELT (t
, i
));
4869 case FIXED_POINT_TYPE
:
4871 unsigned int precision
= TYPE_PRECISION (ntype
);
4872 hstate
.add_object (precision
);
4879 ntype
= type_hash_canon (hstate
.end(), ntype
);
4881 /* If the target-dependent attributes make NTYPE different from
4882 its canonical type, we will need to use structural equality
4883 checks for this type. */
4884 if (TYPE_STRUCTURAL_EQUALITY_P (ttype
)
4885 || !comp_type_attributes (ntype
, ttype
))
4886 SET_TYPE_STRUCTURAL_EQUALITY (ntype
);
4887 else if (TYPE_CANONICAL (ntype
) == ntype
)
4888 TYPE_CANONICAL (ntype
) = TYPE_CANONICAL (ttype
);
4890 ttype
= build_qualified_type (ntype
, quals
);
4892 else if (TYPE_QUALS (ttype
) != quals
)
4893 ttype
= build_qualified_type (ttype
, quals
);
4898 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4902 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4905 for (cl1 = clauses1, cl2 = clauses2;
4907 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4909 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4911 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4913 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4914 OMP_CLAUSE_DECL (cl2)) != 1)
4917 switch (OMP_CLAUSE_CODE (cl1))
4919 case OMP_CLAUSE_ALIGNED:
4920 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4921 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4924 case OMP_CLAUSE_LINEAR:
4925 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4926 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4929 case OMP_CLAUSE_SIMDLEN:
4930 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4931 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4940 /* Compare two constructor-element-type constants. Return 1 if the lists
4941 are known to be equal; otherwise return 0. */
4944 simple_cst_list_equal (const_tree l1, const_tree l2)
4946 while (l1 != NULL_TREE && l2 != NULL_TREE)
4948 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4951 l1 = TREE_CHAIN (l1);
4952 l2 = TREE_CHAIN (l2);
4958 /* Compare two identifier nodes representing attributes. Either one may
4959 be in wrapped __ATTR__ form. Return true if they are the same, false
4963 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4965 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4966 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4967 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4969 /* Identifiers can be compared directly for equality. */
4973 /* If they are not equal, they may still be one in the form
4974 'text' while the other one is in the form '__text__'. TODO:
4975 If we were storing attributes in normalized 'text' form, then
4976 this could all go away and we could take full advantage of
4977 the fact that we're comparing identifiers. :-) */
4978 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4979 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4981 if (attr2_len == attr1_len + 4)
4983 const char *p = IDENTIFIER_POINTER (attr2);
4984 const char *q = IDENTIFIER_POINTER (attr1);
4985 if (p[0] == '_' && p[1] == '_'
4986 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4987 && strncmp (q, p + 2, attr1_len) == 0)
4990 else if (attr2_len + 4 == attr1_len)
4992 const char *p = IDENTIFIER_POINTER (attr2);
4993 const char *q = IDENTIFIER_POINTER (attr1);
4994 if (q[0] == '_' && q[1] == '_'
4995 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4996 && strncmp (q + 2, p, attr2_len) == 0)
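/* Illustrative sketch (an addition, not GCC API): the same 'text' vs.
   '__text__' matching rule as above, restated over plain C strings so the
   intent is easy to see in isolation.  */
#if 0
static bool
example_attr_names_match (const char *a, const char *b)
{
  size_t la = strlen (a), lb = strlen (b);
  if (la == lb)
    return strcmp (a, b) == 0;
  /* B may be the wrapped form "__" A "__".  */
  if (lb == la + 4 && b[0] == '_' && b[1] == '_'
      && b[lb - 2] == '_' && b[lb - 1] == '_'
      && strncmp (a, b + 2, la) == 0)
    return true;
  /* Or A may be the wrapped form "__" B "__".  */
  if (la == lb + 4 && a[0] == '_' && a[1] == '_'
      && a[la - 2] == '_' && a[la - 1] == '_'
      && strncmp (b, a + 2, lb) == 0)
    return true;
  return false;
}
#endif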
5003 /* Compare two attributes for their value identity. Return true if the
5004 attribute values are known to be equal; otherwise return false. */
5007 attribute_value_equal (const_tree attr1, const_tree attr2)
5009 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
5012 if (TREE_VALUE (attr1) != NULL_TREE
5013 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
5014 && TREE_VALUE (attr2) != NULL_TREE
5015 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
5017 /* Handle attribute format. */
5018 if (is_attribute_p ("format", TREE_PURPOSE (attr1)))
5020 attr1 = TREE_VALUE (attr1);
5021 attr2 = TREE_VALUE (attr2);
5022 /* Compare the archetypes (printf/scanf/strftime/...). */
5023 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
5024 TREE_VALUE (attr2)))
5026 /* Archetypes are the same. Compare the rest. */
5027 return (simple_cst_list_equal (TREE_CHAIN (attr1),
5028 TREE_CHAIN (attr2)) == 1);
5030 return (simple_cst_list_equal (TREE_VALUE (attr1),
5031 TREE_VALUE (attr2)) == 1);
5034 if ((flag_openmp || flag_openmp_simd)
5035 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
5036 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
5037 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
5038 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
5039 TREE_VALUE (attr2));
5041 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
5044 /* Return 0 if the attributes for two types are incompatible, 1 if they
5045 are compatible, and 2 if they are nearly compatible (which causes a
5046 warning to be generated). */
5048 comp_type_attributes (const_tree type1, const_tree type2)
5050 const_tree a1 = TYPE_ATTRIBUTES (type1);
5051 const_tree a2 = TYPE_ATTRIBUTES (type2);
5056 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
5058 const struct attribute_spec *as;
5061 as = lookup_attribute_spec (get_attribute_name (a));
5062 if (!as || as->affects_type_identity == false)
5065 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
5066 if (!attr || !attribute_value_equal (a, attr))
5071 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
5073 const struct attribute_spec *as;
5075 as = lookup_attribute_spec (get_attribute_name (a));
5076 if (!as || as->affects_type_identity == false)
5079 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5081 /* We don't need to compare trees again, as we did this
5082 already in first loop. */
5084 /* All types - affecting identity - are equal, so
5085 there is no need to call target hook for comparison. */
5089 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
5091 /* As some type combinations - like default calling-convention - might
5092 be compatible, we have to call the target hook to get the final result. */
5093 return targetm.comp_type_attributes (type1, type2);
5096 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
5099 Record such modified types already made so we don't make duplicates. */
5102 build_type_attribute_variant (tree ttype, tree attribute)
5104 return build_type_attribute_qual_variant (ttype, attribute,
5105 TYPE_QUALS (ttype));
5109 /* Reset the expression *EXPR_P, a size or position.
5111 ??? We could reset all non-constant sizes or positions. But it's cheap
5112 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5114 We need to reset self-referential sizes or positions because they cannot
5115 be gimplified and thus can contain a CALL_EXPR after the gimplification
5116 is finished, which will run afoul of LTO streaming. And they need to be
5117 reset to something essentially dummy but not constant, so as to preserve
5118 the properties of the object they are attached to. */
5121 free_lang_data_in_one_sizepos (tree *expr_p)
5123 tree expr = *expr_p;
5124 if (CONTAINS_PLACEHOLDER_P (expr))
5125 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5129 /* Reset all the fields in a binfo node BINFO. We only keep
5130 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5133 free_lang_data_in_binfo (tree binfo)
5138 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5140 BINFO_VIRTUALS (binfo) = NULL_TREE;
5141 BINFO_BASE_ACCESSES (binfo) = NULL;
5142 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5143 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5145 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5146 free_lang_data_in_binfo (t);
5150 /* Reset all language specific information still present in TYPE. */
5153 free_lang_data_in_type (tree type)
5155 gcc_assert (TYPE_P (type));
5157 /* Give the FE a chance to remove its own data first. */
5158 lang_hooks.free_lang_data (type);
5160 TREE_LANG_FLAG_0 (type) = 0;
5161 TREE_LANG_FLAG_1 (type) = 0;
5162 TREE_LANG_FLAG_2 (type) = 0;
5163 TREE_LANG_FLAG_3 (type) = 0;
5164 TREE_LANG_FLAG_4 (type) = 0;
5165 TREE_LANG_FLAG_5 (type) = 0;
5166 TREE_LANG_FLAG_6 (type) = 0;
5168 if (TREE_CODE (type) == FUNCTION_TYPE)
5170 /* Remove the const and volatile qualifiers from arguments. The
5171 C++ front end removes them, but the C front end does not,
5172 leading to false ODR violation errors when merging two
5173 instances of the same function signature compiled by
5174 different front ends. */
5177 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5179 tree arg_type = TREE_VALUE (p);
5181 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5183 int quals = TYPE_QUALS (arg_type)
5185 & ~TYPE_QUAL_VOLATILE;
5186 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5187 free_lang_data_in_type (TREE_VALUE (p));
5189 /* C++ FE uses TREE_PURPOSE to store initial values. */
5190 TREE_PURPOSE (p) = NULL;
5192 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5193 TYPE_MINVAL (type) = NULL;
5195 if (TREE_CODE (type) == METHOD_TYPE)
5199 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5201 /* C++ FE uses TREE_PURPOSE to store initial values. */
5202 TREE_PURPOSE (p) = NULL;
5204 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5205 TYPE_MINVAL (type) = NULL;
5208 /* Remove members that are not actually FIELD_DECLs from the field
5209 list of an aggregate. These occur in C++. */
5210 if (RECORD_OR_UNION_TYPE_P (type))
5214 /* Note that TYPE_FIELDS can be shared across distinct
5215 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5216 to be removed, we cannot set its TREE_CHAIN to NULL.
5217 Otherwise, we would not be able to find all the other fields
5218 in the other instances of this TREE_TYPE.
5220 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5222 member = TYPE_FIELDS (type);
5225 if (TREE_CODE (member) == FIELD_DECL
5226 || (TREE_CODE (member) == TYPE_DECL
5227 && !DECL_IGNORED_P (member)
5228 && debug_info_level > DINFO_LEVEL_TERSE
5229 && !is_redundant_typedef (member)))
5232 TREE_CHAIN (prev) = member;
5234 TYPE_FIELDS (type) = member;
5238 member = TREE_CHAIN (member);
5242 TREE_CHAIN (prev) = NULL_TREE;
5244 TYPE_FIELDS (type) = NULL_TREE;
5246 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5247 and dangle the pointer from time to time. */
5248 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5249 TYPE_VFIELD (type) = NULL_TREE;
5251 /* Remove TYPE_METHODS list. While it would be nice to keep it
5252 to enable ODR warnings about different method lists, doing so
5253 seems to impractically increase size of LTO data streamed.
5254 Keep the information if TYPE_METHODS was non-NULL. This is used
5255 by function.c and pretty printers. */
5256 if (TYPE_METHODS (type))
5257 TYPE_METHODS (type) = error_mark_node;
5258 if (TYPE_BINFO (type))
5260 free_lang_data_in_binfo (TYPE_BINFO (type));
5261 /* We need to preserve link to bases and virtual table for all
5262 polymorphic types to make devirtualization machinery working.
5263 Debug output cares only about bases, but output also
5264 virtual table pointers so merging of -fdevirtualize and
5265 -fno-devirtualize units is easier. */
5266 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5267 || !flag_devirtualize)
5268 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5269 && !BINFO_VTABLE (TYPE_BINFO (type)))
5270 || debug_info_level != DINFO_LEVEL_NONE))
5271 TYPE_BINFO (type) = NULL;
5276 /* For non-aggregate types, clear out the language slot (which
5277 overloads TYPE_BINFO). */
5278 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5280 if (INTEGRAL_TYPE_P (type)
5281 || SCALAR_FLOAT_TYPE_P (type)
5282 || FIXED_POINT_TYPE_P (type))
5284 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5285 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5289 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5290 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5292 if (TYPE_CONTEXT (type)
5293 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5295 tree ctx = TYPE_CONTEXT (type);
5298 ctx = BLOCK_SUPERCONTEXT (ctx);
5300 while (ctx && TREE_CODE (ctx) == BLOCK);
5301 TYPE_CONTEXT (type) = ctx;
5306 /* Return true if DECL may need an assembler name to be set. */
5309 need_assembler_name_p (tree decl)
5311 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5312 Rule merging. This makes type_odr_p to return true on those types during
5313 LTO and by comparing the mangled name, we can say what types are intended
5314 to be equivalent across compilation unit.
5316 We do not store names of type_in_anonymous_namespace_p.
5318 Record, union and enumeration type have linkage that allows use
5319 to check type_in_anonymous_namespace_p. We do not mangle compound types
5320 that always can be compared structurally.
5322 Similarly for builtin types, we compare properties of their main variant.
5323 A special case are integer types where mangling do make differences
5324 between char/signed char/unsigned char etc. Storing name for these makes
5325 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5326 See cp/mangle.c:write_builtin_type for details. */
5328 if (flag_lto_odr_type_mering
5329 && TREE_CODE (decl) == TYPE_DECL
5331 && decl == TYPE_NAME (TREE_TYPE (decl))
5332 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5333 && (type_with_linkage_p (TREE_TYPE (decl))
5334 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5335 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5336 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5337 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5338 if (TREE_CODE (decl) != FUNCTION_DECL
5339 && TREE_CODE (decl) != VAR_DECL)
5342 /* If DECL already has its assembler name set, it does not need a
5344 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5345 || DECL_ASSEMBLER_NAME_SET_P (decl))
5348 /* Abstract decls do not need an assembler name. */
5349 if (DECL_ABSTRACT_P (decl))
5352 /* For VAR_DECLs, only static, public and external symbols need an
5354 if (TREE_CODE (decl) == VAR_DECL
5355 && !TREE_STATIC (decl)
5356 && !TREE_PUBLIC (decl)
5357 && !DECL_EXTERNAL (decl))
5360 if (TREE_CODE (decl) == FUNCTION_DECL)
5362 /* Do not set assembler name on builtins. Allow RTL expansion to
5363 decide whether to expand inline or via a regular call. */
5364 if (DECL_BUILT_IN (decl)
5365 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5368 /* Functions represented in the callgraph need an assembler name. */
5369 if (cgraph_node::get (decl) != NULL)
5372 /* Unused and not public functions don't need an assembler name. */
5373 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5381 /* Reset all language specific information still present in symbol
5385 free_lang_data_in_decl (tree decl)
5387 gcc_assert (DECL_P (decl));
5389 /* Give the FE a chance to remove its own data first. */
5390 lang_hooks.free_lang_data (decl);
5392 TREE_LANG_FLAG_0 (decl) = 0;
5393 TREE_LANG_FLAG_1 (decl) = 0;
5394 TREE_LANG_FLAG_2 (decl) = 0;
5395 TREE_LANG_FLAG_3 (decl) = 0;
5396 TREE_LANG_FLAG_4 (decl) = 0;
5397 TREE_LANG_FLAG_5 (decl) = 0;
5398 TREE_LANG_FLAG_6 (decl) = 0;
5400 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5401 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5402 if (TREE_CODE (decl) == FIELD_DECL)
5404 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5405 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5406 DECL_QUALIFIER (decl) = NULL_TREE;
5409 if (TREE_CODE (decl) == FUNCTION_DECL)
5411 struct cgraph_node *node;
5412 if (!(node = cgraph_node::get (decl))
5413 || (!node->definition && !node->clones))
5416 node->release_body ();
5419 release_function_body (decl);
5420 DECL_ARGUMENTS (decl) = NULL;
5421 DECL_RESULT (decl) = NULL;
5422 DECL_INITIAL (decl) = error_mark_node;
5425 if (gimple_has_body_p (decl))
5429 /* If DECL has a gimple body, then the context for its
5430 arguments must be DECL. Otherwise, it doesn't really
5431 matter, as we will not be emitting any code for DECL. In
5432 general, there may be other instances of DECL created by
5433 the front end and since PARM_DECLs are generally shared,
5434 their DECL_CONTEXT changes as the replicas of DECL are
5435 created. The only time where DECL_CONTEXT is important
5436 is for the FUNCTION_DECLs that have a gimple body (since
5437 the PARM_DECL will be used in the function's body). */
5438 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5439 DECL_CONTEXT (t) = decl;
5440 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5441 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5442 = target_option_default_node;
5443 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5444 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5445 = optimization_default_node;
5448 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5449 At this point, it is not needed anymore. */
5450 DECL_SAVED_TREE (decl) = NULL_TREE;
5452 /* Clear the abstract origin if it refers to a method. Otherwise
5453 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5454 origin will not be output correctly. */
5455 if (DECL_ABSTRACT_ORIGIN (decl)
5456 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5457 && RECORD_OR_UNION_TYPE_P
5458 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5459 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5461 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5462 DECL_VINDEX referring to itself into a vtable slot number as it
5463 should. Happens with functions that are copied and then forgotten
5464 about. Just clear it, it won't matter anymore. */
5465 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5466 DECL_VINDEX (decl) = NULL_TREE;
5468 else if (TREE_CODE (decl) == VAR_DECL)
5470 if ((DECL_EXTERNAL (decl)
5471 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5472 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5473 DECL_INITIAL (decl) = NULL_TREE;
5475 else if (TREE_CODE (decl) == TYPE_DECL
5476 || TREE_CODE (decl) == FIELD_DECL)
5477 DECL_INITIAL (decl) = NULL_TREE;
5478 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5479 && DECL_INITIAL (decl)
5480 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5482 /* Strip builtins from the translation-unit BLOCK. We still have targets
5483 without builtin_decl_explicit support and also builtins are shared
5484 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5485 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5489 if (TREE_CODE (var) == FUNCTION_DECL
5490 && DECL_BUILT_IN (var))
5491 *nextp = TREE_CHAIN (var);
5493 nextp = &TREE_CHAIN (var);
5499 /* Data used when collecting DECLs and TYPEs for language data removal. */
5501 struct free_lang_data_d
5503 /* Worklist to avoid excessive recursion. */
5506 /* Set of traversed objects. Used to avoid duplicate visits. */
5507 hash_set<tree> *pset;
5509 /* Array of symbols to process with free_lang_data_in_decl. */
5512 /* Array of types to process with free_lang_data_in_type. */
5517 /* Save all language fields needed to generate proper debug information
5518 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5521 save_debug_info_for_decl (tree t)
5523 /*struct saved_debug_info_d *sdi;*/
5525 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5527 /* FIXME. Partial implementation for saving debug info removed. */
5531 /* Save all language fields needed to generate proper debug information
5532 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5535 save_debug_info_for_type (tree t)
5537 /*struct saved_debug_info_d *sdi;*/
5539 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5541 /* FIXME. Partial implementation for saving debug info removed. */
5545 /* Add type or decl T to one of the list of tree nodes that need their
5546 language data removed. The lists are held inside FLD. */
5549 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5553 fld->decls.safe_push (t);
5554 if (debug_info_level > DINFO_LEVEL_TERSE)
5555 save_debug_info_for_decl (t);
5557 else if (TYPE_P (t))
5559 fld->types.safe_push (t);
5560 if (debug_info_level > DINFO_LEVEL_TERSE)
5561 save_debug_info_for_type (t);
5567 /* Push tree node T into FLD->WORKLIST. */
5570 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5572 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5573 fld->worklist.safe_push ((t));
5577 /* Operand callback helper for free_lang_data_in_node. *TP is the
5578 subtree operand being considered. */
5581 find_decls_types_r (tree *tp, int *ws, void *data)
5584 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5586 if (TREE_CODE (t) == TREE_LIST)
5589 /* Language specific nodes will be removed, so there is no need
5590 to gather anything under them. */
5591 if (is_lang_specific (t))
5599 /* Note that walk_tree does not traverse every possible field in
5600 decls, so we have to do our own traversals here. */
5601 add_tree_to_fld_list (t, fld);
5603 fld_worklist_push (DECL_NAME (t), fld);
5604 fld_worklist_push (DECL_CONTEXT (t), fld);
5605 fld_worklist_push (DECL_SIZE (t), fld);
5606 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5608 /* We are going to remove everything under DECL_INITIAL for
5609 TYPE_DECLs. No point walking them. */
5610 if (TREE_CODE (t) != TYPE_DECL)
5611 fld_worklist_push (DECL_INITIAL (t), fld);
5613 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5614 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5616 if (TREE_CODE (t) == FUNCTION_DECL)
5618 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5619 fld_worklist_push (DECL_RESULT (t), fld);
5621 else if (TREE_CODE (t) == TYPE_DECL)
5623 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5625 else if (TREE_CODE (t) == FIELD_DECL)
5627 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5628 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5629 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5630 fld_worklist_push (DECL_FCONTEXT (t), fld);
5633 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5634 && DECL_HAS_VALUE_EXPR_P (t))
5635 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5637 if (TREE_CODE (t) != FIELD_DECL
5638 && TREE_CODE (t) != TYPE_DECL)
5639 fld_worklist_push (TREE_CHAIN (t), fld);
5642 else if (TYPE_P (t))
5644 /* Note that walk_tree does not traverse every possible field in
5645 types, so we have to do our own traversals here. */
5646 add_tree_to_fld_list (t, fld);
5648 if (!RECORD_OR_UNION_TYPE_P (t))
5649 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5650 fld_worklist_push (TYPE_SIZE (t), fld);
5651 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5652 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5653 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5654 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5655 fld_worklist_push (TYPE_NAME (t), fld);
5656 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5657 them and thus do not and want not to reach unused pointer types
5659 if (!POINTER_TYPE_P (t))
5660 fld_worklist_push (TYPE_MINVAL (t), fld);
5661 if (!RECORD_OR_UNION_TYPE_P (t))
5662 fld_worklist_push (TYPE_MAXVAL (t), fld);
5663 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5664 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5665 do not and want not to reach unused variants this way. */
5666 if (TYPE_CONTEXT (t))
5668 tree ctx = TYPE_CONTEXT (t);
5669 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5670 So push that instead. */
5671 while (ctx && TREE_CODE (ctx) == BLOCK)
5672 ctx = BLOCK_SUPERCONTEXT (ctx);
5673 fld_worklist_push (ctx, fld);
5675 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5676 and want not to reach unused types this way. */
5678 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5682 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5683 fld_worklist_push (TREE_TYPE (tem), fld);
5684 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5686 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5687 && TREE_CODE (tem) == TREE_LIST)
5690 fld_worklist_push (TREE_VALUE (tem), fld);
5691 tem = TREE_CHAIN (tem);
5695 if (RECORD_OR_UNION_TYPE_P (t))
5698 /* Push all TYPE_FIELDS - there can be interleaving interesting
5699 and non-interesting things. */
5700 tem = TYPE_FIELDS (t);
5703 if (TREE_CODE (tem) == FIELD_DECL
5704 || (TREE_CODE (tem) == TYPE_DECL
5705 && !DECL_IGNORED_P (tem)
5706 && debug_info_level > DINFO_LEVEL_TERSE
5707 && !is_redundant_typedef (tem)))
5708 fld_worklist_push (tem, fld);
5709 tem = TREE_CHAIN (tem);
5713 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5716 else if (TREE_CODE (t) == BLOCK)
5719 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5720 fld_worklist_push (tem, fld);
5721 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5722 fld_worklist_push (tem, fld);
5723 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5726 if (TREE_CODE (t) != IDENTIFIER_NODE
5727 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5728 fld_worklist_push (TREE_TYPE (t), fld);
5734 /* Find decls and types in T. */
5737 find_decls_types (tree t, struct free_lang_data_d *fld)
5741 if (!fld->pset->contains (t))
5742 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5743 if (fld->worklist.is_empty ())
5745 t = fld->worklist.pop ();
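/* Illustrative sketch (an addition for exposition): the loop above is the
   classic worklist pattern; in miniature, and with made-up names, it looks
   like this.  */
#if 0
static void
example_worklist_walk (vec<tree> &worklist, hash_set<tree> &visited)
{
  while (!worklist.is_empty ())
    {
      tree t = worklist.pop ();
      if (visited.add (t))
	continue;  /* add () returns true if T was already visited.  */
      /* ... visit T, pushing any interesting operands onto WORKLIST ... */
    }
}
#endif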
5749 /* Translate all the types in LIST with the corresponding runtime
5753 get_eh_types_for_runtime (tree list)
5757 if (list == NULL_TREE)
5760 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5762 list = TREE_CHAIN (list);
5765 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5766 TREE_CHAIN (prev) = n;
5767 prev = TREE_CHAIN (prev);
5768 list = TREE_CHAIN (list);
5775 /* Find decls and types referenced in EH region R and store them in
5776 FLD->DECLS and FLD->TYPES. */
5779 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5790 /* The types referenced in each catch must first be changed to the
5791 EH types used at runtime. This removes references to FE types
5793 for (c = r->u.eh_try.first_catch; c; c = c->next_catch)
5795 c->type_list = get_eh_types_for_runtime (c->type_list);
5796 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5801 case ERT_ALLOWED_EXCEPTIONS:
5802 r->u.allowed.type_list
5803 = get_eh_types_for_runtime (r->u.allowed.type_list);
5804 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5807 case ERT_MUST_NOT_THROW:
5808 walk_tree (&r->u.must_not_throw.failure_decl,
5809 find_decls_types_r, fld, fld->pset);
5815 /* Find decls and types referenced in cgraph node N and store them in
5816 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5817 look for *every* kind of DECL and TYPE node reachable from N,
5818 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5819 NAMESPACE_DECLs, etc). */
5822 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5825 struct function *fn;
5829 find_decls_types (n->decl, fld);
5831 if (!gimple_has_body_p (n->decl))
5834 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5836 fn = DECL_STRUCT_FUNCTION (n->decl);
5838 /* Traverse locals. */
5839 FOR_EACH_LOCAL_DECL (fn, ix, t)
5840 find_decls_types (t, fld);
5842 /* Traverse EH regions in FN. */
5845 FOR_ALL_EH_REGION_FN (r, fn)
5846 find_decls_types_in_eh_region (r, fld);
5849 /* Traverse every statement in FN. */
5850 FOR_EACH_BB_FN (bb, fn)
5853 gimple_stmt_iterator si;
5856 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5858 gphi *phi = psi.phi ();
5860 for (i = 0; i < gimple_phi_num_args (phi); i++)
5862 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5863 find_decls_types (*arg_p, fld);
5867 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5869 gimple *stmt = gsi_stmt (si);
5871 if (is_gimple_call (stmt))
5872 find_decls_types (gimple_call_fntype (stmt), fld);
5874 for (i = 0; i < gimple_num_ops (stmt); i++)
5876 tree arg = gimple_op (stmt, i);
5877 find_decls_types (arg, fld);
5884 /* Find decls and types referenced in varpool node N and store them in
5885 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5886 look for *every* kind of DECL and TYPE node reachable from N,
5887 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5888 NAMESPACE_DECLs, etc). */
5891 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5893 find_decls_types (v->decl, fld);
5896 /* If T needs an assembler name, have one created for it. */
5899 assign_assembler_name_if_neeeded (tree t)
5901 if (need_assembler_name_p (t))
5903 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5904 diagnostics that use input_location to show locus
5905 information. The problem here is that, at this point,
5906 input_location is generally anchored to the end of the file
5907 (since the parser is long gone), so we don't have a good
5908 position to pin it to.
5910 To alleviate this problem, this uses the location of T's
5911 declaration. Examples of this are
5912 testsuite/g++.dg/template/cond2.C and
5913 testsuite/g++.dg/template/pr35240.C. */
5914 location_t saved_location = input_location;
5915 input_location = DECL_SOURCE_LOCATION (t);
5917 decl_assembler_name (t);
5919 input_location = saved_location;
5924 /* Free language specific information for every operand and expression
5925 in every node of the call graph. This process operates in three stages:
5927 1- Every callgraph node and varpool node is traversed looking for
5928 decls and types embedded in them. This is a more exhaustive
5929 search than that done by find_referenced_vars, because it will
5930 also collect individual fields, decls embedded in types, etc.
5932 2- All the decls found are sent to free_lang_data_in_decl.
5934 3- All the types found are sent to free_lang_data_in_type.
5936 The ordering between decls and types is important because
5937 free_lang_data_in_decl sets assembler names, which includes
5938 mangling. So types cannot be freed up until assembler names have
5942 free_lang_data_in_cgraph (void)
5944 struct cgraph_node *n;
5946 struct free_lang_data_d fld;
5951 /* Initialize sets and arrays to store referenced decls and types. */
5952 fld.pset = new hash_set<tree>;
5953 fld.worklist.create (0);
5954 fld.decls.create (100);
5955 fld.types.create (100);
5957 /* Find decls and types in the body of every function in the callgraph. */
5958 FOR_EACH_FUNCTION (n)
5959 find_decls_types_in_node (n, &fld);
5961 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5962 find_decls_types (p->decl, &fld);
5964 /* Find decls and types in every varpool symbol. */
5965 FOR_EACH_VARIABLE (v)
5966 find_decls_types_in_var (v, &fld);
5968 /* Set the assembler name on every decl found. We need to do this
5969 now because free_lang_data_in_decl will invalidate data needed
5970 for mangling. This breaks mangling on interdependent decls. */
5971 FOR_EACH_VEC_ELT (fld.decls, i, t)
5972 assign_assembler_name_if_neeeded (t);
5974 /* Traverse every decl found freeing its language data. */
5975 FOR_EACH_VEC_ELT (fld.decls, i, t)
5976 free_lang_data_in_decl (t);
5978 /* Traverse every type found freeing its language data. */
5979 FOR_EACH_VEC_ELT (fld.types, i, t)
5980 free_lang_data_in_type (t);
5983 FOR_EACH_VEC_ELT (fld.types, i, t)
5988 fld.worklist.release ();
5989 fld.decls.release ();
5990 fld.types.release ();
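/* Note (added commentary): the collection above deliberately finishes
   stage 1 (gathering) and stage 2 (assembler-name assignment and
   free_lang_data_in_decl) before stage 3 (free_lang_data_in_type), matching
   the ordering constraint described in the comment before this function:
   mangling still needs front-end type data, so no type may be scrubbed until
   every assembler name has been produced.  */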
5994 /* Free resources that are used by FE but are not needed once they are done. */
5997 free_lang_data (void)
6001 /* If we are the LTO frontend we have freed lang-specific data already. */
6003 || (!flag_generate_lto && !flag_generate_offload))
6006 /* Allocate and assign alias sets to the standard integer types
6007 while the slots are still in the way the frontends generated them. */
6008 for (i = 0; i < itk_none; ++i)
6009 if (integer_types[i])
6010 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6012 /* Traverse the IL resetting language specific information for
6013 operands, expressions, etc. */
6014 free_lang_data_in_cgraph ();
6016 /* Create gimple variants for common types. */
6017 ptrdiff_type_node = integer_type_node;
6018 fileptr_type_node = ptr_type_node;
6020 /* Reset some langhooks. Do not reset types_compatible_p, it may
6021 still be used indirectly via the get_alias_set langhook. */
6022 lang_hooks.dwarf_name = lhd_dwarf_name;
6023 lang_hooks.decl_printable_name = gimple_decl_printable_name;
6024 lang_hooks.gimplify_expr = lhd_gimplify_expr;
6026 /* We do not want the default decl_assembler_name implementation,
6027 rather if we have fixed everything we want a wrapper around it
6028 asserting that all non-local symbols already got their assembler
6029 name and only produce assembler names for local symbols. Or rather
6030 make sure we never call decl_assembler_name on local symbols and
6031 devise a separate, middle-end private scheme for it. */
6033 /* Reset diagnostic machinery. */
6034 tree_diagnostics_defaults (global_dc);
6042 const pass_data pass_data_ipa_free_lang_data =
6044 SIMPLE_IPA_PASS, /* type */
6045 "*free_lang_data", /* name */
6046 OPTGROUP_NONE, /* optinfo_flags */
6047 TV_IPA_FREE_LANG_DATA, /* tv_id */
6048 0, /* properties_required */
6049 0, /* properties_provided */
6050 0, /* properties_destroyed */
6051 0, /* todo_flags_start */
6052 0, /* todo_flags_finish */
6055 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6058 pass_ipa_free_lang_data (gcc::context *ctxt)
6059 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6062 /* opt_pass methods: */
6063 virtual unsigned int execute (function *) { return free_lang_data (); }
6065 }; // class pass_ipa_free_lang_data
6069 simple_ipa_opt_pass *
6070 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6072 return new pass_ipa_free_lang_data (ctxt);
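/* For orientation (added commentary): like other simple IPA passes, the
   factory above is referenced from the pass pipeline (see passes.def, where
   pass_ipa_free_lang_data is listed), and the pass body simply forwards to
   free_lang_data () via the execute method defined just above.  */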
6075 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6076 ATTR_NAME. Also used internally by remove_attribute(). */
6078 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6080 size_t ident_len = IDENTIFIER_LENGTH (ident);
6082 if (ident_len == attr_len)
6084 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6087 else if (ident_len == attr_len + 4)
6089 /* There is the possibility that ATTR is 'text' and IDENT is
6091 const char *p = IDENTIFIER_POINTER (ident);
6092 if (p[0] == '_' && p[1] == '_'
6093 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6094 && strncmp (attr_name, p + 2, attr_len) == 0)
6101 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6102 of ATTR_NAME, and LIST is not NULL_TREE. */
6104 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6108 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6110 if (ident_len == attr_len)
6112 if (!strcmp (attr_name,
6113 IDENTIFIER_POINTER (get_attribute_name (list))))
6116 /* TODO: If we made sure that attributes were stored in the
6117 canonical form without '__...__' (ie, as in 'text' as opposed
6118 to '__text__') then we could avoid the following case. */
6119 else if (ident_len == attr_len + 4)
6121 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6122 if (p[0] == '_' && p[1] == '_'
6123 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6124 && strncmp (attr_name, p + 2, attr_len) == 0)
6127 list = TREE_CHAIN (list);
6133 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6134 return a pointer to the attribute's list first element if the attribute
6135 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6139 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6144 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6146 if (attr_len > ident_len)
6148 list = TREE_CHAIN (list);
6152 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6154 if (strncmp (attr_name, p, attr_len) == 0)
6157 /* TODO: If we made sure that attributes were stored in the
6158 canonical form without '__...__' (ie, as in 'text' as opposed
6159 to '__text__') then we could avoid the following case. */
6160 if (p[0] == '_' && p[1] == '_' &&
6161 strncmp (attr_name, p + 2, attr_len) == 0)
6164 list = TREE_CHAIN (list);
6171 /* A variant of lookup_attribute() that can be used with an identifier
6172 as the first argument, and where the identifier can be either
6173 'text' or '__text__'.
6175 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6176 return a pointer to the attribute's list element if the attribute
6177 is part of the list, or NULL_TREE if not found. If the attribute
6178 appears more than once, this only returns the first occurrence; the
6179 TREE_CHAIN of the return value should be passed back in if further
6180 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6181 can be in the form 'text' or '__text__'. */
6183 lookup_ident_attribute (tree attr_identifier, tree list)
6185 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6189 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6190 == IDENTIFIER_NODE);
6192 if (cmp_attrib_identifiers (attr_identifier,
6193 get_attribute_name (list)))
6196 list = TREE_CHAIN (list);
6202 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6206 remove_attribute (const char *attr_name, tree list)
6209 size_t attr_len = strlen (attr_name);
6211 gcc_checking_assert (attr_name[0] != '_');
6213 for (p = &list; *p; )
6216 /* TODO: If we were storing attributes in normalized form, here
6217 we could use a simple strcmp(). */
6218 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6219 *p = TREE_CHAIN (l);
6221 p = &TREE_CHAIN (l);
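/* Illustrative sketch (an addition; attribute names are placeholders): how
   the attribute-list helpers in this area are typically combined by a
   caller.  */
#if 0
static tree
example_strip_and_check (tree decl)
{
  /* Drop any "deprecated" attribute and ask whether "used" remains.  */
  tree attrs = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));
  if (lookup_attribute ("used", attrs))
    DECL_ATTRIBUTES (decl) = attrs;
  return attrs;
}
#endif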
6227 /* Return an attribute list that is the union of a1 and a2. */
6230 merge_attributes (tree a1, tree a2)
6234 /* Either one unset? Take the set one. */
6236 if ((attributes = a1) == 0)
6239 /* One that completely contains the other? Take it. */
6241 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6243 if (attribute_list_contained (a2, a1))
6247 /* Pick the longest list, and hang on the other list. */
6249 if (list_length (a1) < list_length (a2))
6250 attributes = a2, a2 = a1;
6252 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6255 for (a = lookup_ident_attribute (get_attribute_name (a2),
6257 a != NULL_TREE && !attribute_value_equal (a, a2);
6258 a = lookup_ident_attribute (get_attribute_name (a2),
6263 a1 = copy_node (a2);
6264 TREE_CHAIN (a1) = attributes;
6273 /* Given types T1 and T2, merge their attributes and return
6277 merge_type_attributes (tree t1, tree t2)
6279 return merge_attributes (TYPE_ATTRIBUTES (t1),
6280 TYPE_ATTRIBUTES (t2));
6283 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6287 merge_decl_attributes (tree olddecl, tree newdecl)
6289 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6290 DECL_ATTRIBUTES (newdecl));
6293 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6295 /* Specialization of merge_decl_attributes for various Windows targets.
6297 This handles the following situation:
6299 __declspec (dllimport) int foo;
6302 The second instance of `foo' nullifies the dllimport. */
6305 merge_dllimport_decl_attributes (tree old, tree new_tree)
6308 int delete_dllimport_p = 1;
6310 /* What we need to do here is remove from `old' dllimport if it doesn't
6311 appear in `new'. dllimport behaves like extern: if a declaration is
6312 marked dllimport and a definition appears later, then the object
6313 is not dllimport'd. We also remove a `new' dllimport if the old list
6314 contains dllexport: dllexport always overrides dllimport, regardless
6315 of the order of declaration. */
6316 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6317 delete_dllimport_p = 0;
6318 else if (DECL_DLLIMPORT_P (new_tree)
6319 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6321 DECL_DLLIMPORT_P (new_tree) = 0;
6322 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6323 "dllimport ignored", new_tree);
6325 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6327 /* Warn about overriding a symbol that has already been used, e.g.:
6328 extern int __attribute__ ((dllimport)) foo;
6329 int* bar () {return &foo;}
6332 if (TREE_USED (old))
6334 warning (0, "%q+D redeclared without dllimport attribute "
6335 "after being referenced with dll linkage", new_tree);
6336 /* If we have used a variable's address with dllimport linkage,
6337 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6338 decl may already have had TREE_CONSTANT computed.
6339 We still remove the attribute so that assembler code refers
6340 to '&foo rather than '_imp__foo'. */
6341 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6342 DECL_DLLIMPORT_P (new_tree) = 1;
6345 /* Let an inline definition silently override the external reference,
6346 but otherwise warn about attribute inconsistency. */
6347 else if (TREE_CODE (new_tree) == VAR_DECL
6348 || !DECL_DECLARED_INLINE_P (new_tree))
6349 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6350 "previous dllimport ignored", new_tree);
6353 delete_dllimport_p = 0;
6355 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6357 if (delete_dllimport_p)
6358 a = remove_attribute ("dllimport", a);
6363 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6364 struct attribute_spec.handler. */
6367 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6373 /* These attributes may apply to structure and union types being created,
6374 but otherwise should pass to the declaration involved. */
6377 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6378 | (int) ATTR_FLAG_ARRAY_NEXT))
6380 *no_add_attrs = true;
6381 return tree_cons (name, args, NULL_TREE);
6383 if (TREE_CODE (node) == RECORD_TYPE
6384 || TREE_CODE (node) == UNION_TYPE)
6386 node = TYPE_NAME (node);
6392 warning (OPT_Wattributes, "%qE attribute ignored",
6394 *no_add_attrs = true;
6399 if (TREE_CODE (node) != FUNCTION_DECL
6400 && TREE_CODE (node) != VAR_DECL
6401 && TREE_CODE (node) != TYPE_DECL)
6403 *no_add_attrs = true;
6404 warning (OPT_Wattributes, "%qE attribute ignored",
6409 if (TREE_CODE (node) == TYPE_DECL
6410 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6411 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6413 *no_add_attrs = true;
6414 warning (OPT_Wattributes, "%qE attribute ignored",
6419 is_dllimport = is_attribute_p ("dllimport", name);
6421 /* Report error on dllimport ambiguities seen now before they cause
6425 /* Honor any target-specific overrides. */
6426 if (!targetm.valid_dllimport_attribute_p (node))
6427 *no_add_attrs = true;
6429 else if (TREE_CODE (node) == FUNCTION_DECL
6430 && DECL_DECLARED_INLINE_P (node))
6432 warning (OPT_Wattributes, "inline function %q+D declared as "
6433 " dllimport: attribute ignored", node);
6434 *no_add_attrs = true;
6436 /* Like MS, treat definition of dllimported variables and
6437 non-inlined functions on declaration as syntax errors. */
6438 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6440 error ("function %q+D definition is marked dllimport", node);
6441 *no_add_attrs = true;
6444 else if (TREE_CODE (node) == VAR_DECL)
6446 if (DECL_INITIAL (node))
6448 error ("variable %q+D definition is marked dllimport",
6450 *no_add_attrs = true;
6453 /* `extern' needn't be specified with dllimport.
6454 Specify `extern' now and hope for the best. Sigh. */
6455 DECL_EXTERNAL (node) = 1;
6456 /* Also, implicitly give dllimport'd variables declared within
6457 a function global scope, unless declared static. */
6458 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6459 TREE_PUBLIC (node) = 1;
6462 if (*no_add_attrs == false)
6463 DECL_DLLIMPORT_P (node) = 1;
6465 else if (TREE_CODE (node) == FUNCTION_DECL
6466 && DECL_DECLARED_INLINE_P (node)
6467 && flag_keep_inline_dllexport)
6468 /* An exported function, even if inline, must be emitted. */
6469 DECL_EXTERNAL (node) = 0;
6471 /* Report error if symbol is not accessible at global scope. */
6472 if (!TREE_PUBLIC (node)
6473 && (TREE_CODE (node) == VAR_DECL
6474 || TREE_CODE (node) == FUNCTION_DECL))
6476 error ("external linkage required for symbol %q+D because of "
6477 "%qE attribute", node, name);
6478 *no_add_attrs = true;
6481 /* A dllexport'd entity must have default visibility so that other
6482 program units (shared libraries or the main executable) can see
6483 it. A dllimport'd entity must have default visibility so that
6484 the linker knows that undefined references within this program
6485 unit can be resolved by the dynamic linker. */
6488 if (DECL_VISIBILITY_SPECIFIED (node)
6489 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6490 error ("%qE implies default visibility, but %qD has already "
6491 "been declared with a different visibility",
6493 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6494 DECL_VISIBILITY_SPECIFIED (node) = 1;
6500 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6502 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6503 of the various TYPE_QUAL values. */
6506 set_type_quals (tree type, int type_quals)
6508 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6509 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6510 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6511 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6512 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6515 /* Returns true iff unqualified CAND and BASE are equivalent. */
6518 check_base_type (const_tree cand, const_tree base)
6520 return (TYPE_NAME (cand) == TYPE_NAME (base)
6521 /* Apparently this is needed for Objective-C. */
6522 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6523 /* Check alignment. */
6524 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6525 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6526 TYPE_ATTRIBUTES (base)));
6529 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6532 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6534 return (TYPE_QUALS (cand) == type_quals
6535 && check_base_type (cand, base));
6538 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6541 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6543 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6544 && TYPE_NAME (cand) == TYPE_NAME (base)
6545 /* Apparently this is needed for Objective-C. */
6546 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6547 /* Check alignment. */
6548 && TYPE_ALIGN (cand) == align
6549 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6550 TYPE_ATTRIBUTES (base)));
6553 /* This function checks to see if TYPE matches the size one of the built-in
6554 atomic types, and returns that core atomic type. */
6557 find_atomic_core_type (tree type)
6559 tree base_atomic_type;
6561 /* Only handle complete types. */
6562 if (TYPE_SIZE (type) == NULL_TREE)
6565 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6569 base_atomic_type = atomicQI_type_node;
6573 base_atomic_type = atomicHI_type_node;
6577 base_atomic_type = atomicSI_type_node;
6581 base_atomic_type = atomicDI_type_node;
6585 base_atomic_type = atomicTI_type_node;
6589 base_atomic_type = NULL_TREE;
6592 return base_atomic_type;
6595 /* Return a version of the TYPE, qualified as indicated by the
6596 TYPE_QUALS, if one exists. If no qualified version exists yet,
6597 return NULL_TREE. */
6600 get_qualified_type (tree type, int type_quals)
6604 if (TYPE_QUALS (type) == type_quals)
6607 /* Search the chain of variants to see if there is already one there just
6608 like the one we need to have. If so, use that existing one. We must
6609 preserve the TYPE_NAME, since there is code that depends on this. */
6610 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6611 if (check_qualified_type (t, type, type_quals))
6617 /* Like get_qualified_type, but creates the type if it does not
6618 exist. This function never returns NULL_TREE. */
6621 build_qualified_type (tree type, int type_quals)
6625 /* See if we already have the appropriate qualified variant. */
6626 t = get_qualified_type (type, type_quals);
6628 /* If not, build it. */
6631 t = build_variant_type_copy (type);
6632 set_type_quals (t, type_quals);
6634 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6636 /* See if this object can map to a basic atomic type. */
6637 tree atomic_type = find_atomic_core_type (type);
6640 /* Ensure the alignment of this type is compatible with
6641 the required alignment of the atomic type. */
6642 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6643 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6647 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6648 /* Propagate structural equality. */
6649 SET_TYPE_STRUCTURAL_EQUALITY (t);
6650 else if (TYPE_CANONICAL (type) != type)
6651 /* Build the underlying canonical type, since it is different
6654 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6655 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6658 /* T is its own canonical type. */
6659 TYPE_CANONICAL (t) = t;
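/* Illustrative usage (an addition for exposition): callers usually derive a
   qualified variant by OR-ing extra TYPE_QUAL_* bits into the existing
   qualifiers, e.g. a const-qualified view of TYPE.  */
#if 0
  tree const_variant
    = build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST);
#endif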
6666 /* Create a variant of type T with alignment ALIGN. */
6669 build_aligned_type (tree type, unsigned int align)
6673 if (TYPE_PACKED (type)
6674 || TYPE_ALIGN (type) == align)
6677 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6678 if (check_aligned_type (t, type, align))
6681 t = build_variant_type_copy (type);
6682 TYPE_ALIGN (t) = align;
6687 /* Create a new distinct copy of TYPE. The new type is made its own
6688 MAIN_VARIANT. If TYPE requires structural equality checks, the
6689 resulting type requires structural equality checks; otherwise, its
6690 TYPE_CANONICAL points to itself. */
6693 build_distinct_type_copy (tree type)
6695 tree t = copy_node (type);
6697 TYPE_POINTER_TO (t) = 0;
6698 TYPE_REFERENCE_TO (t) = 0;
6700 /* Set the canonical type either to a new equivalence class, or
6701 propagate the need for structural equality checks. */
6702 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6703 SET_TYPE_STRUCTURAL_EQUALITY (t);
6705 TYPE_CANONICAL (t) = t;
6707 /* Make it its own variant. */
6708 TYPE_MAIN_VARIANT (t) = t;
6709 TYPE_NEXT_VARIANT (t) = 0;
6711 /* We do not record methods in type copies nor variants
6712 so we do not need to keep them up to date when new method
6714 if (RECORD_OR_UNION_TYPE_P (t))
6715 TYPE_METHODS (t) = NULL_TREE;
6717 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6718 whose TREE_TYPE is not t. This can also happen in the Ada
6719 frontend when using subtypes. */
6724 /* Create a new variant of TYPE, equivalent but distinct. This is so
6725 the caller can modify it. TYPE_CANONICAL for the return type will
6726 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6727 are considered equal by the language itself (or that both types
6728 require structural equality checks). */
6731 build_variant_type_copy (tree type)
6733 tree t, m = TYPE_MAIN_VARIANT (type);
6735 t = build_distinct_type_copy (type);
6737 /* Since we're building a variant, assume that it is a non-semantic
6738 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6739 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6740 /* Type variants have no alias set defined. */
6741 TYPE_ALIAS_SET (t) = -1;
6743 /* Add the new type to the chain of variants of TYPE. */
6744 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6745 TYPE_NEXT_VARIANT (m) = t;
6746 TYPE_MAIN_VARIANT (t) = m;
6751 /* Return true if the from tree in both tree maps are equal. */
6754 tree_map_base_eq (const void *va, const void *vb)
6756 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6757 *const b = (const struct tree_map_base *) vb;
6758 return (a->from == b->from);
6761 /* Hash a from tree in a tree_base_map. */
6764 tree_map_base_hash (const void *item)
6766 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6769 /* Return true if this tree map structure is marked for garbage collection
6770 purposes. We simply return true if the from tree is marked, so that this
6771 structure goes away when the from tree goes away. */
6774 tree_map_base_marked_p (const void *p)
6776 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6779 /* Hash a from tree in a tree_map. */
6782 tree_map_hash (const void *item)
6784 return (((const struct tree_map *) item)->hash);
6787 /* Hash a from tree in a tree_decl_map. */
6790 tree_decl_map_hash (const void *item)
6792 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6795 /* Return the initialization priority for DECL. */
6798 decl_init_priority_lookup (tree decl)
6800 symtab_node *snode = symtab_node::get (decl);
6803 return DEFAULT_INIT_PRIORITY;
6805 snode->get_init_priority ();
6808 /* Return the finalization priority for DECL. */
6811 decl_fini_priority_lookup (tree decl)
6813 cgraph_node *node = cgraph_node::get (decl);
6816 return DEFAULT_INIT_PRIORITY;
6818 node->get_fini_priority ();
6821 /* Set the initialization priority for DECL to PRIORITY. */
6824 decl_init_priority_insert (tree decl, priority_type priority)
6826 struct symtab_node *snode;
6828 if (priority == DEFAULT_INIT_PRIORITY)
6830 snode = symtab_node::get (decl);
6834 else if (TREE_CODE (decl) == VAR_DECL)
6835 snode = varpool_node::get_create (decl);
6837 snode = cgraph_node::get_create (decl);
6838 snode->set_init_priority (priority);
6841 /* Set the finalization priority for DECL to PRIORITY. */
6844 decl_fini_priority_insert (tree decl, priority_type priority)
6846 struct cgraph_node *node;
6848 if (priority == DEFAULT_INIT_PRIORITY)
6850 node = cgraph_node::get (decl);
6855 node = cgraph_node::get_create (decl);
6856 node->set_fini_priority (priority);
6859 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6862 print_debug_expr_statistics (void)
6864 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6865 (long) debug_expr_for_decl->size (),
6866 (long) debug_expr_for_decl->elements (),
6867 debug_expr_for_decl->collisions ());
6870 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6873 print_value_expr_statistics (void)
6875 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6876 (long) value_expr_for_decl->size (),
6877 (long) value_expr_for_decl->elements (),
6878 value_expr_for_decl->collisions ());
6881 /* Lookup a debug expression for FROM, and return it if we find one. */
6884 decl_debug_expr_lookup (tree from)
6886 struct tree_decl_map *h, in;
6887 in.base.from = from;
6889 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6895 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6898 decl_debug_expr_insert (tree from, tree to)
6900 struct tree_decl_map *h;
6902 h = ggc_alloc<tree_decl_map> ();
6903 h->base.from = from;
6905 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6908 /* Lookup a value expression for FROM, and return it if we find one. */
6911 decl_value_expr_lookup (tree from)
6913 struct tree_decl_map *h, in;
6914 in.base.from = from;
6916 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6922 /* Insert a mapping FROM->TO in the value expression hashtable. */
6925 decl_value_expr_insert (tree from, tree to)
6927 struct tree_decl_map *h;
6929 h = ggc_alloc<tree_decl_map> ();
6930 h->base.from = from;
6932 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6935 /* Lookup a vector of debug arguments for FROM, and return it if we
6939 decl_debug_args_lookup (tree from)
6941 struct tree_vec_map *h, in;
6943 if (!DECL_HAS_DEBUG_ARGS_P (from))
6945 gcc_checking_assert (debug_args_for_decl != NULL);
6946 in.base.from = from;
6947 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6953 /* Insert a mapping FROM->empty vector of debug arguments in the value
6954 expression hashtable. */
6957 decl_debug_args_insert (tree from)
6959 struct tree_vec_map *h;
6962 if (DECL_HAS_DEBUG_ARGS_P (from))
6963 return decl_debug_args_lookup (from);
6964 if (debug_args_for_decl == NULL)
6965 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6966 h = ggc_alloc<tree_vec_map> ();
6967 h->base.from = from;
6969 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6971 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6975 /* Hashing of types so that we don't make duplicates.
6976 The entry point is `type_hash_canon'. */
6978 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6979 with types in the TREE_VALUE slots), by adding the hash codes
6980 of the individual types. */
6983 type_hash_list (const_tree list, inchash::hash &hstate)
6987 for (tail = list; tail; tail = TREE_CHAIN (tail))
6988 if (TREE_VALUE (tail) != error_mark_node)
6989 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6992 /* These are the Hashtable callback functions. */
6994 /* Returns true iff the types are equivalent. */
6997 type_cache_hasher::equal (type_hash *a, type_hash *b)
6999 /* First test the things that are the same for all types. */
7000 if (a->hash != b->hash
7001 || TREE_CODE (a->type) != TREE_CODE (b->type)
7002 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
7003 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
7004 TYPE_ATTRIBUTES (b->type))
7005 || (TREE_CODE (a->type) != COMPLEX_TYPE
7006 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
7009 /* Be careful about comparing arrays before and after the element type
7010 has been completed; don't compare TYPE_ALIGN unless both types are
7012 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
7013 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
7014 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
7017 switch (TREE_CODE (a->type))
7022 case REFERENCE_TYPE:
7027 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
7030 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
7031 && !(TYPE_VALUES (a->type)
7032 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
7033 && TYPE_VALUES (b->type)
7034 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
7035 && type_list_equal (TYPE_VALUES (a->type),
7036 TYPE_VALUES (b->type))))
7039 /* ... fall through ... */
7044 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7046 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7047 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7048 TYPE_MAX_VALUE (b->type)))
7049 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7050 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7051 TYPE_MIN_VALUE (b->type))));
7053 case FIXED_POINT_TYPE:
7054 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7057 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7060 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7061 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7062 || (TYPE_ARG_TYPES (a->type)
7063 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7064 && TYPE_ARG_TYPES (b->type)
7065 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7066 && type_list_equal (TYPE_ARG_TYPES (a->type),
7067 TYPE_ARG_TYPES (b->type)))))
7071 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
7075 case QUAL_UNION_TYPE:
7076 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7077 || (TYPE_FIELDS (a->type)
7078 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7079 && TYPE_FIELDS (b->type)
7080 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7081 && type_list_equal (TYPE_FIELDS (a->type),
7082 TYPE_FIELDS (b->type))));
7085 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7086 || (TYPE_ARG_TYPES (a->type)
7087 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7088 && TYPE_ARG_TYPES (b->type)
7089 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7090 && type_list_equal (TYPE_ARG_TYPES (a->type),
7091 TYPE_ARG_TYPES (b->type))))
7099 if (lang_hooks.types.type_hash_eq != NULL)
7100 return lang_hooks.types.type_hash_eq (a->type, b->type);
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.  */

tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.  */
  layout_type (type);

  in.hash = hashcode;
  in.type = type;

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  if (*loc)
    {
      tree t1 = ((type_hash *) *loc)->type;
      gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
      return t1;
    }
  else
    {
      struct type_hash *h;

      h = ggc_alloc<type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *loc = h;

      return type;
    }
}

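/* Illustrative sketch (hypothetical, modelled on callers further down in
   this file such as build_complex_type): the intended calling sequence is

     tree t = make_node (COMPLEX_TYPE);
     TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
     inchash::hash hstate;
     hstate.add_object (TYPE_HASH (component_type));
     t = type_hash_canon (hstate.end (), t);

   i.e. hash only the fields that distinguish the new type from similar
   ones, then let type_hash_canon either intern T or hand back the node
   that was interned earlier.  */
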
static void
print_type_hash_statistics (void)
{
  fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
	   (long) type_hash_table->size (),
	   (long) type_hash_table->elements (),
	   type_hash_table->collisions ());
}

/* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
   with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
   by adding the hash codes of the individual attributes.  */

static void
attribute_hash_list (const_tree list, inchash::hash &hstate)
{
  const_tree tail;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    /* ??? Do we want to add in TREE_VALUE too? */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
}

/* Given two lists of attributes, return true if list l2 is
   equivalent to l1.  */

int
attribute_list_equal (const_tree l1, const_tree l2)
{
  if (l1 == l2)
    return 1;

  return attribute_list_contained (l1, l2)
	 && attribute_list_contained (l2, l1);
}

/* Given two lists of attributes, return true if list L2 is
   completely contained within L1.  */
/* ??? This would be faster if attribute names were stored in a canonicalized
   form.  Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
   must be used to show these elements are equivalent (which they are).  */
/* ??? It's not clear that attributes with arguments will always be handled
   correctly.  */

int
attribute_list_contained (const_tree l1, const_tree l2)
{
  const_tree t1, t2;

  /* First check the obvious, maybe the lists are identical.  */
  if (l1 == l2)
    return 1;

  /* Maybe the lists are similar.  */
  for (t1 = l1, t2 = l2;
       t1 != 0 && t2 != 0
       && get_attribute_name (t1) == get_attribute_name (t2)
       && TREE_VALUE (t1) == TREE_VALUE (t2);
       t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    ;

  /* Maybe the lists are equal.  */
  if (t1 == 0 && t2 == 0)
    return 1;

  for (; t2 != 0; t2 = TREE_CHAIN (t2))
    {
      const_tree attr;
      /* This CONST_CAST is okay because lookup_attribute does not
	 modify its argument and the return value is assigned to a
	 const_tree.  */
      for (attr = lookup_ident_attribute (get_attribute_name (t2),
					  CONST_CAST_TREE (l1));
	   attr != NULL_TREE && !attribute_value_equal (t2, attr);
	   attr = lookup_ident_attribute (get_attribute_name (t2),
					  TREE_CHAIN (attr)))
	;

      if (attr == NULL_TREE)
	return 0;
    }

  return 1;
}

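/* Illustrative sketch (hypothetical attribute lists, not from this file):

     tree a4 = tree_cons (get_identifier ("aligned"),
			  build_tree_list (NULL_TREE, size_int (4)),
			  NULL_TREE);
     tree l1 = tree_cons (get_identifier ("packed"), NULL_TREE, a4);
     tree l2 = a4;

   attribute_list_contained (l1, l2) is 1, since every attribute of L2 has a
   value-equal counterpart in L1, while attribute_list_equal (l1, l2) is 0
   because L1's `packed' has no counterpart in L2.  Spelling variants such
   as `aligned' versus `__aligned__' compare equal here because the inner
   loop goes through lookup_ident_attribute.  */
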
/* Given two lists of types
   (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
   return 1 if the lists contain the same types in the same order.
   Also, the TREE_PURPOSEs must match.  */

int
type_list_equal (const_tree l1, const_tree l2)
{
  const_tree t1, t2;

  for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    if (TREE_VALUE (t1) != TREE_VALUE (t2)
	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
		  && (TREE_TYPE (TREE_PURPOSE (t1))
		      == TREE_TYPE (TREE_PURPOSE (t2))))))
      return 0;

  return t1 == t2;
}

/* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
   given by TYPE.  If the argument list accepts variable arguments,
   then this function counts only the ordinary arguments.  */

int
type_num_arguments (const_tree type)
{
  int i = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
    /* If the function does not take a variable number of arguments,
       the last element in the list will have type `void'.  */
    if (VOID_TYPE_P (TREE_VALUE (t)))
      break;
    else
      ++i;

  return i;
}

/* Nonzero if integer constants T1 and T2
   represent the same constant value.  */

int
tree_int_cst_equal (const_tree t1, const_tree t2)
{
  if (t1 == t2)
    return 1;

  if (t1 == 0 || t2 == 0)
    return 0;

  if (TREE_CODE (t1) == INTEGER_CST
      && TREE_CODE (t2) == INTEGER_CST
      && wi::to_widest (t1) == wi::to_widest (t2))
    return 1;

  return 0;
}

/* Return true if T is an INTEGER_CST whose numerical value (extended
   according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  */

bool
tree_fits_shwi_p (const_tree t)
{
  return (t != NULL_TREE
	  && TREE_CODE (t) == INTEGER_CST
	  && wi::fits_shwi_p (wi::to_widest (t)));
}

/* Return true if T is an INTEGER_CST whose numerical value (extended
   according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  */

bool
tree_fits_uhwi_p (const_tree t)
{
  return (t != NULL_TREE
	  && TREE_CODE (t) == INTEGER_CST
	  && wi::fits_uhwi_p (wi::to_widest (t)));
}

/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  */

HOST_WIDE_INT
tree_to_shwi (const_tree t)
{
  gcc_assert (tree_fits_shwi_p (t));
  return TREE_INT_CST_LOW (t);
}

/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  Return that
   HOST_WIDE_INT.  */

unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
{
  gcc_assert (tree_fits_uhwi_p (t));
  return TREE_INT_CST_LOW (t);
}

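/* Illustrative sketch (hypothetical caller): the fits/to pairs above are
   meant to be used together, since tree_to_shwi and tree_to_uhwi assert
   their precondition:

     unsigned HOST_WIDE_INT sz = 0;
     if (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
       sz = tree_to_uhwi (TYPE_SIZE_UNIT (type));

   TYPE_SIZE_UNIT here is just one example of an INTEGER_CST-valued field.  */
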
/* Return the most significant (sign) bit of T.  */

int
tree_int_cst_sign_bit (const_tree t)
{
  unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;

  return wi::extract_uhwi (t, bitno, 1);
}

/* Return an indication of the sign of the integer constant T.
   The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
   Note that -1 will never be returned if T's type is unsigned.  */

int
tree_int_cst_sgn (const_tree t)
{
  if (wi::eq_p (t, 0))
    return 0;
  else if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return 1;
  else if (wi::neg_p (t))
    return -1;
  else
    return 1;
}

/* Return the minimum number of bits needed to represent VALUE in a
   signed or unsigned type; SGN says which.  */

unsigned int
tree_int_cst_min_precision (tree value, signop sgn)
{
  /* If the value is negative, compute its negative minus 1.  The latter
     adjustment is because the absolute value of the largest negative value
     is one larger than the largest positive value.  This is equivalent to
     a bit-wise negation, so use that operation instead.  */

  if (tree_int_cst_sgn (value) < 0)
    value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);

  /* Return the number of bits needed, taking into account the fact
     that we need one more bit for a signed than unsigned type.
     If value is 0 or -1, the minimum precision is 1 no matter
     whether SGN is SIGNED or UNSIGNED.  */

  if (integer_zerop (value))
    return 1;
  else
    return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
}

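/* Worked example (illustrative): for VALUE == 5 (binary 101),
   tree_floor_log2 returns 2, so the result is 3 for SGN == UNSIGNED and 4
   for SGN == SIGNED (one extra bit for the sign).  For VALUE == -3 the
   BIT_NOT_EXPR above first turns it into 2, giving 1 + 1 + 1 = 3 bits,
   which indeed covers the range -4 .. 3.  */
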
7395 /* Return truthvalue of whether T1 is the same tree structure as T2.
7396 Return 1 if they are the same.
7397 Return 0 if they are understandably different.
7398 Return -1 if either contains tree structure not understood by
7402 simple_cst_equal (const_tree t1
, const_tree t2
)
7404 enum tree_code code1
, code2
;
7410 if (t1
== 0 || t2
== 0)
7413 code1
= TREE_CODE (t1
);
7414 code2
= TREE_CODE (t2
);
7416 if (CONVERT_EXPR_CODE_P (code1
) || code1
== NON_LVALUE_EXPR
)
7418 if (CONVERT_EXPR_CODE_P (code2
)
7419 || code2
== NON_LVALUE_EXPR
)
7420 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7422 return simple_cst_equal (TREE_OPERAND (t1
, 0), t2
);
7425 else if (CONVERT_EXPR_CODE_P (code2
)
7426 || code2
== NON_LVALUE_EXPR
)
7427 return simple_cst_equal (t1
, TREE_OPERAND (t2
, 0));
7435 return wi::to_widest (t1
) == wi::to_widest (t2
);
7438 return real_identical (&TREE_REAL_CST (t1
), &TREE_REAL_CST (t2
));
7441 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1
), TREE_FIXED_CST (t2
));
7444 return (TREE_STRING_LENGTH (t1
) == TREE_STRING_LENGTH (t2
)
7445 && ! memcmp (TREE_STRING_POINTER (t1
), TREE_STRING_POINTER (t2
),
7446 TREE_STRING_LENGTH (t1
)));
7450 unsigned HOST_WIDE_INT idx
;
7451 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (t1
);
7452 vec
<constructor_elt
, va_gc
> *v2
= CONSTRUCTOR_ELTS (t2
);
7454 if (vec_safe_length (v1
) != vec_safe_length (v2
))
7457 for (idx
= 0; idx
< vec_safe_length (v1
); ++idx
)
7458 /* ??? Should we handle also fields here? */
7459 if (!simple_cst_equal ((*v1
)[idx
].value
, (*v2
)[idx
].value
))
7465 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7468 cmp
= simple_cst_equal (CALL_EXPR_FN (t1
), CALL_EXPR_FN (t2
));
7471 if (call_expr_nargs (t1
) != call_expr_nargs (t2
))
7474 const_tree arg1
, arg2
;
7475 const_call_expr_arg_iterator iter1
, iter2
;
7476 for (arg1
= first_const_call_expr_arg (t1
, &iter1
),
7477 arg2
= first_const_call_expr_arg (t2
, &iter2
);
7479 arg1
= next_const_call_expr_arg (&iter1
),
7480 arg2
= next_const_call_expr_arg (&iter2
))
7482 cmp
= simple_cst_equal (arg1
, arg2
);
7486 return arg1
== arg2
;
7490 /* Special case: if either target is an unallocated VAR_DECL,
7491 it means that it's going to be unified with whatever the
7492 TARGET_EXPR is really supposed to initialize, so treat it
7493 as being equivalent to anything. */
7494 if ((TREE_CODE (TREE_OPERAND (t1
, 0)) == VAR_DECL
7495 && DECL_NAME (TREE_OPERAND (t1
, 0)) == NULL_TREE
7496 && !DECL_RTL_SET_P (TREE_OPERAND (t1
, 0)))
7497 || (TREE_CODE (TREE_OPERAND (t2
, 0)) == VAR_DECL
7498 && DECL_NAME (TREE_OPERAND (t2
, 0)) == NULL_TREE
7499 && !DECL_RTL_SET_P (TREE_OPERAND (t2
, 0))))
7502 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7507 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t2
, 1));
7509 case WITH_CLEANUP_EXPR
:
7510 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7514 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t1
, 1));
7517 if (TREE_OPERAND (t1
, 1) == TREE_OPERAND (t2
, 1))
7518 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7532 /* This general rule works for most tree codes. All exceptions should be
7533 handled above. If this is a language-specific tree code, we can't
7534 trust what might be in the operand, so say we don't know
7536 if ((int) code1
>= (int) LAST_AND_UNUSED_TREE_CODE
)
7539 switch (TREE_CODE_CLASS (code1
))
7543 case tcc_comparison
:
7544 case tcc_expression
:
7548 for (i
= 0; i
< TREE_CODE_LENGTH (code1
); i
++)
7550 cmp
= simple_cst_equal (TREE_OPERAND (t1
, i
), TREE_OPERAND (t2
, i
));
/* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
   Return -1, 0, or 1 if the value of T is less than, equal to, or greater
   than U, respectively.  */

int
compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
{
  if (tree_int_cst_sgn (t) < 0)
    return -1;
  else if (!tree_fits_uhwi_p (t))
    return 1;
  else if (TREE_INT_CST_LOW (t) == u)
    return 0;
  else if (TREE_INT_CST_LOW (t) < u)
    return -1;
  else
    return 1;
}

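/* Illustrative sketch (hypothetical caller): the typical pattern is checking
   a tree constant against a host-side bound, e.g.

     bool small_enough = (TREE_CODE (len) == INTEGER_CST
			  && compare_tree_int (len, 4096) <= 0);

   Negative values always compare as -1 and values too large for an unsigned
   HOST_WIDE_INT as 1, so no separate overflow check is needed before the
   comparison.  */
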
/* Return true if SIZE represents a constant size that is in bounds of
   what the middle-end and the backend accepts (covering not more than
   half of the address-space).  */

bool
valid_constant_size_p (const_tree size)
{
  if (! tree_fits_uhwi_p (size)
      || TREE_OVERFLOW (size)
      || tree_int_cst_sign_bit (size) != 0)
    return false;
  return true;
}

/* Return the precision of the type, or for a complex or vector type the
   precision of the type of its elements.  */

unsigned int
element_precision (const_tree type)
{
  if (!TYPE_P (type))
    type = TREE_TYPE (type);
  enum tree_code code = TREE_CODE (type);
  if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
    type = TREE_TYPE (type);

  return TYPE_PRECISION (type);
}

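/* Illustrative sketch: for a COMPLEX_TYPE over double or a vector of 32-bit
   ints, element_precision reports the element, i.e.
   TYPE_PRECISION (double_type_node) (typically 64) and 32 respectively,
   whereas TYPE_PRECISION on a vector type itself has a different meaning
   (the number of subparts), as noted again in get_unwidened below.  */
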
7610 /* Return true if CODE represents an associative tree code. Otherwise
7613 associative_tree_code (enum tree_code code
)
7632 /* Return true if CODE represents a commutative tree code. Otherwise
7635 commutative_tree_code (enum tree_code code
)
7641 case MULT_HIGHPART_EXPR
:
7649 case UNORDERED_EXPR
:
7653 case TRUTH_AND_EXPR
:
7654 case TRUTH_XOR_EXPR
:
7656 case WIDEN_MULT_EXPR
:
7657 case VEC_WIDEN_MULT_HI_EXPR
:
7658 case VEC_WIDEN_MULT_LO_EXPR
:
7659 case VEC_WIDEN_MULT_EVEN_EXPR
:
7660 case VEC_WIDEN_MULT_ODD_EXPR
:
7669 /* Return true if CODE represents a ternary tree code for which the
7670 first two operands are commutative. Otherwise return false. */
7672 commutative_ternary_tree_code (enum tree_code code
)
7676 case WIDEN_MULT_PLUS_EXPR
:
7677 case WIDEN_MULT_MINUS_EXPR
:
7688 /* Returns true if CODE can overflow. */
7691 operation_can_overflow (enum tree_code code
)
7699 /* Can overflow in various ways. */
7701 case TRUNC_DIV_EXPR
:
7702 case EXACT_DIV_EXPR
:
7703 case FLOOR_DIV_EXPR
:
7705 /* For INT_MIN / -1. */
7712 /* These operators cannot overflow. */
7717 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7718 ftrapv doesn't generate trapping insns for CODE. */
7721 operation_no_trapping_overflow (tree type
, enum tree_code code
)
7723 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type
));
7725 /* We don't generate instructions that trap on overflow for complex or vector
7727 if (!INTEGRAL_TYPE_P (type
))
7730 if (!TYPE_OVERFLOW_TRAPS (type
))
7740 /* These operators can overflow, and -ftrapv generates trapping code for
7743 case TRUNC_DIV_EXPR
:
7744 case EXACT_DIV_EXPR
:
7745 case FLOOR_DIV_EXPR
:
7748 /* These operators can overflow, but -ftrapv does not generate trapping
7752 /* These operators cannot overflow. */
7760 /* Generate a hash value for an expression. This can be used iteratively
7761 by passing a previous result as the HSTATE argument.
7763 This function is intended to produce the same hash for expressions which
7764 would compare equal using operand_equal_p. */
7766 add_expr (const_tree t
, inchash::hash
&hstate
)
7769 enum tree_code code
;
7770 enum tree_code_class tclass
;
7774 hstate
.merge_hash (0);
7778 code
= TREE_CODE (t
);
7782 /* Alas, constants aren't shared, so we can't rely on pointer
7785 hstate
.merge_hash (0);
7788 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
7789 hstate
.add_wide_int (TREE_INT_CST_ELT (t
, i
));
7793 unsigned int val2
= real_hash (TREE_REAL_CST_PTR (t
));
7794 hstate
.merge_hash (val2
);
7799 unsigned int val2
= fixed_hash (TREE_FIXED_CST_PTR (t
));
7800 hstate
.merge_hash (val2
);
7804 hstate
.add ((const void *) TREE_STRING_POINTER (t
), TREE_STRING_LENGTH (t
));
7807 inchash::add_expr (TREE_REALPART (t
), hstate
);
7808 inchash::add_expr (TREE_IMAGPART (t
), hstate
);
7813 for (i
= 0; i
< VECTOR_CST_NELTS (t
); ++i
)
7814 inchash::add_expr (VECTOR_CST_ELT (t
, i
), hstate
);
7818 /* We can just compare by pointer. */
7819 hstate
.add_wide_int (SSA_NAME_VERSION (t
));
7821 case PLACEHOLDER_EXPR
:
7822 /* The node itself doesn't matter. */
7825 /* A list of expressions, for a CALL_EXPR or as the elements of a
7827 for (; t
; t
= TREE_CHAIN (t
))
7828 inchash::add_expr (TREE_VALUE (t
), hstate
);
7832 unsigned HOST_WIDE_INT idx
;
7834 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t
), idx
, field
, value
)
7836 inchash::add_expr (field
, hstate
);
7837 inchash::add_expr (value
, hstate
);
7842 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7843 Otherwise nodes that compare equal according to operand_equal_p might
7844 get different hash codes. However, don't do this for machine specific
7845 or front end builtins, since the function code is overloaded in those
7847 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
7848 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t
)))
7850 t
= builtin_decl_explicit (DECL_FUNCTION_CODE (t
));
7851 code
= TREE_CODE (t
);
7855 tclass
= TREE_CODE_CLASS (code
);
7857 if (tclass
== tcc_declaration
)
7859 /* DECL's have a unique ID */
7860 hstate
.add_wide_int (DECL_UID (t
));
7864 gcc_assert (IS_EXPR_CODE_CLASS (tclass
));
7866 hstate
.add_object (code
);
7868 /* Don't hash the type, that can lead to having nodes which
7869 compare equal according to operand_equal_p, but which
7870 have different hash codes. */
7871 if (CONVERT_EXPR_CODE_P (code
)
7872 || code
== NON_LVALUE_EXPR
)
7874 /* Make sure to include signness in the hash computation. */
7875 hstate
.add_int (TYPE_UNSIGNED (TREE_TYPE (t
)));
7876 inchash::add_expr (TREE_OPERAND (t
, 0), hstate
);
7879 else if (commutative_tree_code (code
))
7881 /* It's a commutative expression. We want to hash it the same
7882 however it appears. We do this by first hashing both operands
7883 and then rehashing based on the order of their independent
7885 inchash::hash one
, two
;
7886 inchash::add_expr (TREE_OPERAND (t
, 0), one
);
7887 inchash::add_expr (TREE_OPERAND (t
, 1), two
);
7888 hstate
.add_commutative (one
, two
);
7891 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
7892 inchash::add_expr (TREE_OPERAND (t
, i
), hstate
);
7900 /* Constructors for pointer, array and function types.
7901 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7902 constructed by language-dependent code, not here.) */
7904 /* Construct, lay out and return the type of pointers to TO_TYPE with
7905 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7906 reference all of memory. If such a type has already been
7907 constructed, reuse it. */
7910 build_pointer_type_for_mode (tree to_type
, machine_mode mode
,
7914 bool could_alias
= can_alias_all
;
7916 if (to_type
== error_mark_node
)
7917 return error_mark_node
;
7919 /* If the pointed-to type has the may_alias attribute set, force
7920 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7921 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
7922 can_alias_all
= true;
7924 /* In some cases, languages will have things that aren't a POINTER_TYPE
7925 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7926 In that case, return that type without regard to the rest of our
7929 ??? This is a kludge, but consistent with the way this function has
7930 always operated and there doesn't seem to be a good way to avoid this
7932 if (TYPE_POINTER_TO (to_type
) != 0
7933 && TREE_CODE (TYPE_POINTER_TO (to_type
)) != POINTER_TYPE
)
7934 return TYPE_POINTER_TO (to_type
);
7936 /* First, if we already have a type for pointers to TO_TYPE and it's
7937 the proper mode, use it. */
7938 for (t
= TYPE_POINTER_TO (to_type
); t
; t
= TYPE_NEXT_PTR_TO (t
))
7939 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
7942 t
= make_node (POINTER_TYPE
);
7944 TREE_TYPE (t
) = to_type
;
7945 SET_TYPE_MODE (t
, mode
);
7946 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
7947 TYPE_NEXT_PTR_TO (t
) = TYPE_POINTER_TO (to_type
);
7948 TYPE_POINTER_TO (to_type
) = t
;
7950 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7951 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
7952 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7953 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
7955 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type
),
7958 /* Lay out the type. This function has many callers that are concerned
7959 with expression-construction, and this simplifies them all. */
/* By default build pointers in ptr_mode.  */

tree
build_pointer_type (tree to_type)
{
  addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
					       : TYPE_ADDR_SPACE (to_type);
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  return build_pointer_type_for_mode (to_type, pointer_mode, false);
}

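/* Illustrative sketch (hypothetical caller): building the trees for `int *'
   and `int **' is simply

     tree int_ptr     = build_pointer_type (integer_type_node);
     tree int_ptr_ptr = build_pointer_type (int_ptr);

   Calling build_pointer_type twice with the same TO_TYPE yields the same
   node, because build_pointer_type_for_mode first walks the
   TYPE_NEXT_PTR_TO chain looking for an existing match.  */
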
7976 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7979 build_reference_type_for_mode (tree to_type
, machine_mode mode
,
7983 bool could_alias
= can_alias_all
;
7985 if (to_type
== error_mark_node
)
7986 return error_mark_node
;
7988 /* If the pointed-to type has the may_alias attribute set, force
7989 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7990 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
7991 can_alias_all
= true;
7993 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7994 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7995 In that case, return that type without regard to the rest of our
7998 ??? This is a kludge, but consistent with the way this function has
7999 always operated and there doesn't seem to be a good way to avoid this
8001 if (TYPE_REFERENCE_TO (to_type
) != 0
8002 && TREE_CODE (TYPE_REFERENCE_TO (to_type
)) != REFERENCE_TYPE
)
8003 return TYPE_REFERENCE_TO (to_type
);
8005 /* First, if we already have a type for pointers to TO_TYPE and it's
8006 the proper mode, use it. */
8007 for (t
= TYPE_REFERENCE_TO (to_type
); t
; t
= TYPE_NEXT_REF_TO (t
))
8008 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
8011 t
= make_node (REFERENCE_TYPE
);
8013 TREE_TYPE (t
) = to_type
;
8014 SET_TYPE_MODE (t
, mode
);
8015 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
8016 TYPE_NEXT_REF_TO (t
) = TYPE_REFERENCE_TO (to_type
);
8017 TYPE_REFERENCE_TO (to_type
) = t
;
8019 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8020 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
) || in_lto_p
)
8021 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8022 else if (TYPE_CANONICAL (to_type
) != to_type
|| could_alias
)
8024 = build_reference_type_for_mode (TYPE_CANONICAL (to_type
),
/* Build the node for the type of references-to-TO_TYPE by default
   in ptr_mode.  */

tree
build_reference_type (tree to_type)
{
  addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
					       : TYPE_ADDR_SPACE (to_type);
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  return build_reference_type_for_mode (to_type, pointer_mode, false);
}

8045 #define MAX_INT_CACHED_PREC \
8046 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8047 static GTY(()) tree nonstandard_integer_type_cache
[2 * MAX_INT_CACHED_PREC
+ 2];
8049 /* Builds a signed or unsigned integer type of precision PRECISION.
8050 Used for C bitfields whose precision does not match that of
8051 built-in target types. */
8053 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision
,
8059 unsignedp
= MAX_INT_CACHED_PREC
+ 1;
8061 if (precision
<= MAX_INT_CACHED_PREC
)
8063 itype
= nonstandard_integer_type_cache
[precision
+ unsignedp
];
8068 itype
= make_node (INTEGER_TYPE
);
8069 TYPE_PRECISION (itype
) = precision
;
8072 fixup_unsigned_type (itype
);
8074 fixup_signed_type (itype
);
8077 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype
)))
8078 ret
= type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype
)), itype
);
8079 if (precision
<= MAX_INT_CACHED_PREC
)
8080 nonstandard_integer_type_cache
[precision
+ unsignedp
] = ret
;
8085 #define MAX_BOOL_CACHED_PREC \
8086 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8087 static GTY(()) tree nonstandard_boolean_type_cache
[MAX_BOOL_CACHED_PREC
+ 1];
8089 /* Builds a boolean type of precision PRECISION.
8090 Used for boolean vectors to choose proper vector element size. */
8092 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision
)
8096 if (precision
<= MAX_BOOL_CACHED_PREC
)
8098 type
= nonstandard_boolean_type_cache
[precision
];
8103 type
= make_node (BOOLEAN_TYPE
);
8104 TYPE_PRECISION (type
) = precision
;
8105 fixup_signed_type (type
);
8107 if (precision
<= MAX_INT_CACHED_PREC
)
8108 nonstandard_boolean_type_cache
[precision
] = type
;
8113 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8114 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8115 is true, reuse such a type that has already been constructed. */
8118 build_range_type_1 (tree type
, tree lowval
, tree highval
, bool shared
)
8120 tree itype
= make_node (INTEGER_TYPE
);
8121 inchash::hash hstate
;
8123 TREE_TYPE (itype
) = type
;
8125 TYPE_MIN_VALUE (itype
) = fold_convert (type
, lowval
);
8126 TYPE_MAX_VALUE (itype
) = highval
? fold_convert (type
, highval
) : NULL
;
8128 TYPE_PRECISION (itype
) = TYPE_PRECISION (type
);
8129 SET_TYPE_MODE (itype
, TYPE_MODE (type
));
8130 TYPE_SIZE (itype
) = TYPE_SIZE (type
);
8131 TYPE_SIZE_UNIT (itype
) = TYPE_SIZE_UNIT (type
);
8132 TYPE_ALIGN (itype
) = TYPE_ALIGN (type
);
8133 TYPE_USER_ALIGN (itype
) = TYPE_USER_ALIGN (type
);
8138 if ((TYPE_MIN_VALUE (itype
)
8139 && TREE_CODE (TYPE_MIN_VALUE (itype
)) != INTEGER_CST
)
8140 || (TYPE_MAX_VALUE (itype
)
8141 && TREE_CODE (TYPE_MAX_VALUE (itype
)) != INTEGER_CST
))
8143 /* Since we cannot reliably merge this type, we need to compare it using
8144 structural equality checks. */
8145 SET_TYPE_STRUCTURAL_EQUALITY (itype
);
8149 inchash::add_expr (TYPE_MIN_VALUE (itype
), hstate
);
8150 inchash::add_expr (TYPE_MAX_VALUE (itype
), hstate
);
8151 hstate
.merge_hash (TYPE_HASH (type
));
8152 itype
= type_hash_canon (hstate
.end (), itype
);
/* Wrapper around build_range_type_1 with SHARED set to true.  */

tree
build_range_type (tree type, tree lowval, tree highval)
{
  return build_range_type_1 (type, lowval, highval, true);
}

/* Wrapper around build_range_type_1 with SHARED set to false.  */

tree
build_nonshared_range_type (tree type, tree lowval, tree highval)
{
  return build_range_type_1 (type, lowval, highval, false);
}

/* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
   MAXVAL should be the maximum value in the domain
   (one less than the length of the array).

   The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
   We don't enforce this limit, that is up to caller (e.g. language front end).
   The limit exists because the result is a signed type and we don't handle
   sizes that use more than one HOST_WIDE_INT.  */

tree
build_index_type (tree maxval)
{
  return build_range_type (sizetype, size_zero_node, maxval);
}

8188 /* Return true if the debug information for TYPE, a subtype, should be emitted
8189 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8190 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8191 debug info and doesn't reflect the source code. */
8194 subrange_type_for_debug_p (const_tree type
, tree
*lowval
, tree
*highval
)
8196 tree base_type
= TREE_TYPE (type
), low
, high
;
8198 /* Subrange types have a base type which is an integral type. */
8199 if (!INTEGRAL_TYPE_P (base_type
))
8202 /* Get the real bounds of the subtype. */
8203 if (lang_hooks
.types
.get_subrange_bounds
)
8204 lang_hooks
.types
.get_subrange_bounds (type
, &low
, &high
);
8207 low
= TYPE_MIN_VALUE (type
);
8208 high
= TYPE_MAX_VALUE (type
);
8211 /* If the type and its base type have the same representation and the same
8212 name, then the type is not a subrange but a copy of the base type. */
8213 if ((TREE_CODE (base_type
) == INTEGER_TYPE
8214 || TREE_CODE (base_type
) == BOOLEAN_TYPE
)
8215 && int_size_in_bytes (type
) == int_size_in_bytes (base_type
)
8216 && tree_int_cst_equal (low
, TYPE_MIN_VALUE (base_type
))
8217 && tree_int_cst_equal (high
, TYPE_MAX_VALUE (base_type
))
8218 && TYPE_IDENTIFIER (type
) == TYPE_IDENTIFIER (base_type
))
8228 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8229 and number of elements specified by the range of values of INDEX_TYPE.
8230 If SHARED is true, reuse such a type that has already been constructed. */
8233 build_array_type_1 (tree elt_type
, tree index_type
, bool shared
)
8237 if (TREE_CODE (elt_type
) == FUNCTION_TYPE
)
8239 error ("arrays of functions are not meaningful");
8240 elt_type
= integer_type_node
;
8243 t
= make_node (ARRAY_TYPE
);
8244 TREE_TYPE (t
) = elt_type
;
8245 TYPE_DOMAIN (t
) = index_type
;
8246 TYPE_ADDR_SPACE (t
) = TYPE_ADDR_SPACE (elt_type
);
8249 /* If the element type is incomplete at this point we get marked for
8250 structural equality. Do not record these types in the canonical
8252 if (TYPE_STRUCTURAL_EQUALITY_P (t
))
8257 inchash::hash hstate
;
8258 hstate
.add_object (TYPE_HASH (elt_type
));
8260 hstate
.add_object (TYPE_HASH (index_type
));
8261 t
= type_hash_canon (hstate
.end (), t
);
8264 if (TYPE_CANONICAL (t
) == t
)
8266 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type
)
8267 || (index_type
&& TYPE_STRUCTURAL_EQUALITY_P (index_type
))
8269 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8270 else if (TYPE_CANONICAL (elt_type
) != elt_type
8271 || (index_type
&& TYPE_CANONICAL (index_type
) != index_type
))
8273 = build_array_type_1 (TYPE_CANONICAL (elt_type
),
8275 ? TYPE_CANONICAL (index_type
) : NULL_TREE
,
/* Wrapper around build_array_type_1 with SHARED set to true.  */

tree
build_array_type (tree elt_type, tree index_type)
{
  return build_array_type_1 (elt_type, index_type, true);
}

/* Wrapper around build_array_type_1 with SHARED set to false.  */

tree
build_nonshared_array_type (tree elt_type, tree index_type)
{
  return build_array_type_1 (elt_type, index_type, false);
}

/* Return a representation of ELT_TYPE[NELTS], using indices of type
   sizetype.  */

tree
build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
{
  return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
}

/* Recursively examines the array elements of TYPE, until a non-array
   element type is found.  */

tree
strip_array_types (tree type)
{
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  return type;
}

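/* Illustrative sketch (hypothetical caller): for a nested array such as
   `int[3][4]',

     tree a34 = build_array_type_nelts (build_array_type_nelts
					  (integer_type_node, 4), 3);
     tree elt = strip_array_types (a34);   // integer_type_node

   i.e. every ARRAY_TYPE level is peeled, not just the outermost one.  */
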
8319 /* Computes the canonical argument types from the argument type list
8322 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8323 on entry to this function, or if any of the ARGTYPES are
8326 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8327 true on entry to this function, or if any of the ARGTYPES are
8330 Returns a canonical argument list, which may be ARGTYPES when the
8331 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8332 true) or would not differ from ARGTYPES. */
8335 maybe_canonicalize_argtypes (tree argtypes
,
8336 bool *any_structural_p
,
8337 bool *any_noncanonical_p
)
8340 bool any_noncanonical_argtypes_p
= false;
8342 for (arg
= argtypes
; arg
&& !(*any_structural_p
); arg
= TREE_CHAIN (arg
))
8344 if (!TREE_VALUE (arg
) || TREE_VALUE (arg
) == error_mark_node
)
8345 /* Fail gracefully by stating that the type is structural. */
8346 *any_structural_p
= true;
8347 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg
)))
8348 *any_structural_p
= true;
8349 else if (TYPE_CANONICAL (TREE_VALUE (arg
)) != TREE_VALUE (arg
)
8350 || TREE_PURPOSE (arg
))
8351 /* If the argument has a default argument, we consider it
8352 non-canonical even though the type itself is canonical.
8353 That way, different variants of function and method types
8354 with default arguments will all point to the variant with
8355 no defaults as their canonical type. */
8356 any_noncanonical_argtypes_p
= true;
8359 if (*any_structural_p
)
8362 if (any_noncanonical_argtypes_p
)
8364 /* Build the canonical list of argument types. */
8365 tree canon_argtypes
= NULL_TREE
;
8366 bool is_void
= false;
8368 for (arg
= argtypes
; arg
; arg
= TREE_CHAIN (arg
))
8370 if (arg
== void_list_node
)
8373 canon_argtypes
= tree_cons (NULL_TREE
,
8374 TYPE_CANONICAL (TREE_VALUE (arg
)),
8378 canon_argtypes
= nreverse (canon_argtypes
);
8380 canon_argtypes
= chainon (canon_argtypes
, void_list_node
);
8382 /* There is a non-canonical type. */
8383 *any_noncanonical_p
= true;
8384 return canon_argtypes
;
8387 /* The canonical argument types are the same as ARGTYPES. */
8391 /* Construct, lay out and return
8392 the type of functions returning type VALUE_TYPE
8393 given arguments of types ARG_TYPES.
8394 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8395 are data type nodes for the arguments of the function.
8396 If such a type has already been constructed, reuse it. */
8399 build_function_type (tree value_type
, tree arg_types
)
8402 inchash::hash hstate
;
8403 bool any_structural_p
, any_noncanonical_p
;
8404 tree canon_argtypes
;
8406 if (TREE_CODE (value_type
) == FUNCTION_TYPE
)
8408 error ("function return type cannot be function");
8409 value_type
= integer_type_node
;
8412 /* Make a node of the sort we want. */
8413 t
= make_node (FUNCTION_TYPE
);
8414 TREE_TYPE (t
) = value_type
;
8415 TYPE_ARG_TYPES (t
) = arg_types
;
8417 /* If we already have such a type, use the old one. */
8418 hstate
.add_object (TYPE_HASH (value_type
));
8419 type_hash_list (arg_types
, hstate
);
8420 t
= type_hash_canon (hstate
.end (), t
);
8422 /* Set up the canonical type. */
8423 any_structural_p
= TYPE_STRUCTURAL_EQUALITY_P (value_type
);
8424 any_noncanonical_p
= TYPE_CANONICAL (value_type
) != value_type
;
8425 canon_argtypes
= maybe_canonicalize_argtypes (arg_types
,
8427 &any_noncanonical_p
);
8428 if (any_structural_p
)
8429 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8430 else if (any_noncanonical_p
)
8431 TYPE_CANONICAL (t
) = build_function_type (TYPE_CANONICAL (value_type
),
8434 if (!COMPLETE_TYPE_P (t
))
8439 /* Build a function type. The RETURN_TYPE is the type returned by the
8440 function. If VAARGS is set, no void_type_node is appended to the
8441 list. ARGP must be always be terminated be a NULL_TREE. */
8444 build_function_type_list_1 (bool vaargs
, tree return_type
, va_list argp
)
8448 t
= va_arg (argp
, tree
);
8449 for (args
= NULL_TREE
; t
!= NULL_TREE
; t
= va_arg (argp
, tree
))
8450 args
= tree_cons (NULL_TREE
, t
, args
);
8455 if (args
!= NULL_TREE
)
8456 args
= nreverse (args
);
8457 gcc_assert (last
!= void_list_node
);
8459 else if (args
== NULL_TREE
)
8460 args
= void_list_node
;
8464 args
= nreverse (args
);
8465 TREE_CHAIN (last
) = void_list_node
;
8467 args
= build_function_type (return_type
, args
);
/* Build a function type.  The RETURN_TYPE is the type returned by the
   function.  If additional arguments are provided, they are
   additional argument types.  The list of argument types must always
   be terminated by NULL_TREE.  */

tree
build_function_type_list (tree return_type, ...)
{
  tree args;
  va_list p;

  va_start (p, return_type);
  args = build_function_type_list_1 (false, return_type, p);
  va_end (p);

  return args;
}

/* Build a variable argument function type.  The RETURN_TYPE is the
   type returned by the function.  If additional arguments are provided,
   they are additional argument types.  The list of argument types must
   always be terminated by NULL_TREE.  */

tree
build_varargs_function_type_list (tree return_type, ...)
{
  tree args;
  va_list p;

  va_start (p, return_type);
  args = build_function_type_list_1 (true, return_type, p);
  va_end (p);

  return args;
}

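/* Illustrative sketch (hypothetical caller): the type of
   `double f (int, double)' and of a printf-style `int g (const void *, ...)'
   can be built as

     tree f_type = build_function_type_list (double_type_node,
					     integer_type_node,
					     double_type_node, NULL_TREE);
     tree g_type = build_varargs_function_type_list (integer_type_node,
						     const_ptr_type_node,
						     NULL_TREE);

   The trailing NULL_TREE terminates the argument list in both cases; only
   the varargs variant omits the final void_list_node marker.  */
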
/* Build a function type.  RETURN_TYPE is the type returned by the
   function; VAARGS indicates whether the function takes varargs.  The
   function takes N named arguments, the types of which are provided in
   ARG_TYPES.  */

static tree
build_function_type_array_1 (bool vaargs, tree return_type, int n,
			     tree *arg_types)
{
  int i;
  tree t = vaargs ? NULL_TREE : void_list_node;

  for (i = n - 1; i >= 0; i--)
    t = tree_cons (NULL_TREE, arg_types[i], t);

  return build_function_type (return_type, t);
}

/* Build a function type.  RETURN_TYPE is the type returned by the
   function.  The function takes N named arguments, the types of which
   are provided in ARG_TYPES.  */

tree
build_function_type_array (tree return_type, int n, tree *arg_types)
{
  return build_function_type_array_1 (false, return_type, n, arg_types);
}

/* Build a variable argument function type.  RETURN_TYPE is the type
   returned by the function.  The function takes N named arguments, the
   types of which are provided in ARG_TYPES.  */

tree
build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
{
  return build_function_type_array_1 (true, return_type, n, arg_types);
}

8545 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8546 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8547 for the method. An implicit additional parameter (of type
8548 pointer-to-BASETYPE) is added to the ARGTYPES. */
8551 build_method_type_directly (tree basetype
,
8557 inchash::hash hstate
;
8558 bool any_structural_p
, any_noncanonical_p
;
8559 tree canon_argtypes
;
8561 /* Make a node of the sort we want. */
8562 t
= make_node (METHOD_TYPE
);
8564 TYPE_METHOD_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
8565 TREE_TYPE (t
) = rettype
;
8566 ptype
= build_pointer_type (basetype
);
8568 /* The actual arglist for this function includes a "hidden" argument
8569 which is "this". Put it into the list of argument types. */
8570 argtypes
= tree_cons (NULL_TREE
, ptype
, argtypes
);
8571 TYPE_ARG_TYPES (t
) = argtypes
;
8573 /* If we already have such a type, use the old one. */
8574 hstate
.add_object (TYPE_HASH (basetype
));
8575 hstate
.add_object (TYPE_HASH (rettype
));
8576 type_hash_list (argtypes
, hstate
);
8577 t
= type_hash_canon (hstate
.end (), t
);
8579 /* Set up the canonical type. */
8581 = (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
8582 || TYPE_STRUCTURAL_EQUALITY_P (rettype
));
8584 = (TYPE_CANONICAL (basetype
) != basetype
8585 || TYPE_CANONICAL (rettype
) != rettype
);
8586 canon_argtypes
= maybe_canonicalize_argtypes (TREE_CHAIN (argtypes
),
8588 &any_noncanonical_p
);
8589 if (any_structural_p
)
8590 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8591 else if (any_noncanonical_p
)
8593 = build_method_type_directly (TYPE_CANONICAL (basetype
),
8594 TYPE_CANONICAL (rettype
),
8596 if (!COMPLETE_TYPE_P (t
))
/* Construct, lay out and return the type of methods belonging to class
   BASETYPE and whose arguments and values are described by TYPE.
   If that type exists already, reuse it.
   TYPE must be a FUNCTION_TYPE node.  */

tree
build_method_type (tree basetype, tree type)
{
  gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);

  return build_method_type_directly (basetype,
				     TREE_TYPE (type),
				     TYPE_ARG_TYPES (type));
}

8617 /* Construct, lay out and return the type of offsets to a value
8618 of type TYPE, within an object of type BASETYPE.
8619 If a suitable offset type exists already, reuse it. */
8622 build_offset_type (tree basetype
, tree type
)
8625 inchash::hash hstate
;
8627 /* Make a node of the sort we want. */
8628 t
= make_node (OFFSET_TYPE
);
8630 TYPE_OFFSET_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
8631 TREE_TYPE (t
) = type
;
8633 /* If we already have such a type, use the old one. */
8634 hstate
.add_object (TYPE_HASH (basetype
));
8635 hstate
.add_object (TYPE_HASH (type
));
8636 t
= type_hash_canon (hstate
.end (), t
);
8638 if (!COMPLETE_TYPE_P (t
))
8641 if (TYPE_CANONICAL (t
) == t
)
8643 if (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
8644 || TYPE_STRUCTURAL_EQUALITY_P (type
))
8645 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8646 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)) != basetype
8647 || TYPE_CANONICAL (type
) != type
)
8649 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)),
8650 TYPE_CANONICAL (type
));
8656 /* Create a complex type whose components are COMPONENT_TYPE. */
8659 build_complex_type (tree component_type
)
8662 inchash::hash hstate
;
8664 gcc_assert (INTEGRAL_TYPE_P (component_type
)
8665 || SCALAR_FLOAT_TYPE_P (component_type
)
8666 || FIXED_POINT_TYPE_P (component_type
));
8668 /* Make a node of the sort we want. */
8669 t
= make_node (COMPLEX_TYPE
);
8671 TREE_TYPE (t
) = TYPE_MAIN_VARIANT (component_type
);
8673 /* If we already have such a type, use the old one. */
8674 hstate
.add_object (TYPE_HASH (component_type
));
8675 t
= type_hash_canon (hstate
.end (), t
);
8677 if (!COMPLETE_TYPE_P (t
))
8680 if (TYPE_CANONICAL (t
) == t
)
8682 if (TYPE_STRUCTURAL_EQUALITY_P (component_type
))
8683 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8684 else if (TYPE_CANONICAL (component_type
) != component_type
)
8686 = build_complex_type (TYPE_CANONICAL (component_type
));
8689 /* We need to create a name, since complex is a fundamental type. */
8690 if (! TYPE_NAME (t
))
8693 if (component_type
== char_type_node
)
8694 name
= "complex char";
8695 else if (component_type
== signed_char_type_node
)
8696 name
= "complex signed char";
8697 else if (component_type
== unsigned_char_type_node
)
8698 name
= "complex unsigned char";
8699 else if (component_type
== short_integer_type_node
)
8700 name
= "complex short int";
8701 else if (component_type
== short_unsigned_type_node
)
8702 name
= "complex short unsigned int";
8703 else if (component_type
== integer_type_node
)
8704 name
= "complex int";
8705 else if (component_type
== unsigned_type_node
)
8706 name
= "complex unsigned int";
8707 else if (component_type
== long_integer_type_node
)
8708 name
= "complex long int";
8709 else if (component_type
== long_unsigned_type_node
)
8710 name
= "complex long unsigned int";
8711 else if (component_type
== long_long_integer_type_node
)
8712 name
= "complex long long int";
8713 else if (component_type
== long_long_unsigned_type_node
)
8714 name
= "complex long long unsigned int";
8719 TYPE_NAME (t
) = build_decl (UNKNOWN_LOCATION
, TYPE_DECL
,
8720 get_identifier (name
), t
);
8723 return build_qualified_type (t
, TYPE_QUALS (component_type
));
8726 /* If TYPE is a real or complex floating-point type and the target
8727 does not directly support arithmetic on TYPE then return the wider
8728 type to be used for arithmetic on TYPE. Otherwise, return
8732 excess_precision_type (tree type
)
8734 if (flag_excess_precision
!= EXCESS_PRECISION_FAST
)
8736 int flt_eval_method
= TARGET_FLT_EVAL_METHOD
;
8737 switch (TREE_CODE (type
))
8740 switch (flt_eval_method
)
8743 if (TYPE_MODE (type
) == TYPE_MODE (float_type_node
))
8744 return double_type_node
;
8747 if (TYPE_MODE (type
) == TYPE_MODE (float_type_node
)
8748 || TYPE_MODE (type
) == TYPE_MODE (double_type_node
))
8749 return long_double_type_node
;
8756 if (TREE_CODE (TREE_TYPE (type
)) != REAL_TYPE
)
8758 switch (flt_eval_method
)
8761 if (TYPE_MODE (TREE_TYPE (type
)) == TYPE_MODE (float_type_node
))
8762 return complex_double_type_node
;
8765 if (TYPE_MODE (TREE_TYPE (type
)) == TYPE_MODE (float_type_node
)
8766 || (TYPE_MODE (TREE_TYPE (type
))
8767 == TYPE_MODE (double_type_node
)))
8768 return complex_long_double_type_node
;
8781 /* Return OP, stripped of any conversions to wider types as much as is safe.
8782 Converting the value back to OP's type makes a value equivalent to OP.
8784 If FOR_TYPE is nonzero, we return a value which, if converted to
8785 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8787 OP must have integer, real or enumeral type. Pointers are not allowed!
8789 There are some cases where the obvious value we could return
8790 would regenerate to OP if converted to OP's type,
8791 but would not extend like OP to wider types.
8792 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8793 For example, if OP is (unsigned short)(signed char)-1,
8794 we avoid returning (signed char)-1 if FOR_TYPE is int,
8795 even though extending that to an unsigned short would regenerate OP,
8796 since the result of extending (signed char)-1 to (int)
8797 is different from (int) OP. */
8800 get_unwidened (tree op
, tree for_type
)
8802 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8803 tree type
= TREE_TYPE (op
);
8805 = TYPE_PRECISION (for_type
!= 0 ? for_type
: type
);
8807 = (for_type
!= 0 && for_type
!= type
8808 && final_prec
> TYPE_PRECISION (type
)
8809 && TYPE_UNSIGNED (type
));
8812 while (CONVERT_EXPR_P (op
))
8816 /* TYPE_PRECISION on vector types has different meaning
8817 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8818 so avoid them here. */
8819 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op
, 0))) == VECTOR_TYPE
)
8822 bitschange
= TYPE_PRECISION (TREE_TYPE (op
))
8823 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0)));
8825 /* Truncations are many-one so cannot be removed.
8826 Unless we are later going to truncate down even farther. */
8828 && final_prec
> TYPE_PRECISION (TREE_TYPE (op
)))
8831 /* See what's inside this conversion. If we decide to strip it,
8833 op
= TREE_OPERAND (op
, 0);
8835 /* If we have not stripped any zero-extensions (uns is 0),
8836 we can strip any kind of extension.
8837 If we have previously stripped a zero-extension,
8838 only zero-extensions can safely be stripped.
8839 Any extension can be stripped if the bits it would produce
8840 are all going to be discarded later by truncating to FOR_TYPE. */
8844 if (! uns
|| final_prec
<= TYPE_PRECISION (TREE_TYPE (op
)))
8846 /* TYPE_UNSIGNED says whether this is a zero-extension.
8847 Let's avoid computing it if it does not affect WIN
8848 and if UNS will not be needed again. */
8850 || CONVERT_EXPR_P (op
))
8851 && TYPE_UNSIGNED (TREE_TYPE (op
)))
8859 /* If we finally reach a constant see if it fits in for_type and
8860 in that case convert it. */
8862 && TREE_CODE (win
) == INTEGER_CST
8863 && TREE_TYPE (win
) != for_type
8864 && int_fits_type_p (win
, for_type
))
8865 win
= fold_convert (for_type
, win
);
8870 /* Return OP or a simpler expression for a narrower value
8871 which can be sign-extended or zero-extended to give back OP.
8872 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8873 or 0 if the value should be sign-extended. */
8876 get_narrower (tree op
, int *unsignedp_ptr
)
8881 bool integral_p
= INTEGRAL_TYPE_P (TREE_TYPE (op
));
8883 while (TREE_CODE (op
) == NOP_EXPR
)
8886 = (TYPE_PRECISION (TREE_TYPE (op
))
8887 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0))));
8889 /* Truncations are many-one so cannot be removed. */
8893 /* See what's inside this conversion. If we decide to strip it,
8898 op
= TREE_OPERAND (op
, 0);
8899 /* An extension: the outermost one can be stripped,
8900 but remember whether it is zero or sign extension. */
8902 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8903 /* Otherwise, if a sign extension has been stripped,
8904 only sign extensions can now be stripped;
8905 if a zero extension has been stripped, only zero-extensions. */
8906 else if (uns
!= TYPE_UNSIGNED (TREE_TYPE (op
)))
8910 else /* bitschange == 0 */
8912 /* A change in nominal type can always be stripped, but we must
8913 preserve the unsignedness. */
8915 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8917 op
= TREE_OPERAND (op
, 0);
8918 /* Keep trying to narrow, but don't assign op to win if it
8919 would turn an integral type into something else. */
8920 if (INTEGRAL_TYPE_P (TREE_TYPE (op
)) != integral_p
)
8927 if (TREE_CODE (op
) == COMPONENT_REF
8928 /* Since type_for_size always gives an integer type. */
8929 && TREE_CODE (TREE_TYPE (op
)) != REAL_TYPE
8930 && TREE_CODE (TREE_TYPE (op
)) != FIXED_POINT_TYPE
8931 /* Ensure field is laid out already. */
8932 && DECL_SIZE (TREE_OPERAND (op
, 1)) != 0
8933 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op
, 1))))
8935 unsigned HOST_WIDE_INT innerprec
8936 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op
, 1)));
8937 int unsignedp
= (DECL_UNSIGNED (TREE_OPERAND (op
, 1))
8938 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op
, 1))));
8939 tree type
= lang_hooks
.types
.type_for_size (innerprec
, unsignedp
);
8941 /* We can get this structure field in a narrower type that fits it,
8942 but the resulting extension to its nominal type (a fullword type)
8943 must satisfy the same conditions as for other extensions.
8945 Do this only for fields that are aligned (not bit-fields),
8946 because when bit-field insns will be used there is no
8947 advantage in doing this. */
8949 if (innerprec
< TYPE_PRECISION (TREE_TYPE (op
))
8950 && ! DECL_BIT_FIELD (TREE_OPERAND (op
, 1))
8951 && (first
|| uns
== DECL_UNSIGNED (TREE_OPERAND (op
, 1)))
8955 uns
= DECL_UNSIGNED (TREE_OPERAND (op
, 1));
8956 win
= fold_convert (type
, op
);
8960 *unsignedp_ptr
= uns
;
8964 /* Returns true if integer constant C has a value that is permissible
8965 for type TYPE (an INTEGER_TYPE). */
8968 int_fits_type_p (const_tree c
, const_tree type
)
8970 tree type_low_bound
, type_high_bound
;
8971 bool ok_for_low_bound
, ok_for_high_bound
;
8972 signop sgn_c
= TYPE_SIGN (TREE_TYPE (c
));
8975 type_low_bound
= TYPE_MIN_VALUE (type
);
8976 type_high_bound
= TYPE_MAX_VALUE (type
);
8978 /* If at least one bound of the type is a constant integer, we can check
8979 ourselves and maybe make a decision. If no such decision is possible, but
8980 this type is a subtype, try checking against that. Otherwise, use
8981 fits_to_tree_p, which checks against the precision.
8983 Compute the status for each possibly constant bound, and return if we see
8984 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8985 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8986 for "constant known to fit". */
8988 /* Check if c >= type_low_bound. */
8989 if (type_low_bound
&& TREE_CODE (type_low_bound
) == INTEGER_CST
)
8991 if (tree_int_cst_lt (c
, type_low_bound
))
8993 ok_for_low_bound
= true;
8996 ok_for_low_bound
= false;
8998 /* Check if c <= type_high_bound. */
8999 if (type_high_bound
&& TREE_CODE (type_high_bound
) == INTEGER_CST
)
9001 if (tree_int_cst_lt (type_high_bound
, c
))
9003 ok_for_high_bound
= true;
9006 ok_for_high_bound
= false;
9008 /* If the constant fits both bounds, the result is known. */
9009 if (ok_for_low_bound
&& ok_for_high_bound
)
9012 /* Perform some generic filtering which may allow making a decision
9013 even if the bounds are not constant. First, negative integers
9014 never fit in unsigned types, */
9015 if (TYPE_UNSIGNED (type
) && sgn_c
== SIGNED
&& wi::neg_p (c
))
9018 /* Second, narrower types always fit in wider ones. */
9019 if (TYPE_PRECISION (type
) > TYPE_PRECISION (TREE_TYPE (c
)))
9022 /* Third, unsigned integers with top bit set never fit signed types. */
9023 if (!TYPE_UNSIGNED (type
) && sgn_c
== UNSIGNED
)
9025 int prec
= GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c
))) - 1;
9026 if (prec
< TYPE_PRECISION (TREE_TYPE (c
)))
9028 /* When a tree_cst is converted to a wide-int, the precision
9029 is taken from the type. However, if the precision of the
9030 mode underneath the type is smaller than that, it is
9031 possible that the value will not fit. The test below
9032 fails if any bit is set between the sign bit of the
9033 underlying mode and the top bit of the type. */
9034 if (wi::ne_p (wi::zext (c
, prec
- 1), c
))
9037 else if (wi::neg_p (c
))
9041 /* If we haven't been able to decide at this point, there nothing more we
9042 can check ourselves here. Look at the base type if we have one and it
9043 has the same precision. */
9044 if (TREE_CODE (type
) == INTEGER_TYPE
9045 && TREE_TYPE (type
) != 0
9046 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (type
)))
9048 type
= TREE_TYPE (type
);
9052 /* Or to fits_to_tree_p, if nothing else. */
9053 return wi::fits_to_tree_p (c
, type
);
9056 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9057 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9058 represented (assuming two's-complement arithmetic) within the bit
9059 precision of the type are returned instead. */
9062 get_type_static_bounds (const_tree type
, mpz_t min
, mpz_t max
)
9064 if (!POINTER_TYPE_P (type
) && TYPE_MIN_VALUE (type
)
9065 && TREE_CODE (TYPE_MIN_VALUE (type
)) == INTEGER_CST
)
9066 wi::to_mpz (TYPE_MIN_VALUE (type
), min
, TYPE_SIGN (type
));
9069 if (TYPE_UNSIGNED (type
))
9070 mpz_set_ui (min
, 0);
9073 wide_int mn
= wi::min_value (TYPE_PRECISION (type
), SIGNED
);
9074 wi::to_mpz (mn
, min
, SIGNED
);
9078 if (!POINTER_TYPE_P (type
) && TYPE_MAX_VALUE (type
)
9079 && TREE_CODE (TYPE_MAX_VALUE (type
)) == INTEGER_CST
)
9080 wi::to_mpz (TYPE_MAX_VALUE (type
), max
, TYPE_SIGN (type
));
9083 wide_int mn
= wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
9084 wi::to_mpz (mn
, max
, TYPE_SIGN (type
));
/* Return true if VAR is an automatic variable defined in function FN.  */

bool
auto_var_in_fn_p (const_tree var, const_tree fn)
{
  return (DECL_P (var) && DECL_CONTEXT (var) == fn
	  && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
		|| TREE_CODE (var) == PARM_DECL)
	       && ! TREE_STATIC (var))
	      || TREE_CODE (var) == LABEL_DECL
	      || TREE_CODE (var) == RESULT_DECL));
}

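/* Illustrative example (hypothetical source): given

     void fn (int p) { int l; static int s; extern int e; }

   auto_var_in_fn_p is true for `p' and `l' (a PARM_DECL and a non-static,
   non-external VAR_DECL whose DECL_CONTEXT is FN) and false for `s' and
   `e'.  */
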
9101 /* Subprogram of following function. Called by walk_tree.
9103 Return *TP if it is an automatic variable or parameter of the
9104 function passed in as DATA. */
9107 find_var_from_fn (tree
*tp
, int *walk_subtrees
, void *data
)
9109 tree fn
= (tree
) data
;
9114 else if (DECL_P (*tp
)
9115 && auto_var_in_fn_p (*tp
, fn
))
9121 /* Returns true if T is, contains, or refers to a type with variable
9122    size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9123    arguments, but not the return type.  If FN is nonzero, only return
9124    true if a modifier of the type or position of FN is a variable or
9125    parameter inside FN.
9127    This concept is more general than that of C99 'variably modified types':
9128    in C99, a struct type is never variably modified because a VLA may not
9129    appear as a structure member.  However, in GNU C code like:
9131      struct S { int i[f()]; };
9133    is valid, and other languages may define similar constructs.  */
9136 variably_modified_type_p (tree type, tree fn)
9140 /* Test if T is either variable (if FN is zero) or an expression containing
9141    a variable in FN.  If TYPE isn't gimplified, return true also if
9142    gimplify_one_sizepos would gimplify the expression into a local
9144 #define RETURN_TRUE_IF_VAR(T) \
9145   do { tree _t = (T); \
9146     if (_t != NULL_TREE \
9147         && _t != error_mark_node \
9148         && TREE_CODE (_t) != INTEGER_CST \
9149         && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9151             || (!TYPE_SIZES_GIMPLIFIED (type) \
9152                 && !is_gimple_sizepos (_t)) \
9153             || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9154       return true;  } while (0)
9156   if (type == error_mark_node)
9159   /* If TYPE itself has variable size, it is variably modified.  */
9160   RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9161   RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9163   switch (TREE_CODE (type))
9166     case REFERENCE_TYPE:
9168       if (variably_modified_type_p (TREE_TYPE (type), fn))
9174       /* If TYPE is a function type, it is variably modified if the
9175          return type is variably modified.  */
9176       if (variably_modified_type_p (TREE_TYPE (type), fn))
9182     case FIXED_POINT_TYPE:
9185       /* Scalar types are variably modified if their end points
9187       RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9188       RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9193     case QUAL_UNION_TYPE:
9194       /* We can't see if any of the fields are variably-modified by the
9195          definition we normally use, since that would produce infinite
9196          recursion via pointers.  */
9197       /* This is variably modified if some field's type is.  */
9198       for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9199         if (TREE_CODE (t) == FIELD_DECL)
9201             RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9202             RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9203             RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9205             if (TREE_CODE (type) == QUAL_UNION_TYPE)
9206               RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9211       /* Do not call ourselves to avoid infinite recursion.  This is
9212          variably modified if the element type is.  */
9213       RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9214       RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9221   /* The current language may have other cases to check, but in general,
9222      all other types are not variably modified.  */
9223   return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9225 #undef RETURN_TRUE_IF_VAR
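/* Illustrative sketch: a front end deciding whether a declaration needs
   run-time size evaluation might test its type like this; FN may be
   NULL_TREE to ask about variability in any enclosing function.  */
static bool
decl_needs_runtime_size_sketch (tree decl)
{
  return variably_modified_type_p (TREE_TYPE (decl), NULL_TREE);
}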
9228 /* Given a DECL or TYPE, return the scope in which it was declared, or
9229    NULL_TREE if there is no containing scope.  */
9232 get_containing_scope (const_tree t)
9234   return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9237 /* Return the innermost context enclosing DECL that is
9238    a FUNCTION_DECL, or zero if none.  */
9241 decl_function_context (const_tree decl)
9245   if (TREE_CODE (decl) == ERROR_MARK)
9248   /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9249      where we look up the function at runtime.  Such functions always take
9250      a first argument of type 'pointer to real context'.
9252      C++ should really be fixed to use DECL_CONTEXT for the real context,
9253      and use something else for the "virtual context".  */
9254   else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9257       (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9259     context = DECL_CONTEXT (decl);
9261   while (context && TREE_CODE (context) != FUNCTION_DECL)
9263       if (TREE_CODE (context) == BLOCK)
9264         context = BLOCK_SUPERCONTEXT (context);
9266         context = get_containing_scope (context);
9272 /* Return the innermost context enclosing DECL that is
9273    a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9274    TYPE_DECLs and FUNCTION_DECLs are transparent to this function.  */
9277 decl_type_context (const_tree decl)
9279   tree context = DECL_CONTEXT (decl);
9282     switch (TREE_CODE (context))
9284       case NAMESPACE_DECL:
9285       case TRANSLATION_UNIT_DECL:
9290       case QUAL_UNION_TYPE:
9295         context = DECL_CONTEXT (context);
9299         context = BLOCK_SUPERCONTEXT (context);
9309 /* CALL is a CALL_EXPR.  Return the declaration for the function
9310    called, or NULL_TREE if the called function cannot be
9314 get_callee_fndecl (const_tree call)
9318   if (call == error_mark_node)
9319     return error_mark_node;
9321   /* It's invalid to call this function with anything but a
9323   gcc_assert (TREE_CODE (call) == CALL_EXPR);
9325   /* The first operand to the CALL is the address of the function
9327   addr = CALL_EXPR_FN (call);
9329   /* If there is no function, return early.  */
9330   if (addr == NULL_TREE)
9335   /* If this is a readonly function pointer, extract its initial value.  */
9336   if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9337       && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9338       && DECL_INITIAL (addr))
9339     addr = DECL_INITIAL (addr);
9341   /* If the address is just `&f' for some function `f', then we know
9342      that `f' is being called.  */
9343   if (TREE_CODE (addr) == ADDR_EXPR
9344       && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9345     return TREE_OPERAND (addr, 0);
9347   /* We couldn't figure out what was being called.  */
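/* A small usage sketch (illustrative; the helper name is made up): callers
   commonly resolve a CALL_EXPR to a named callee by consuming the result of
   get_callee_fndecl and falling back gracefully when it is NULL_TREE.  */
static bool
call_is_to_named_fn_sketch (tree call, const char *name)
{
  tree fndecl = get_callee_fndecl (call);
  return (fndecl != NULL_TREE
          && DECL_NAME (fndecl) != NULL_TREE
          && strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)), name) == 0);
}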
9351 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9352    return the associated function code, otherwise return CFN_LAST.  */
9355 get_call_combined_fn (const_tree call)
9357   /* It's invalid to call this function with anything but a CALL_EXPR.  */
9358   gcc_assert (TREE_CODE (call) == CALL_EXPR);
9360   if (!CALL_EXPR_FN (call))
9361     return as_combined_fn (CALL_EXPR_IFN (call));
9363   tree fndecl = get_callee_fndecl (call);
9364   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9365     return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9370 #define TREE_MEM_USAGE_SPACES 40
9372 /* Print debugging information about tree nodes generated during the compile,
9373    and any language-specific information.  */
9376 dump_tree_statistics (void)
9378   if (GATHER_STATISTICS)
9381       int total_nodes, total_bytes;
9382       fprintf (stderr, "\nKind                   Nodes      Bytes\n");
9383       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9384       total_nodes = total_bytes = 0;
9385       for (i = 0; i < (int) all_kinds; i++)
9387           fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9388                    tree_node_counts[i], tree_node_sizes[i]);
9389           total_nodes += tree_node_counts[i];
9390           total_bytes += tree_node_sizes[i];
9392       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9393       fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9394       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9395       fprintf (stderr, "Code                   Nodes\n");
9396       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9397       for (i = 0; i < (int) MAX_TREE_CODES; i++)
9398         fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9399                  tree_code_counts[i]);
9400       mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9401       fprintf (stderr, "\n");
9402       ssanames_print_statistics ();
9403       fprintf (stderr, "\n");
9404       phinodes_print_statistics ();
9405       fprintf (stderr, "\n");
9408     fprintf (stderr, "(No per-node statistics)\n");
9410   print_type_hash_statistics ();
9411   print_debug_expr_statistics ();
9412   print_value_expr_statistics ();
9413   lang_hooks.print_statistics ();
9416 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9418 /* Generate a crc32 of the high BITS bits of VALUE.  */
9421 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9425   for (ix = bits; ix--; value <<= 1)
9429       feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9436 /* Generate a crc32 of a 32-bit unsigned.  */
9439 crc32_unsigned (unsigned chksum, unsigned value)
9441   return crc32_unsigned_bits (chksum, value, 32);
9444 /* Generate a crc32 of a byte.  */
9447 crc32_byte (unsigned chksum, char byte)
9449   return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9452 /* Generate a crc32 of a string.  */
9455 crc32_string (unsigned chksum, const char *string)
9459     chksum = crc32_byte (chksum, *string);
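/* Usage sketch (illustrative): the helpers above fold data into a running
   CRC-32 with the 0x04c11db7 polynomial; get_file_function_name below uses
   crc32_string in exactly this accumulate-and-reuse style.  */
static unsigned
crc32_two_strings_sketch (const char *a, const char *b)
{
  unsigned chksum = crc32_string (0, a);   /* start from an all-zero state */
  chksum = crc32_string (chksum, b);       /* keep folding into the same sum */
  return chksum;
}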
9465 /* P is a string that will be used in a symbol.  Mask out any characters
9466    that are not valid in that context.  */
9469 clean_symbol_name (char *p)
9473 #ifndef NO_DOLLAR_IN_LABEL	/* this for `$'; unlikely, but... -- kr */
9476 #ifndef NO_DOT_IN_LABEL		/* this for `.'; unlikely, but...  */
9483 /* For anonymous aggregate types, we need some sort of name to
9484    hold on to.  In practice, this should not appear, but it should
9485    not be harmful if it does.  */
9487 anon_aggrname_p(const_tree id_node)
9489 #ifndef NO_DOT_IN_LABEL
9490   return (IDENTIFIER_POINTER (id_node)[0] == '.'
9491           && IDENTIFIER_POINTER (id_node)[1] == '_');
9492 #else /* NO_DOT_IN_LABEL */
9493 #ifndef NO_DOLLAR_IN_LABEL
9494   return (IDENTIFIER_POINTER (id_node)[0] == '$' \
9495           && IDENTIFIER_POINTER (id_node)[1] == '_');
9496 #else /* NO_DOLLAR_IN_LABEL */
9497 #define ANON_AGGRNAME_PREFIX "__anon_"
9498   return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9499                     sizeof (ANON_AGGRNAME_PREFIX) - 1));
9500 #endif /* NO_DOLLAR_IN_LABEL */
9501 #endif /* NO_DOT_IN_LABEL */
9504 /* Return a format for an anonymous aggregate name.  */
9506 anon_aggrname_format()
9508 #ifndef NO_DOT_IN_LABEL
9510 #else /* NO_DOT_IN_LABEL */
9511 #ifndef NO_DOLLAR_IN_LABEL
9513 #else /* NO_DOLLAR_IN_LABEL */
9515 #endif /* NO_DOLLAR_IN_LABEL */
9516 #endif /* NO_DOT_IN_LABEL */
9519 /* Generate a name for a special-purpose function.
9520    The generated name may need to be unique across the whole link.
9521    Changes to this function may also require corresponding changes to
9522    xstrdup_mask_random.
9523    TYPE is some string to identify the purpose of this function to the
9524    linker or collect2; it must start with an uppercase letter,
9526    I - for constructors
9528    N - for C++ anonymous namespaces
9529    F - for DWARF unwind frame information.  */
9532 get_file_function_name (const char *type)
9538   /* If we already have a name we know to be unique, just use that.  */
9539   if (first_global_object_name)
9540     p = q = ASTRDUP (first_global_object_name);
9541   /* If the target is handling the constructors/destructors, they
9542      will be local to this file and the name is only necessary for
9544      We also assign sub_I and sub_D suffixes to constructors called from
9545      the global static constructors.  These are always local.  */
9546   else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9547            || (strncmp (type, "sub_", 4) == 0
9548                && (type[4] == 'I' || type[4] == 'D')))
9550       const char *file = main_input_filename;
9552         file = LOCATION_FILE (input_location);
9553       /* Just use the file's basename, because the full pathname
9554          might be quite long.  */
9555       p = q = ASTRDUP (lbasename (file));
9559       /* Otherwise, the name must be unique across the entire link.
9560          We don't have anything that we know to be unique to this translation
9561          unit, so use what we do have and throw in some randomness.  */
9563       const char *name = weak_global_object_name;
9564       const char *file = main_input_filename;
9569         file = LOCATION_FILE (input_location);
9571       len = strlen (file);
9572       q = (char *) alloca (9 + 17 + len + 1);
9573       memcpy (q, file, len + 1);
9575       snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9576                 crc32_string (0, name), get_random_seed (false));
9581   clean_symbol_name (q);
9582   buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9585   /* Set up the name of the file-level functions we may need.
9586      Use a global object (which is already required to be unique over
9587      the program) rather than the file name (which imposes extra
9589   sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9591   return get_identifier (buf);
9594 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9596 /* Complain that the tree code of NODE does not match the expected 0
9597    terminated list of trailing codes.  The trailing code list can be
9598    empty, for a more vague error message.  FILE, LINE, and FUNCTION
9599    are of the caller.  */
9602 tree_check_failed (const_tree node, const char *file,
9603                    int line, const char *function, ...)
9607   unsigned length = 0;
9608   enum tree_code code;
9610   va_start (args, function);
9611   while ((code = (enum tree_code) va_arg (args, int)))
9612     length += 4 + strlen (get_tree_code_name (code));
9617       va_start (args, function);
9618       length += strlen ("expected ");
9619       buffer = tmp = (char *) alloca (length);
9621       while ((code = (enum tree_code) va_arg (args, int)))
9623           const char *prefix = length ? " or " : "expected ";
9625           strcpy (tmp + length, prefix);
9626           length += strlen (prefix);
9627           strcpy (tmp + length, get_tree_code_name (code));
9628           length += strlen (get_tree_code_name (code));
9633     buffer = "unexpected node";
9635   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9636                   buffer, get_tree_code_name (TREE_CODE (node)),
9637                   function, trim_filename (file), line);
9640 /* Complain that the tree code of NODE does match the expected 0
9641    terminated list of trailing codes.  FILE, LINE, and FUNCTION are of
9645 tree_not_check_failed (const_tree node, const char *file,
9646                        int line, const char *function, ...)
9650   unsigned length = 0;
9651   enum tree_code code;
9653   va_start (args, function);
9654   while ((code = (enum tree_code) va_arg (args, int)))
9655     length += 4 + strlen (get_tree_code_name (code));
9657   va_start (args, function);
9658   buffer = (char *) alloca (length);
9660   while ((code = (enum tree_code) va_arg (args, int)))
9664           strcpy (buffer + length, " or ");
9667       strcpy (buffer + length, get_tree_code_name (code));
9668       length += strlen (get_tree_code_name (code));
9672   internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9673                   buffer, get_tree_code_name (TREE_CODE (node)),
9674                   function, trim_filename (file), line);
9677 /* Similar to tree_check_failed, except that we check for a class of tree
9678    code, given in CL.  */
9681 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9682                          const char *file, int line, const char *function)
9685     ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9686      TREE_CODE_CLASS_STRING (cl),
9687      TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9688      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9691 /* Similar to tree_check_failed, except that instead of specifying a
9692    dozen codes, use the knowledge that they're all sequential.  */
9695 tree_range_check_failed (const_tree node, const char *file, int line,
9696                          const char *function, enum tree_code c1,
9700   unsigned length = 0;
9703   for (c = c1; c <= c2; ++c)
9704     length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9706   length += strlen ("expected ");
9707   buffer = (char *) alloca (length);
9710   for (c = c1; c <= c2; ++c)
9712       const char *prefix = length ? " or " : "expected ";
9714       strcpy (buffer + length, prefix);
9715       length += strlen (prefix);
9716       strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9717       length += strlen (get_tree_code_name ((enum tree_code) c));
9720   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9721                   buffer, get_tree_code_name (TREE_CODE (node)),
9722                   function, trim_filename (file), line);
9726 /* Similar to tree_check_failed, except that we check that a tree does
9727    not have the specified code, given in CL.  */
9730 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9731                              const char *file, int line, const char *function)
9734     ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9735      TREE_CODE_CLASS_STRING (cl),
9736      TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9737      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9741 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes.  */
9744 omp_clause_check_failed (const_tree node, const char *file, int line,
9745                          const char *function, enum omp_clause_code code)
9747   internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9748                   omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9749                   function, trim_filename (file), line);
9753 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes.  */
9756 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9757                                const char *function, enum omp_clause_code c1,
9758                                enum omp_clause_code c2)
9761   unsigned length = 0;
9764   for (c = c1; c <= c2; ++c)
9765     length += 4 + strlen (omp_clause_code_name[c]);
9767   length += strlen ("expected ");
9768   buffer = (char *) alloca (length);
9771   for (c = c1; c <= c2; ++c)
9773       const char *prefix = length ? " or " : "expected ";
9775       strcpy (buffer + length, prefix);
9776       length += strlen (prefix);
9777       strcpy (buffer + length, omp_clause_code_name[c]);
9778       length += strlen (omp_clause_code_name[c]);
9781   internal_error ("tree check: %s, have %s in %s, at %s:%d",
9782                   buffer, omp_clause_code_name[TREE_CODE (node)],
9783                   function, trim_filename (file), line);
9787 #undef DEFTREESTRUCT
9788 #define DEFTREESTRUCT(VAL, NAME) NAME,
9790 static const char *ts_enum_names[] = {
9791 #include "treestruct.def"
9793 #undef DEFTREESTRUCT
9795 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9797 /* Similar to tree_class_check_failed, except that we check for
9798    whether CODE contains the tree structure identified by EN.  */
9801 tree_contains_struct_check_failed (const_tree node,
9802                                    const enum tree_node_structure_enum en,
9803                                    const char *file, int line,
9804                                    const char *function)
9807     ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9809      get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9813 /* Similar to above, except that the check is for the bounds of a
9814    tree_int_cst's (dynamically sized) vector of elements.  */
9817 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9818                                const char *function)
9821     ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9822      idx + 1, len, function, trim_filename (file), line);
9825 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9826    (dynamically sized) vector.  */
9829 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9830                            const char *function)
9833     ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9834      idx + 1, len, function, trim_filename (file), line);
9837 /* Similar to above, except that the check is for the bounds of the operand
9838    vector of an expression node EXP.  */
9841 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9842                            int line, const char *function)
9844   enum tree_code code = TREE_CODE (exp);
9846     ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9847      idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9848      function, trim_filename (file), line);
9851 /* Similar to above, except that the check is for the number of
9852    operands of an OMP_CLAUSE node.  */
9855 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9856                                  int line, const char *function)
9859     ("tree check: accessed operand %d of omp_clause %s with %d operands "
9860      "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9861      omp_clause_num_ops[OMP_CLAUSE_CODE (t)], function,
9862      trim_filename (file), line);
9864 #endif /* ENABLE_TREE_CHECKING */
9866 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9867    and mapped to the machine mode MODE.  Initialize its fields and build
9868    the information necessary for debugging output.  */
9871 make_vector_type (tree innertype, int nunits, machine_mode mode)
9874   inchash::hash hstate;
9875   tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9877   t = make_node (VECTOR_TYPE);
9878   TREE_TYPE (t) = mv_innertype;
9879   SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9880   SET_TYPE_MODE (t, mode);
9882   if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9883     SET_TYPE_STRUCTURAL_EQUALITY (t);
9884   else if ((TYPE_CANONICAL (mv_innertype) != innertype
9885             || mode != VOIDmode)
9886            && !VECTOR_BOOLEAN_TYPE_P (t))
9888       = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9892   hstate.add_wide_int (VECTOR_TYPE);
9893   hstate.add_wide_int (nunits);
9894   hstate.add_wide_int (mode);
9895   hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9896   t = type_hash_canon (hstate.end (), t);
9898   /* We have built a main variant, based on the main variant of the
9899      inner type.  Use it to build the variant we return.  */
9900   if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9901       && TREE_TYPE (t) != innertype)
9902     return build_type_attribute_qual_variant (t,
9903                                               TYPE_ATTRIBUTES (innertype),
9904                                               TYPE_QUALS (innertype));
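/* Usage sketch (illustrative): front ends and optimizers normally reach
   make_vector_type through the public wrappers defined later in this file;
   for example a 4 x float vector type can be requested like this, leaving
   the mode and canonical form to make_vector_type itself.  */
static tree
build_v4sf_type_sketch (void)
{
  return build_vector_type (float_type_node, 4);
}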
9910 make_or_reuse_type (unsigned size, int unsignedp)
9914   if (size == INT_TYPE_SIZE)
9915     return unsignedp ? unsigned_type_node : integer_type_node;
9916   if (size == CHAR_TYPE_SIZE)
9917     return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9918   if (size == SHORT_TYPE_SIZE)
9919     return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9920   if (size == LONG_TYPE_SIZE)
9921     return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9922   if (size == LONG_LONG_TYPE_SIZE)
9923     return (unsignedp ? long_long_unsigned_type_node
9924             : long_long_integer_type_node);
9926   for (i = 0; i < NUM_INT_N_ENTS; i++)
9927     if (size == int_n_data[i].bitsize
9928         && int_n_enabled_p[i])
9929       return (unsignedp ? int_n_trees[i].unsigned_type
9930               : int_n_trees[i].signed_type);
9933     return make_unsigned_type (size);
9935     return make_signed_type (size);
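/* Illustrative sketch: asking for a 32-bit unsigned type this way hands back
   an existing standard node when one of the C type sizes matches (e.g.
   unsigned_type_node on common targets); only unusual widths fall through
   to make_unsigned_type/make_signed_type.  */
static tree
uint32_like_type_sketch (void)
{
  return make_or_reuse_type (32, /*unsignedp=*/1);
}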
9938 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP.  */
9941 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9945       if (size == SHORT_FRACT_TYPE_SIZE)
9946         return unsignedp ? sat_unsigned_short_fract_type_node
9947                          : sat_short_fract_type_node;
9948       if (size == FRACT_TYPE_SIZE)
9949         return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9950       if (size == LONG_FRACT_TYPE_SIZE)
9951         return unsignedp ? sat_unsigned_long_fract_type_node
9952                          : sat_long_fract_type_node;
9953       if (size == LONG_LONG_FRACT_TYPE_SIZE)
9954         return unsignedp ? sat_unsigned_long_long_fract_type_node
9955                          : sat_long_long_fract_type_node;
9959       if (size == SHORT_FRACT_TYPE_SIZE)
9960         return unsignedp ? unsigned_short_fract_type_node
9961                          : short_fract_type_node;
9962       if (size == FRACT_TYPE_SIZE)
9963         return unsignedp ? unsigned_fract_type_node : fract_type_node;
9964       if (size == LONG_FRACT_TYPE_SIZE)
9965         return unsignedp ? unsigned_long_fract_type_node
9966                          : long_fract_type_node;
9967       if (size == LONG_LONG_FRACT_TYPE_SIZE)
9968         return unsignedp ? unsigned_long_long_fract_type_node
9969                          : long_long_fract_type_node;
9972   return make_fract_type (size, unsignedp, satp);
9975 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP.  */
9978 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9982       if (size == SHORT_ACCUM_TYPE_SIZE)
9983         return unsignedp ? sat_unsigned_short_accum_type_node
9984                          : sat_short_accum_type_node;
9985       if (size == ACCUM_TYPE_SIZE)
9986         return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9987       if (size == LONG_ACCUM_TYPE_SIZE)
9988         return unsignedp ? sat_unsigned_long_accum_type_node
9989                          : sat_long_accum_type_node;
9990       if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9991         return unsignedp ? sat_unsigned_long_long_accum_type_node
9992                          : sat_long_long_accum_type_node;
9996       if (size == SHORT_ACCUM_TYPE_SIZE)
9997         return unsignedp ? unsigned_short_accum_type_node
9998                          : short_accum_type_node;
9999       if (size == ACCUM_TYPE_SIZE)
10000         return unsignedp ? unsigned_accum_type_node : accum_type_node;
10001       if (size == LONG_ACCUM_TYPE_SIZE)
10002         return unsignedp ? unsigned_long_accum_type_node
10003                          : long_accum_type_node;
10004       if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10005         return unsignedp ? unsigned_long_long_accum_type_node
10006                          : long_long_accum_type_node;
10009   return make_accum_type (size, unsignedp, satp);
10013 /* Create an atomic variant node for TYPE.  This routine is called
10014    during initialization of data types to create the 5 basic atomic
10015    types.  The generic build_variant_type function requires these to
10016    already be set up in order to function properly, so cannot be
10017    called from there.  If ALIGN is non-zero, then ensure alignment is
10018    overridden to this value.  */
10021 build_atomic_base (tree type, unsigned int align)
10025   /* Make sure it's not already registered.  */
10026   if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10029   t = build_variant_type_copy (type);
10030   set_type_quals (t, TYPE_QUAL_ATOMIC);
10033     TYPE_ALIGN (t) = align;
10038 /* Create nodes for all integer types (and error_mark_node) using the sizes
10039    of C datatypes.  SIGNED_CHAR specifies whether char is signed,
10040    SHORT_DOUBLE specifies whether double should be of the same precision
10044 build_common_tree_nodes (bool signed_char, bool short_double)
10048   error_mark_node = make_node (ERROR_MARK);
10049   TREE_TYPE (error_mark_node) = error_mark_node;
10051   initialize_sizetypes ();
10053   /* Define both `signed char' and `unsigned char'.  */
10054   signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10055   TYPE_STRING_FLAG (signed_char_type_node) = 1;
10056   unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10057   TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10059   /* Define `char', which is like either `signed char' or `unsigned char'
10060      but not the same as either.  */
10063          ? make_signed_type (CHAR_TYPE_SIZE)
10064          : make_unsigned_type (CHAR_TYPE_SIZE));
10065   TYPE_STRING_FLAG (char_type_node) = 1;
10067   short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10068   short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10069   integer_type_node = make_signed_type (INT_TYPE_SIZE);
10070   unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10071   long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10072   long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10073   long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10074   long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10076   for (i = 0; i < NUM_INT_N_ENTS; i++)
10078       int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10079       int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10080       TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
10081       TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
10083       if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
10084           && int_n_enabled_p[i])
10086           integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10087           integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10091   /* Define a boolean type.  This type only represents boolean values but
10092      may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
10093   boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10094   TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10095   TYPE_PRECISION (boolean_type_node) = 1;
10096   TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10098   /* Define what type to use for size_t.  */
10099   if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10100     size_type_node = unsigned_type_node;
10101   else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10102     size_type_node = long_unsigned_type_node;
10103   else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10104     size_type_node = long_long_unsigned_type_node;
10105   else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10106     size_type_node = short_unsigned_type_node;
10111       size_type_node = NULL_TREE;
10112       for (i = 0; i < NUM_INT_N_ENTS; i++)
10113         if (int_n_enabled_p[i])
10116             sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10118             if (strcmp (name, SIZE_TYPE) == 0)
10120                 size_type_node = int_n_trees[i].unsigned_type;
10123       if (size_type_node == NULL_TREE)
10124         gcc_unreachable ();
10127   /* Fill in the rest of the sized types.  Reuse existing type nodes
10129   intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10130   intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10131   intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10132   intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10133   intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10135   unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10136   unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10137   unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10138   unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10139   unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10141   /* Don't call build_qualified type for atomics.  That routine does
10142      special processing for atomics, and until they are initialized
10143      it's better not to make that call.
10145      Check to see if there is a target override for atomic types.  */
10147   atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10148                                           targetm.atomic_align_for_mode (QImode));
10149   atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10150                                           targetm.atomic_align_for_mode (HImode));
10151   atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10152                                           targetm.atomic_align_for_mode (SImode));
10153   atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10154                                           targetm.atomic_align_for_mode (DImode));
10155   atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10156                                           targetm.atomic_align_for_mode (TImode));
10158   access_public_node = get_identifier ("public");
10159   access_protected_node = get_identifier ("protected");
10160   access_private_node = get_identifier ("private");
10162   /* Define these next since types below may use them.  */
10163   integer_zero_node = build_int_cst (integer_type_node, 0);
10164   integer_one_node = build_int_cst (integer_type_node, 1);
10165   integer_three_node = build_int_cst (integer_type_node, 3);
10166   integer_minus_one_node = build_int_cst (integer_type_node, -1);
10168   size_zero_node = size_int (0);
10169   size_one_node = size_int (1);
10170   bitsize_zero_node = bitsize_int (0);
10171   bitsize_one_node = bitsize_int (1);
10172   bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10174   boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10175   boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10177   void_type_node = make_node (VOID_TYPE);
10178   layout_type (void_type_node);
10180   pointer_bounds_type_node = targetm.chkp_bound_type ();
10182   /* We are not going to have real types in C with less than byte alignment,
10183      so we might as well not have any types that claim to have it.  */
10184   TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
10185   TYPE_USER_ALIGN (void_type_node) = 0;
10187   void_node = make_node (VOID_CST);
10188   TREE_TYPE (void_node) = void_type_node;
10190   null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10191   layout_type (TREE_TYPE (null_pointer_node));
10193   ptr_type_node = build_pointer_type (void_type_node);
10194   const_ptr_type_node
10195     = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10196   fileptr_type_node = ptr_type_node;
10198   pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10200   float_type_node = make_node (REAL_TYPE);
10201   TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10202   layout_type (float_type_node);
10204   double_type_node = make_node (REAL_TYPE);
10206     TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
10208     TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10209   layout_type (double_type_node);
10211   long_double_type_node = make_node (REAL_TYPE);
10212   TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10213   layout_type (long_double_type_node);
10215   float_ptr_type_node = build_pointer_type (float_type_node);
10216   double_ptr_type_node = build_pointer_type (double_type_node);
10217   long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10218   integer_ptr_type_node = build_pointer_type (integer_type_node);
10220   /* Fixed size integer types.  */
10221   uint16_type_node = make_or_reuse_type (16, 1);
10222   uint32_type_node = make_or_reuse_type (32, 1);
10223   uint64_type_node = make_or_reuse_type (64, 1);
10225   /* Decimal float types.  */
10226   dfloat32_type_node = make_node (REAL_TYPE);
10227   TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10228   layout_type (dfloat32_type_node);
10229   SET_TYPE_MODE (dfloat32_type_node, SDmode);
10230   dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10232   dfloat64_type_node = make_node (REAL_TYPE);
10233   TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10234   layout_type (dfloat64_type_node);
10235   SET_TYPE_MODE (dfloat64_type_node, DDmode);
10236   dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10238   dfloat128_type_node = make_node (REAL_TYPE);
10239   TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10240   layout_type (dfloat128_type_node);
10241   SET_TYPE_MODE (dfloat128_type_node, TDmode);
10242   dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10244   complex_integer_type_node = build_complex_type (integer_type_node);
10245   complex_float_type_node = build_complex_type (float_type_node);
10246   complex_double_type_node = build_complex_type (double_type_node);
10247   complex_long_double_type_node = build_complex_type (long_double_type_node);
10249 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
10250 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10251   sat_ ## KIND ## _type_node = \
10252     make_sat_signed_ ## KIND ## _type (SIZE); \
10253   sat_unsigned_ ## KIND ## _type_node = \
10254     make_sat_unsigned_ ## KIND ## _type (SIZE); \
10255   KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10256   unsigned_ ## KIND ## _type_node = \
10257     make_unsigned_ ## KIND ## _type (SIZE);
10259 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10260   sat_ ## WIDTH ## KIND ## _type_node = \
10261     make_sat_signed_ ## KIND ## _type (SIZE); \
10262   sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10263     make_sat_unsigned_ ## KIND ## _type (SIZE); \
10264   WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10265   unsigned_ ## WIDTH ## KIND ## _type_node = \
10266     make_unsigned_ ## KIND ## _type (SIZE);
10268 /* Make fixed-point type nodes based on four different widths.  */
10269 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10270   MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10271   MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10272   MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10273   MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10275 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
10276 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10277   NAME ## _type_node = \
10278     make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10279   u ## NAME ## _type_node = \
10280     make_or_reuse_unsigned_ ## KIND ## _type \
10281       (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10282   sat_ ## NAME ## _type_node = \
10283     make_or_reuse_sat_signed_ ## KIND ## _type \
10284       (GET_MODE_BITSIZE (MODE ## mode)); \
10285   sat_u ## NAME ## _type_node = \
10286     make_or_reuse_sat_unsigned_ ## KIND ## _type \
10287       (GET_MODE_BITSIZE (U ## MODE ## mode));
10289   /* Fixed-point type and mode nodes.  */
10290   MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10291   MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10292   MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10293   MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10294   MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10295   MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10296   MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10297   MAKE_FIXED_MODE_NODE (accum, ha, HA)
10298   MAKE_FIXED_MODE_NODE (accum, sa, SA)
10299   MAKE_FIXED_MODE_NODE (accum, da, DA)
10300   MAKE_FIXED_MODE_NODE (accum, ta, TA)
10303     tree t = targetm.build_builtin_va_list ();
10305     /* Many back-ends define record types without setting TYPE_NAME.
10306        If we copied the record type here, we'd keep the original
10307        record type without a name.  This breaks name mangling.  So,
10308        don't copy record types and let c_common_nodes_and_builtins()
10309        declare the type to be __builtin_va_list.  */
10310     if (TREE_CODE (t) != RECORD_TYPE)
10311       t = build_variant_type_copy (t);
10313     va_list_type_node = t;
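/* Usage sketch (illustrative; the exact call site and flag names are an
   assumption that varies by front end): each front end initializes the
   shared nodes once at startup, driven by its own options, roughly as

     build_common_tree_nodes (flag_signed_char, flag_short_double);

   before creating any language-specific types or builtins.  */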
10317 /* Modify DECL for given flags.
10318    TM_PURE attribute is set only on types, so the function will modify
10319    DECL's type when ECF_TM_PURE is used.  */
10322 set_call_expr_flags (tree decl, int flags)
10324   if (flags & ECF_NOTHROW)
10325     TREE_NOTHROW (decl) = 1;
10326   if (flags & ECF_CONST)
10327     TREE_READONLY (decl) = 1;
10328   if (flags & ECF_PURE)
10329     DECL_PURE_P (decl) = 1;
10330   if (flags & ECF_LOOPING_CONST_OR_PURE)
10331     DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10332   if (flags & ECF_NOVOPS)
10333     DECL_IS_NOVOPS (decl) = 1;
10334   if (flags & ECF_NORETURN)
10335     TREE_THIS_VOLATILE (decl) = 1;
10336   if (flags & ECF_MALLOC)
10337     DECL_IS_MALLOC (decl) = 1;
10338   if (flags & ECF_RETURNS_TWICE)
10339     DECL_IS_RETURNS_TWICE (decl) = 1;
10340   if (flags & ECF_LEAF)
10341     DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10342                                         NULL, DECL_ATTRIBUTES (decl));
10343   if ((flags & ECF_TM_PURE) && flag_tm)
10344     apply_tm_attr (decl, get_identifier ("transaction_pure"));
10345   /* Looping const or pure is implied by noreturn.
10346      There is currently no way to declare looping const or looping pure alone.  */
10347   gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10348               || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10352 /* A subroutine of build_common_builtin_nodes.  Define a builtin function.  */
10355 local_define_builtin (const char *name, tree type, enum built_in_function code,
10356                       const char *library_name, int ecf_flags)
10360   decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10361                                library_name, NULL_TREE);
10362   set_call_expr_flags (decl, ecf_flags);
10364   set_builtin_decl (code, decl, true);
10367 /* Call this function after instantiating all builtins that the language
10368    front end cares about.  This will build the rest of the builtins
10369    and internal functions that are relied upon by the tree optimizers and
10373 build_common_builtin_nodes (void)
10378   if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10380       ftype = build_function_type (void_type_node, void_list_node);
10381       local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10382                             "__builtin_unreachable",
10383                             ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10387   if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10388       || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10390       ftype = build_function_type_list (ptr_type_node,
10391                                         ptr_type_node, const_ptr_type_node,
10392                                         size_type_node, NULL_TREE);
10394       if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10395         local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10396                               "memcpy", ECF_NOTHROW | ECF_LEAF);
10397       if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10398         local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10399                               "memmove", ECF_NOTHROW | ECF_LEAF);
10402   if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10404       ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10405                                         const_ptr_type_node, size_type_node,
10407       local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10408                             "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10411   if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10413       ftype = build_function_type_list (ptr_type_node,
10414                                         ptr_type_node, integer_type_node,
10415                                         size_type_node, NULL_TREE);
10416       local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10417                             "memset", ECF_NOTHROW | ECF_LEAF);
10420   if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10422       ftype = build_function_type_list (ptr_type_node,
10423                                         size_type_node, NULL_TREE);
10424       local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10425                             "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10428   ftype = build_function_type_list (ptr_type_node, size_type_node,
10429                                     size_type_node, NULL_TREE);
10430   local_define_builtin ("__builtin_alloca_with_align", ftype,
10431                         BUILT_IN_ALLOCA_WITH_ALIGN,
10432                         "__builtin_alloca_with_align",
10433                         ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10435   /* If we're checking the stack, `alloca' can throw.  */
10436   if (flag_stack_check)
10438       TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10439       TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10442   ftype = build_function_type_list (void_type_node,
10443                                     ptr_type_node, ptr_type_node,
10444                                     ptr_type_node, NULL_TREE);
10445   local_define_builtin ("__builtin_init_trampoline", ftype,
10446                         BUILT_IN_INIT_TRAMPOLINE,
10447                         "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10448   local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10449                         BUILT_IN_INIT_HEAP_TRAMPOLINE,
10450                         "__builtin_init_heap_trampoline",
10451                         ECF_NOTHROW | ECF_LEAF);
10453   ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10454   local_define_builtin ("__builtin_adjust_trampoline", ftype,
10455                         BUILT_IN_ADJUST_TRAMPOLINE,
10456                         "__builtin_adjust_trampoline",
10457                         ECF_CONST | ECF_NOTHROW);
10459   ftype = build_function_type_list (void_type_node,
10460                                     ptr_type_node, ptr_type_node, NULL_TREE);
10461   local_define_builtin ("__builtin_nonlocal_goto", ftype,
10462                         BUILT_IN_NONLOCAL_GOTO,
10463                         "__builtin_nonlocal_goto",
10464                         ECF_NORETURN | ECF_NOTHROW);
10466   ftype = build_function_type_list (void_type_node,
10467                                     ptr_type_node, ptr_type_node, NULL_TREE);
10468   local_define_builtin ("__builtin_setjmp_setup", ftype,
10469                         BUILT_IN_SETJMP_SETUP,
10470                         "__builtin_setjmp_setup", ECF_NOTHROW);
10472   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10473   local_define_builtin ("__builtin_setjmp_receiver", ftype,
10474                         BUILT_IN_SETJMP_RECEIVER,
10475                         "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10477   ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10478   local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10479                         "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10481   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10482   local_define_builtin ("__builtin_stack_restore", ftype,
10483                         BUILT_IN_STACK_RESTORE,
10484                         "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10486   /* If there's a possibility that we might use the ARM EABI, build the
10487      alternate __cxa_end_cleanup node used to resume from C++ and Java.  */
10488   if (targetm.arm_eabi_unwinder)
10490       ftype = build_function_type_list (void_type_node, NULL_TREE);
10491       local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10492                             BUILT_IN_CXA_END_CLEANUP,
10493                             "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10496   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10497   local_define_builtin ("__builtin_unwind_resume", ftype,
10498                         BUILT_IN_UNWIND_RESUME,
10499                         ((targetm_common.except_unwind_info (&global_options)
10501                          ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10504   if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10506       ftype = build_function_type_list (ptr_type_node, integer_type_node,
10508       local_define_builtin ("__builtin_return_address", ftype,
10509                             BUILT_IN_RETURN_ADDRESS,
10510                             "__builtin_return_address",
10514   if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10515       || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10517       ftype = build_function_type_list (void_type_node, ptr_type_node,
10518                                         ptr_type_node, NULL_TREE);
10519       if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10520         local_define_builtin ("__cyg_profile_func_enter", ftype,
10521                               BUILT_IN_PROFILE_FUNC_ENTER,
10522                               "__cyg_profile_func_enter", 0);
10523       if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10524         local_define_builtin ("__cyg_profile_func_exit", ftype,
10525                               BUILT_IN_PROFILE_FUNC_EXIT,
10526                               "__cyg_profile_func_exit", 0);
10529   /* The exception object and filter values from the runtime.  The argument
10530      must be zero before exception lowering, i.e. from the front end.  After
10531      exception lowering, it will be the region number for the exception
10532      landing pad.  These functions are PURE instead of CONST to prevent
10533      them from being hoisted past the exception edge that will initialize
10534      its value in the landing pad.  */
10535   ftype = build_function_type_list (ptr_type_node,
10536                                     integer_type_node, NULL_TREE);
10537   ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10538   /* Only use TM_PURE if we have TM language support.  */
10539   if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10540     ecf_flags |= ECF_TM_PURE;
10541   local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10542                         "__builtin_eh_pointer", ecf_flags);
10544   tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10545   ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10546   local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10547                         "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10549   ftype = build_function_type_list (void_type_node,
10550                                     integer_type_node, integer_type_node,
10552   local_define_builtin ("__builtin_eh_copy_values", ftype,
10553                         BUILT_IN_EH_COPY_VALUES,
10554                         "__builtin_eh_copy_values", ECF_NOTHROW);
10556   /* Complex multiplication and division.  These are handled as builtins
10557      rather than optabs because emit_library_call_value doesn't support
10558      complex.  Further, we can do slightly better with folding these
10559      beasties if the real and complex parts of the arguments are separate.  */
10563     for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10565         char mode_name_buf[4], *q;
10567         enum built_in_function mcode, dcode;
10568         tree type, inner_type;
10569         const char *prefix = "__";
10571         if (targetm.libfunc_gnu_prefix)
10574         type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10577         inner_type = TREE_TYPE (type);
10579         ftype = build_function_type_list (type, inner_type, inner_type,
10580                                           inner_type, inner_type, NULL_TREE);
10582         mcode = ((enum built_in_function)
10583                  (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10584         dcode = ((enum built_in_function)
10585                  (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10587         for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10591         built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10593         local_define_builtin (built_in_names[mcode], ftype, mcode,
10594                               built_in_names[mcode],
10595                               ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10597         built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10599         local_define_builtin (built_in_names[dcode], ftype, dcode,
10600                               built_in_names[dcode],
10601                               ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10605   init_internal_fns ();
10608 /* HACK.  GROSS.  This is absolutely disgusting.  I wish there was a
10611    If we requested a pointer to a vector, build up the pointers that
10612    we stripped off while looking for the inner type.  Similarly for
10613    return values from functions.
10615    The argument TYPE is the top of the chain, and BOTTOM is the
10616    new type which we will point to.  */
10619 reconstruct_complex_type (tree type, tree bottom)
10623   if (TREE_CODE (type) == POINTER_TYPE)
10625       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10626       outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10627                                            TYPE_REF_CAN_ALIAS_ALL (type));
10629   else if (TREE_CODE (type) == REFERENCE_TYPE)
10631       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10632       outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10633                                              TYPE_REF_CAN_ALIAS_ALL (type));
10635   else if (TREE_CODE (type) == ARRAY_TYPE)
10637       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10638       outer = build_array_type (inner, TYPE_DOMAIN (type));
10640   else if (TREE_CODE (type) == FUNCTION_TYPE)
10642       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10643       outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10645   else if (TREE_CODE (type) == METHOD_TYPE)
10647       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10648       /* The build_method_type_directly() routine prepends 'this' to argument list,
10649          so we must compensate by getting rid of it.  */
10651         = build_method_type_directly
10652             (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10654              TREE_CHAIN (TYPE_ARG_TYPES (type)));
10656   else if (TREE_CODE (type) == OFFSET_TYPE)
10658       inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10659       outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10664   return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10665                                             TYPE_QUALS (type));
10668 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10671 build_vector_type_for_mode (tree innertype, machine_mode mode)
10675   switch (GET_MODE_CLASS (mode))
10677     case MODE_VECTOR_INT:
10678     case MODE_VECTOR_FLOAT:
10679     case MODE_VECTOR_FRACT:
10680     case MODE_VECTOR_UFRACT:
10681     case MODE_VECTOR_ACCUM:
10682     case MODE_VECTOR_UACCUM:
10683       nunits = GET_MODE_NUNITS (mode);
10687       /* Check that there are no leftover bits.  */
10688       gcc_assert (GET_MODE_BITSIZE (mode)
10689                   % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10691       nunits = GET_MODE_BITSIZE (mode)
10692                / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10696       gcc_unreachable ();
10699   return make_vector_type (innertype, nunits, mode);
10702 /* Similarly, but takes the inner type and number of units, which must be
10706 build_vector_type (tree innertype, int nunits)
10708   return make_vector_type (innertype, nunits, VOIDmode);
10711 /* Build truth vector with specified length and number of units.  */
10714 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10716   machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
10719   gcc_assert (mask_mode != VOIDmode);
10721   unsigned HOST_WIDE_INT vsize;
10722   if (mask_mode == BLKmode)
10723     vsize = vector_size * BITS_PER_UNIT;
10725     vsize = GET_MODE_BITSIZE (mask_mode);
10727   unsigned HOST_WIDE_INT esize = vsize / nunits;
10728   gcc_assert (esize * nunits == vsize);
10730   tree bool_type = build_nonstandard_boolean_type (esize);
10732   return make_vector_type (bool_type, nunits, mask_mode);
10735 /* Returns a vector type corresponding to a comparison of VECTYPE.  */
10738 build_same_sized_truth_vector_type (tree vectype)
10740   if (VECTOR_BOOLEAN_TYPE_P (vectype))
10743   unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10746     size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10748   return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10751 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set.  */
10754 build_opaque_vector_type (tree innertype, int nunits)
10756   tree t = make_vector_type (innertype, nunits, VOIDmode);
10758   /* We always build the non-opaque variant before the opaque one,
10759      so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
10760   cand = TYPE_NEXT_VARIANT (t);
10762       && TYPE_VECTOR_OPAQUE (cand)
10763       && check_qualified_type (cand, t, TYPE_QUALS (t)))
10765   /* Otherwise build a variant type and make sure to queue it after
10766      the non-opaque type.  */
10767   cand = build_distinct_type_copy (t);
10768   TYPE_VECTOR_OPAQUE (cand) = true;
10769   TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10770   TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10771   TYPE_NEXT_VARIANT (t) = cand;
10772   TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10777 /* Given an initializer INIT, return TRUE if INIT is zero or some
10778    aggregate of zeros.  Otherwise return FALSE.  */
10780 initializer_zerop (const_tree init)
10786   switch (TREE_CODE (init))
10789       return integer_zerop (init);
10792       /* ??? Note that this is not correct for C4X float formats.  There,
10793          a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10794          negative exponent.  */
10795       return real_zerop (init)
10796              && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10799       return fixed_zerop (init);
10802       return integer_zerop (init)
10803              || (real_zerop (init)
10804                  && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10805                  && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10810         for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10811           if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10818         unsigned HOST_WIDE_INT idx;
10820         if (TREE_CLOBBER_P (init))
10822         FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10823           if (!initializer_zerop (elt))
10832       /* We need to loop through all elements to handle cases like
10833          "\0" and "\0foobar".  */
10834       for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10835         if (TREE_STRING_POINTER (init)[i] != '\0')
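/* Illustrative sketch: initializer_zerop is what lets callers treat an
   all-zero CONSTRUCTOR, string, or scalar constant as "just clear the
   memory"; for instance the zero constant built for a type is expected to
   satisfy it.  */
static bool
zero_cst_is_zero_init_sketch (tree type)
{
  return initializer_zerop (build_zero_cst (type));  /* expected: true */
}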
10846 /* Check if vector VEC consists of all the equal elements and
10847    that the number of elements corresponds to the type of VEC.
10848    The function returns first element of the vector
10849    or NULL_TREE if the vector is not uniform.  */
10851 uniform_vector_p (const_tree vec)
10856   if (vec == NULL_TREE)
10859   gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10861   if (TREE_CODE (vec) == VECTOR_CST)
10863       first = VECTOR_CST_ELT (vec, 0);
10864       for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10865         if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10871   else if (TREE_CODE (vec) == CONSTRUCTOR)
10873       first = error_mark_node;
10875       FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10882           if (!operand_equal_p (first, t, 0))
10885       if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
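/* Illustrative sketch: a vector whose lanes were all built from the same
   element is uniform, and uniform_vector_p hands that element back (or an
   operand_equal_p-equivalent one); otherwise it returns NULL_TREE.  */
static tree
splat_element_sketch (tree vectype, tree elt)
{
  tree vec = build_vector_from_val (vectype, elt);  /* splat ELT into every lane */
  return uniform_vector_p (vec);
}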
10894 /* Build an empty statement at location LOC.  */
10897 build_empty_stmt (location_t loc)
10899   tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10900   SET_EXPR_LOCATION (t, loc);
10905 /* Build an OpenMP clause with code CODE.  LOC is the location of the
10909 build_omp_clause (location_t loc, enum omp_clause_code code)
10914   length = omp_clause_num_ops[code];
10915   size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10917   record_node_allocation_statistics (OMP_CLAUSE, size);
10919   t = (tree) ggc_internal_alloc (size);
10920   memset (t, 0, size);
10921   TREE_SET_CODE (t, OMP_CLAUSE);
10922   OMP_CLAUSE_SET_CODE (t, code);
10923   OMP_CLAUSE_LOCATION (t) = loc;
10928 /* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
10929    includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10930    Except for the CODE and operand count field, other storage for the
10931    object is initialized to zeros.  */
10934 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10937   int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10939   gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10940   gcc_assert (len >= 1);
10942   record_node_allocation_statistics (code, length);
10944   t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10946   TREE_SET_CODE (t, code);
10948   /* Can't use TREE_OPERAND to store the length because if checking is
10949      enabled, it will try to check the length before we store it.  :-P  */
10950   t->exp.operands[0] = build_int_cst (sizetype, len);
10955 /* Helper function for build_call_* functions; build a CALL_EXPR with
10956 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10957 the argument slots. */
10960 build_call_1 (tree return_type
, tree fn
, int nargs
)
10964 t
= build_vl_exp (CALL_EXPR
, nargs
+ 3);
10965 TREE_TYPE (t
) = return_type
;
10966 CALL_EXPR_FN (t
) = fn
;
10967 CALL_EXPR_STATIC_CHAIN (t
) = NULL
;
10972 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10973 FN and a null static chain slot. NARGS is the number of call arguments
10974 which are specified as "..." arguments. */
10977 build_call_nary (tree return_type
, tree fn
, int nargs
, ...)
10981 va_start (args
, nargs
);
10982 ret
= build_call_valist (return_type
, fn
, nargs
, args
);
10987 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10988 FN and a null static chain slot. NARGS is the number of call arguments
10989 which are specified as a va_list ARGS. */
10992 build_call_valist (tree return_type
, tree fn
, int nargs
, va_list args
)
10997 t
= build_call_1 (return_type
, fn
, nargs
);
10998 for (i
= 0; i
< nargs
; i
++)
10999 CALL_EXPR_ARG (t
, i
) = va_arg (args
, tree
);
11000 process_call_operands (t
);
/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
   FN and a null static chain slot.  NARGS is the number of call arguments
   which are specified as a tree array ARGS.  */

tree
build_call_array_loc (location_t loc, tree return_type, tree fn,
                      int nargs, const tree *args)
{
  tree t;
  int i;

  t = build_call_1 (return_type, fn, nargs);
  for (i = 0; i < nargs; i++)
    CALL_EXPR_ARG (t, i) = args[i];
  process_call_operands (t);
  SET_EXPR_LOCATION (t, loc);

  return t;
}
/* Like build_call_array, but takes a vec.  */

tree
build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
{
  tree ret, t;
  unsigned int ix;

  ret = build_call_1 (return_type, fn, vec_safe_length (args));
  FOR_EACH_VEC_SAFE_ELT (args, ix, t)
    CALL_EXPR_ARG (ret, ix) = t;
  process_call_operands (ret);

  return ret;
}
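
/* Illustrative sketch, not part of the original source: building a call
   through build_call_vec from a GC-allocated argument vector.  FN_ADDR is
   assumed to already be an ADDR_EXPR of the callee, as build_call_1
   expects for CALL_EXPR_FN.  */
#if 0
static tree
example_call_from_vec (tree return_type, tree fn_addr, tree arg0, tree arg1)
{
  vec<tree, va_gc> *args = NULL;
  vec_safe_push (args, arg0);
  vec_safe_push (args, arg1);
  /* Allocates the CALL_EXPR, copies the arguments and computes flags.  */
  return build_call_vec (return_type, fn_addr, args);
}
#endif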
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called and N arguments are passed in the array
   ARGARRAY.  */

tree
build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
{
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called and the arguments are passed in the vector
   VEC.  */

tree
build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
{
  return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
                                    vec_safe_address (vec));
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  */

tree
build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return build_call_expr_loc_array (loc, fndecl, n, argarray);
}
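
/* Illustrative sketch, not part of the original source: the varargs
   convenience builder above wraps FNDECL in an ADDR_EXPR and folds the
   result.  FNDECL, ARG0 and ARG1 are hypothetical trees from the caller.  */
#if 0
static tree
example_build_two_arg_call (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  /* Equivalent to build_call_expr_loc_array with a two-element array.  */
  return build_call_expr_loc (loc, fndecl, 2, arg0, arg1);
}
#endif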
/* Like build_call_expr_loc (UNKNOWN_LOCATION, ...).  Duplicated because
   varargs macros aren't supported by all bootstrap compilers.  */

tree
build_call_expr (tree fndecl, int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
}
/* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
   type TYPE.  This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
   It will get gimplified later into an ordinary internal function.  */

tree
build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
                                    tree type, int n, const tree *args)
{
  tree t = build_call_1 (type, NULL_TREE, n);
  for (int i = 0; i < n; ++i)
    CALL_EXPR_ARG (t, i) = args[i];
  SET_EXPR_LOCATION (t, loc);
  CALL_EXPR_IFN (t) = ifn;
  return t;
}
/* Build internal call expression.  This is just like CALL_EXPR, except
   its CALL_EXPR_FN is NULL.  It will get gimplified later into an ordinary
   internal function.  */

tree
build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
                              tree type, int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
}
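
/* Illustrative sketch, not part of the original source: building an internal
   function call.  The resulting CALL_EXPR has a NULL CALL_EXPR_FN and
   records IFN in CALL_EXPR_IFN; IFN, TYPE and ARG are supplied by the
   caller.  */
#if 0
static tree
example_internal_call (location_t loc, internal_fn ifn, tree type, tree arg)
{
  return build_call_expr_internal_loc (loc, ifn, type, 1, arg);
}
#endif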
/* Return a function call to FN, if the target is guaranteed to support it,
   or null otherwise.

   N is the number of arguments, passed in the "...", and TYPE is the
   type of the return value.  */

tree
maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
                           int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  if (internal_fn_p (fn))
    {
      internal_fn ifn = as_internal_fn (fn);
      if (direct_internal_fn_p (ifn))
        {
          tree_pair types = direct_internal_fn_types (ifn, type, argarray);
          if (!direct_internal_fn_supported_p (ifn, types,
                                               OPTIMIZE_FOR_BOTH))
            return NULL_TREE;
        }
      return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
    }
  else
    {
      tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
      if (!fndecl)
        return NULL_TREE;
      return build_call_expr_loc_array (loc, fndecl, n, argarray);
    }
}
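
/* Illustrative sketch, not part of the original source: callers of
   maybe_build_call_expr_loc must be prepared for a NULL_TREE result when
   neither an internal function nor an implicit builtin is available.
   CFN_SQRT is used here only as an example combined_fn.  */
#if 0
static tree
example_maybe_sqrt (location_t loc, tree type, tree arg)
{
  tree call = maybe_build_call_expr_loc (loc, CFN_SQRT, type, 1, arg);
  if (!call)
    return NULL_TREE;  /* The target cannot expand it; fall back elsewhere.  */
  return call;
}
#endif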
/* Create a new constant string literal and return a char* pointer to it.
   The STRING_CST value is the LEN characters at STR.  */

tree
build_string_literal (int len, const char *str)
{
  tree t, elem, index, type;

  t = build_string (len, str);
  elem = build_type_variant (char_type_node, 1, 0);
  index = build_index_type (size_int (len - 1));
  type = build_array_type (elem, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  type = build_pointer_type (elem);
  t = build1 (ADDR_EXPR, type,
              build4 (ARRAY_REF, elem,
                      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
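
/* Illustrative sketch, not part of the original source: the returned tree is
   a 'char *' ADDR_EXPR that can be used directly as a call argument.
   Callers usually pass a length that includes the terminating NUL.  */
#if 0
static tree
example_hello_literal (void)
{
  static const char msg[] = "hello";
  /* sizeof (msg) counts the trailing '\0'.  */
  return build_string_literal (sizeof (msg), msg);
}
#endif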
/* Return true if T (assumed to be a DECL) must be assigned a memory
   location.  */

bool
needs_to_live_in_memory (const_tree t)
{
  return (TREE_ADDRESSABLE (t)
          || is_global_var (t)
          || (TREE_CODE (t) == RESULT_DECL
              && !DECL_BY_REFERENCE (t)
              && aggregate_value_p (t, current_function_decl)));
}
/* Return value of a constant X and sign-extend it.  */

HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      if (negative)
        val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
      else
        val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
    }

  return val;
}
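
/* Illustrative sketch, not part of the original source: the sign extension
   above means the same low-order bits read back differently depending on
   the signedness of the constant's type.  */
#if 0
static void
example_int_cst_value (void)
{
  tree scst = build_int_cst (signed_char_type_node, -1);
  tree ucst = build_int_cst (unsigned_char_type_node, 255);
  gcc_assert (int_cst_value (scst) == -1);   /* 0xff, sign-extended.  */
  gcc_assert (int_cst_value (ucst) == 255);  /* 0xff, zero-extended.  */
}
#endif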
/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is unsigned iff UNSIGNEDP is true, or itself
   if TYPE is already an integer type of signedness UNSIGNEDP.  */

tree
signed_or_unsigned_type_for (int unsignedp, tree type)
{
  if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
    return type;

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      tree inner = TREE_TYPE (type);
      tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
      if (!inner2)
        return NULL_TREE;
      if (inner == inner2)
        return type;
      return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
    }

  if (!INTEGRAL_TYPE_P (type)
      && !POINTER_TYPE_P (type)
      && TREE_CODE (type) != OFFSET_TYPE)
    return NULL_TREE;

  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is unsigned, or itself if TYPE is already an
   unsigned integer type.  */

tree
unsigned_type_for (tree type)
{
  return signed_or_unsigned_type_for (1, type);
}

/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is signed, or itself if TYPE is already a
   signed integer type.  */

tree
signed_type_for (tree type)
{
  return signed_or_unsigned_type_for (0, type);
}

/* If TYPE is a vector type, return a signed integer vector type with the
   same width and number of subparts.  Otherwise return boolean_type_node.  */

tree
truth_type_for (tree type)
{
  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if (VECTOR_BOOLEAN_TYPE_P (type))
        return type;
      return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
                                      GET_MODE_SIZE (TYPE_MODE (type)));
    }
  else
    return boolean_type_node;
}
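
/* Illustrative sketch, not part of the original source: the helpers above
   flip the signedness of an arithmetic type while keeping its precision.  */
#if 0
static void
example_signedness_helpers (void)
{
  tree u = unsigned_type_for (integer_type_node);
  tree s = signed_type_for (u);
  gcc_assert (TYPE_UNSIGNED (u) && !TYPE_UNSIGNED (s));
  gcc_assert (TYPE_PRECISION (u) == TYPE_PRECISION (integer_type_node));
}
#endif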
/* Returns the largest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
upper_bound_in_type (tree outer, tree inner)
{
  unsigned int det = 0;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);
  unsigned prec;

  /* Compute a unique number for every combination.  */
  det |= (oprec > iprec) ? 4 : 0;
  det |= TYPE_UNSIGNED (outer) ? 2 : 0;
  det |= TYPE_UNSIGNED (inner) ? 1 : 0;

  /* Determine the exponent to use.  */
  switch (det)
    {
    case 0:
    case 1:
      /* oprec <= iprec, outer: signed, inner: don't care.  */
      prec = oprec - 1;
      break;
    case 2:
    case 3:
      /* oprec <= iprec, outer: unsigned, inner: don't care.  */
      prec = oprec;
      break;
    case 4:
      /* oprec > iprec, outer: signed, inner: signed.  */
      prec = iprec - 1;
      break;
    case 5:
      /* oprec > iprec, outer: signed, inner: unsigned.  */
      prec = iprec;
      break;
    case 6:
      /* oprec > iprec, outer: unsigned, inner: signed.  */
      prec = iprec - 1;
      break;
    case 7:
      /* oprec > iprec, outer: unsigned, inner: unsigned.  */
      prec = iprec;
      break;
    default:
      gcc_unreachable ();
    }

  return wide_int_to_tree (outer,
                           wi::mask (prec, false, TYPE_PRECISION (outer)));
}
/* Returns the smallest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
lower_bound_in_type (tree outer, tree inner)
{
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);

  /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
     and obtain 0.  */
  if (TYPE_UNSIGNED (outer)
      /* If we are widening something of an unsigned type, OUTER type
         contains all values of INNER type.  In particular, both INNER
         and OUTER types have zero in common.  */
      || (oprec > iprec && TYPE_UNSIGNED (inner)))
    return build_int_cst (outer, 0);
  else
    {
      /* If we are widening a signed type to another signed type, we
         want to obtain -2^^(iprec-1).  If we are keeping the
         precision or narrowing to a signed type, we want to obtain
         -2^^(oprec-1).  */
      unsigned prec = oprec > iprec ? iprec : oprec;
      return wide_int_to_tree (outer,
                               wi::mask (prec - 1, true,
                                         TYPE_PRECISION (outer)));
    }
}
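
/* Illustrative sketch, not part of the original source: a worked example of
   the two helpers above.  Casting an 8-bit signed value to a 16-bit signed
   type yields at most 127 and at least -128, i.e. wi::mask (7, false, 16)
   and wi::mask (7, true, 16) respectively.  */
#if 0
static void
example_cast_bounds (void)
{
  tree outer = build_nonstandard_integer_type (16, 0 /* signed */);
  tree inner = build_nonstandard_integer_type (8, 0 /* signed */);
  gcc_assert (tree_to_shwi (upper_bound_in_type (outer, inner)) == 127);
  gcc_assert (tree_to_shwi (lower_bound_in_type (outer, inner)) == -128);
}
#endif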
/* Return nonzero if two operands that are suitable for PHI nodes are
   necessarily equal.  Specifically, both ARG0 and ARG1 must be either
   SSA_NAME or invariant.  Note that this is strictly an optimization.
   That is, callers of this function can directly call operand_equal_p
   and get the same result, only slower.  */

int
operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
{
  if (arg0 == arg1)
    return 1;
  if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
    return 0;
  return operand_equal_p (arg0, arg1, 0);
}

/* Returns number of zeros at the end of binary representation of X.  */

tree
num_ending_zeros (const_tree x)
{
  return build_int_cst (TREE_TYPE (x), wi::ctz (x));
}
#define WALK_SUBTREE(NODE)                                      \
  do                                                            \
    {                                                           \
      result = walk_tree_1 (&(NODE), func, data, pset, lh);     \
      if (result)                                               \
        return result;                                          \
    }                                                           \
  while (0)

/* This is a subroutine of walk_tree that walks field of TYPE that are to
   be walked whenever a type is seen in the tree.  Rest of operands and return
   value are as for walk_tree.  */

static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
                  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
         be written in C.  They can in Ada.  It's pathological, but
         there's an ACATS test (c38102a) that checks it.  Deal with this
         by checking if we're pointing to another pointer, that one
         points to another pointer, that one does too, and we have no htab.
         If so, get a hash table.  We check three levels deep to avoid
         the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
          && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
          && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
          && !pset)
        {
          result = walk_tree_without_duplicates (&TREE_TYPE (type),
                                                 func, data);
          if (result)
            return result;

          break;
        }

      /* ... fall through ... */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
        tree arg;

        /* We never want to walk into default arguments.  */
        for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
          WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this nodes's type if a pointer for fear that
         we'll have infinite recursion.  If we have a PSET, then we
         need not fear.  */
      if (pset
          || (!POINTER_TYPE_P (TREE_TYPE (type))
              && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
        WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}
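
/* Illustrative sketch, not part of the original source: a minimal walk_tree
   callback.  Returning a non-NULL tree stops the walk; clearing
   *WALK_SUBTREES skips the operands of the current node.  A typical use is
   tree name = walk_tree (&expr, example_find_ssa_name, NULL, NULL);  */
#if 0
static tree
example_find_ssa_name (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TREE_CODE (*tp) == SSA_NAME)
    return *tp;          /* Found one; terminate the walk.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;  /* Do not descend into types.  */
  return NULL_TREE;
}
#endif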
11496 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11497 called with the DATA and the address of each sub-tree. If FUNC returns a
11498 non-NULL value, the traversal is stopped, and the value returned by FUNC
11499 is returned. If PSET is non-NULL it is used to record the nodes visited,
11500 and to avoid visiting a node more than once. */
11503 walk_tree_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11504 hash_set
<tree
> *pset
, walk_tree_lh lh
)
11506 enum tree_code code
;
11510 #define WALK_SUBTREE_TAIL(NODE) \
11514 goto tail_recurse; \
11519 /* Skip empty subtrees. */
11523 /* Don't walk the same tree twice, if the user has requested
11524 that we avoid doing so. */
11525 if (pset
&& pset
->add (*tp
))
11528 /* Call the function. */
11530 result
= (*func
) (tp
, &walk_subtrees
, data
);
11532 /* If we found something, return it. */
11536 code
= TREE_CODE (*tp
);
11538 /* Even if we didn't, FUNC may have decided that there was nothing
11539 interesting below this point in the tree. */
11540 if (!walk_subtrees
)
11542 /* But we still need to check our siblings. */
11543 if (code
== TREE_LIST
)
11544 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11545 else if (code
== OMP_CLAUSE
)
11546 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11553 result
= (*lh
) (tp
, &walk_subtrees
, func
, data
, pset
);
11554 if (result
|| !walk_subtrees
)
11561 case IDENTIFIER_NODE
:
11568 case PLACEHOLDER_EXPR
:
11572 /* None of these have subtrees other than those already walked
11577 WALK_SUBTREE (TREE_VALUE (*tp
));
11578 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11583 int len
= TREE_VEC_LENGTH (*tp
);
11588 /* Walk all elements but the first. */
11590 WALK_SUBTREE (TREE_VEC_ELT (*tp
, len
));
11592 /* Now walk the first one as a tail call. */
11593 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp
, 0));
11597 WALK_SUBTREE (TREE_REALPART (*tp
));
11598 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp
));
11602 unsigned HOST_WIDE_INT idx
;
11603 constructor_elt
*ce
;
11605 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp
), idx
, &ce
);
11607 WALK_SUBTREE (ce
->value
);
11612 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, 0));
11617 for (decl
= BIND_EXPR_VARS (*tp
); decl
; decl
= DECL_CHAIN (decl
))
11619 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11620 into declarations that are just mentioned, rather than
11621 declared; they don't really belong to this part of the tree.
11622 And, we can see cycles: the initializer for a declaration
11623 can refer to the declaration itself. */
11624 WALK_SUBTREE (DECL_INITIAL (decl
));
11625 WALK_SUBTREE (DECL_SIZE (decl
));
11626 WALK_SUBTREE (DECL_SIZE_UNIT (decl
));
11628 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp
));
11631 case STATEMENT_LIST
:
11633 tree_stmt_iterator i
;
11634 for (i
= tsi_start (*tp
); !tsi_end_p (i
); tsi_next (&i
))
11635 WALK_SUBTREE (*tsi_stmt_ptr (i
));
11640 switch (OMP_CLAUSE_CODE (*tp
))
11642 case OMP_CLAUSE_GANG
:
11643 case OMP_CLAUSE__GRIDDIM_
:
11644 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 1));
11647 case OMP_CLAUSE_DEVICE_RESIDENT
:
11648 case OMP_CLAUSE_ASYNC
:
11649 case OMP_CLAUSE_WAIT
:
11650 case OMP_CLAUSE_WORKER
:
11651 case OMP_CLAUSE_VECTOR
:
11652 case OMP_CLAUSE_NUM_GANGS
:
11653 case OMP_CLAUSE_NUM_WORKERS
:
11654 case OMP_CLAUSE_VECTOR_LENGTH
:
11655 case OMP_CLAUSE_PRIVATE
:
11656 case OMP_CLAUSE_SHARED
:
11657 case OMP_CLAUSE_FIRSTPRIVATE
:
11658 case OMP_CLAUSE_COPYIN
:
11659 case OMP_CLAUSE_COPYPRIVATE
:
11660 case OMP_CLAUSE_FINAL
:
11661 case OMP_CLAUSE_IF
:
11662 case OMP_CLAUSE_NUM_THREADS
:
11663 case OMP_CLAUSE_SCHEDULE
:
11664 case OMP_CLAUSE_UNIFORM
:
11665 case OMP_CLAUSE_DEPEND
:
11666 case OMP_CLAUSE_NUM_TEAMS
:
11667 case OMP_CLAUSE_THREAD_LIMIT
:
11668 case OMP_CLAUSE_DEVICE
:
11669 case OMP_CLAUSE_DIST_SCHEDULE
:
11670 case OMP_CLAUSE_SAFELEN
:
11671 case OMP_CLAUSE_SIMDLEN
:
11672 case OMP_CLAUSE_ORDERED
:
11673 case OMP_CLAUSE_PRIORITY
:
11674 case OMP_CLAUSE_GRAINSIZE
:
11675 case OMP_CLAUSE_NUM_TASKS
:
11676 case OMP_CLAUSE_HINT
:
11677 case OMP_CLAUSE_TO_DECLARE
:
11678 case OMP_CLAUSE_LINK
:
11679 case OMP_CLAUSE_USE_DEVICE_PTR
:
11680 case OMP_CLAUSE_IS_DEVICE_PTR
:
11681 case OMP_CLAUSE__LOOPTEMP_
:
11682 case OMP_CLAUSE__SIMDUID_
:
11683 case OMP_CLAUSE__CILK_FOR_COUNT_
:
11684 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 0));
11687 case OMP_CLAUSE_INDEPENDENT
:
11688 case OMP_CLAUSE_NOWAIT
:
11689 case OMP_CLAUSE_DEFAULT
:
11690 case OMP_CLAUSE_UNTIED
:
11691 case OMP_CLAUSE_MERGEABLE
:
11692 case OMP_CLAUSE_PROC_BIND
:
11693 case OMP_CLAUSE_INBRANCH
:
11694 case OMP_CLAUSE_NOTINBRANCH
:
11695 case OMP_CLAUSE_FOR
:
11696 case OMP_CLAUSE_PARALLEL
:
11697 case OMP_CLAUSE_SECTIONS
:
11698 case OMP_CLAUSE_TASKGROUP
:
11699 case OMP_CLAUSE_NOGROUP
:
11700 case OMP_CLAUSE_THREADS
:
11701 case OMP_CLAUSE_SIMD
:
11702 case OMP_CLAUSE_DEFAULTMAP
:
11703 case OMP_CLAUSE_AUTO
:
11704 case OMP_CLAUSE_SEQ
:
11705 case OMP_CLAUSE_TILE
:
11706 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11708 case OMP_CLAUSE_LASTPRIVATE
:
11709 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11710 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp
));
11711 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11713 case OMP_CLAUSE_COLLAPSE
:
11716 for (i
= 0; i
< 3; i
++)
11717 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11718 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11721 case OMP_CLAUSE_LINEAR
:
11722 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11723 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp
));
11724 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp
));
11725 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11727 case OMP_CLAUSE_ALIGNED
:
11728 case OMP_CLAUSE_FROM
:
11729 case OMP_CLAUSE_TO
:
11730 case OMP_CLAUSE_MAP
:
11731 case OMP_CLAUSE__CACHE_
:
11732 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11733 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 1));
11734 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11736 case OMP_CLAUSE_REDUCTION
:
11739 for (i
= 0; i
< 5; i
++)
11740 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11741 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11745 gcc_unreachable ();
11753 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11754 But, we only want to walk once. */
11755 len
= (TREE_OPERAND (*tp
, 3) == TREE_OPERAND (*tp
, 1)) ? 2 : 3;
11756 for (i
= 0; i
< len
; ++i
)
11757 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11758 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
));
11762 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11763 defining. We only want to walk into these fields of a type in this
11764 case and not in the general case of a mere reference to the type.
11766 The criterion is as follows: if the field can be an expression, it
11767 must be walked only here. This should be in keeping with the fields
11768 that are directly gimplified in gimplify_type_sizes in order for the
11769 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11770 variable-sized types.
11772 Note that DECLs get walked as part of processing the BIND_EXPR. */
11773 if (TREE_CODE (DECL_EXPR_DECL (*tp
)) == TYPE_DECL
)
11775 tree
*type_p
= &TREE_TYPE (DECL_EXPR_DECL (*tp
));
11776 if (TREE_CODE (*type_p
) == ERROR_MARK
)
11779 /* Call the function for the type. See if it returns anything or
11780 doesn't want us to continue. If we are to continue, walk both
11781 the normal fields and those for the declaration case. */
11782 result
= (*func
) (type_p
, &walk_subtrees
, data
);
11783 if (result
|| !walk_subtrees
)
11786 /* But do not walk a pointed-to type since it may itself need to
11787 be walked in the declaration case if it isn't anonymous. */
11788 if (!POINTER_TYPE_P (*type_p
))
11790 result
= walk_type_fields (*type_p
, func
, data
, pset
, lh
);
11795 /* If this is a record type, also walk the fields. */
11796 if (RECORD_OR_UNION_TYPE_P (*type_p
))
11800 for (field
= TYPE_FIELDS (*type_p
); field
;
11801 field
= DECL_CHAIN (field
))
11803 /* We'd like to look at the type of the field, but we can
11804 easily get infinite recursion. So assume it's pointed
11805 to elsewhere in the tree. Also, ignore things that
11807 if (TREE_CODE (field
) != FIELD_DECL
)
11810 WALK_SUBTREE (DECL_FIELD_OFFSET (field
));
11811 WALK_SUBTREE (DECL_SIZE (field
));
11812 WALK_SUBTREE (DECL_SIZE_UNIT (field
));
11813 if (TREE_CODE (*type_p
) == QUAL_UNION_TYPE
)
11814 WALK_SUBTREE (DECL_QUALIFIER (field
));
11818 /* Same for scalar types. */
11819 else if (TREE_CODE (*type_p
) == BOOLEAN_TYPE
11820 || TREE_CODE (*type_p
) == ENUMERAL_TYPE
11821 || TREE_CODE (*type_p
) == INTEGER_TYPE
11822 || TREE_CODE (*type_p
) == FIXED_POINT_TYPE
11823 || TREE_CODE (*type_p
) == REAL_TYPE
)
11825 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p
));
11826 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p
));
11829 WALK_SUBTREE (TYPE_SIZE (*type_p
));
11830 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p
));
11835 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
11839 /* Walk over all the sub-trees of this operand. */
11840 len
= TREE_OPERAND_LENGTH (*tp
);
11842 /* Go through the subtrees. We need to do this in forward order so
11843 that the scope of a FOR_EXPR is handled properly. */
11846 for (i
= 0; i
< len
- 1; ++i
)
11847 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11848 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
- 1));
11851 /* If this is a type, walk the needed fields in the type. */
11852 else if (TYPE_P (*tp
))
11853 return walk_type_fields (*tp
, func
, data
, pset
, lh
);
11857 /* We didn't find what we were looking for. */
11860 #undef WALK_SUBTREE_TAIL
11862 #undef WALK_SUBTREE
/* Like walk_tree, but does not walk duplicate nodes more than once.  */

tree
walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
                                walk_tree_lh lh)
{
  tree result;
  hash_set<tree> pset;
  result = walk_tree_1 (tp, func, data, &pset, lh);
  return result;
}
tree
tree_block (tree t)
{
  const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));

  if (IS_EXPR_CODE_CLASS (c))
    return LOCATION_BLOCK (t->exp.locus);
  gcc_unreachable ();
  return NULL;
}

void
tree_set_block (tree t, tree b)
{
  const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));

  if (IS_EXPR_CODE_CLASS (c))
    t->exp.locus = set_block (t->exp.locus, b);
  else
    gcc_unreachable ();
}
/* Create a nameless artificial label and put it in the current
   function context.  The label has a location of LOC.  Returns the
   newly created label.  */

tree
create_artificial_label (location_t loc)
{
  tree lab = build_decl (loc,
                         LABEL_DECL, NULL_TREE, void_type_node);

  DECL_ARTIFICIAL (lab) = 1;
  DECL_IGNORED_P (lab) = 1;
  DECL_CONTEXT (lab) = current_function_decl;
  return lab;
}
/* Given a tree, try to return a useful variable name that we can use
   to prefix a temporary that is being assigned the value of the tree.
   I.E. given  <temp> = &A, return A.  */

const char *
get_name (tree t)
{
  tree stripped_decl;

  stripped_decl = t;
  STRIP_NOPS (stripped_decl);
  if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
    return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
  else if (TREE_CODE (stripped_decl) == SSA_NAME)
    {
      tree name = SSA_NAME_IDENTIFIER (stripped_decl);
      if (!name)
        return NULL;
      return IDENTIFIER_POINTER (name);
    }
  else
    {
      switch (TREE_CODE (stripped_decl))
        {
        case ADDR_EXPR:
          return get_name (TREE_OPERAND (stripped_decl, 0));
        default:
          return NULL;
        }
    }
}
/* Return true if TYPE has a variable argument list.  */

bool
stdarg_p (const_tree fntype)
{
  function_args_iterator args_iter;
  tree n = NULL_TREE, t;

  if (!fntype)
    return false;

  FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
    {
      n = t;
    }

  return n != NULL_TREE && n != void_type_node;
}

/* Return true if TYPE has a prototype.  */

bool
prototype_p (const_tree fntype)
{
  tree t;

  gcc_assert (fntype != NULL_TREE);

  t = TYPE_ARG_TYPES (fntype);
  return (t != NULL_TREE);
}
/* If BLOCK is inlined from an __attribute__((__artificial__))
   routine, return pointer to location from where it has been
   called.  */

location_t *
block_nonartificial_location (tree block)
{
  location_t *ret = NULL;

  while (block && TREE_CODE (block) == BLOCK
         && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);

      while (TREE_CODE (ao) == BLOCK
             && BLOCK_ABSTRACT_ORIGIN (ao)
             && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
        ao = BLOCK_ABSTRACT_ORIGIN (ao);

      if (TREE_CODE (ao) == FUNCTION_DECL)
        {
          /* If AO is an artificial inline, point RET to the
             call site locus at which it has been inlined and continue
             the loop, in case AO's caller is also an artificial
             inline.  */
          if (DECL_DECLARED_INLINE_P (ao)
              && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
            ret = &BLOCK_SOURCE_LOCATION (block);
          else
            break;
        }
      else if (TREE_CODE (ao) != BLOCK)
        break;

      block = BLOCK_SUPERCONTEXT (block);
    }
  return ret;
}

/* If EXP is inlined from an __attribute__((__artificial__))
   function, return the location of the original call expression.  */

location_t
tree_nonartificial_location (tree exp)
{
  location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));

  if (loc)
    return *loc;
  else
    return EXPR_LOCATION (exp);
}
12036 /* These are the hash table functions for the hash table of OPTIMIZATION_NODEq
12039 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
12042 cl_option_hasher::hash (tree x
)
12044 const_tree
const t
= x
;
12048 hashval_t hash
= 0;
12050 if (TREE_CODE (t
) == OPTIMIZATION_NODE
)
12052 p
= (const char *)TREE_OPTIMIZATION (t
);
12053 len
= sizeof (struct cl_optimization
);
12056 else if (TREE_CODE (t
) == TARGET_OPTION_NODE
)
12057 return cl_target_option_hash (TREE_TARGET_OPTION (t
));
12060 gcc_unreachable ();
12062 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
12064 for (i
= 0; i
< len
; i
++)
12066 hash
= (hash
<< 4) ^ ((i
<< 2) | p
[i
]);
12071 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12072 TARGET_OPTION tree node) is the same as that given by *Y, which is the
12076 cl_option_hasher::equal (tree x
, tree y
)
12078 const_tree
const xt
= x
;
12079 const_tree
const yt
= y
;
12084 if (TREE_CODE (xt
) != TREE_CODE (yt
))
12087 if (TREE_CODE (xt
) == OPTIMIZATION_NODE
)
12089 xp
= (const char *)TREE_OPTIMIZATION (xt
);
12090 yp
= (const char *)TREE_OPTIMIZATION (yt
);
12091 len
= sizeof (struct cl_optimization
);
12094 else if (TREE_CODE (xt
) == TARGET_OPTION_NODE
)
12096 return cl_target_option_eq (TREE_TARGET_OPTION (xt
),
12097 TREE_TARGET_OPTION (yt
));
12101 gcc_unreachable ();
12103 return (memcmp (xp
, yp
, len
) == 0);
12106 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12109 build_optimization_node (struct gcc_options
*opts
)
12113 /* Use the cache of optimization nodes. */
12115 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node
),
12118 tree
*slot
= cl_option_hash_table
->find_slot (cl_optimization_node
, INSERT
);
12122 /* Insert this one into the hash table. */
12123 t
= cl_optimization_node
;
12126 /* Make a new node for next time round. */
12127 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
12133 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12136 build_target_option_node (struct gcc_options
*opts
)
12140 /* Use the cache of optimization nodes. */
12142 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node
),
12145 tree
*slot
= cl_option_hash_table
->find_slot (cl_target_option_node
, INSERT
);
12149 /* Insert this one into the hash table. */
12150 t
= cl_target_option_node
;
12153 /* Make a new node for next time round. */
12154 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
12160 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12161 so that they aren't saved during PCH writing. */
12164 prepare_target_option_nodes_for_pch (void)
12166 hash_table
<cl_option_hasher
>::iterator iter
= cl_option_hash_table
->begin ();
12167 for (; iter
!= cl_option_hash_table
->end (); ++iter
)
12168 if (TREE_CODE (*iter
) == TARGET_OPTION_NODE
)
12169 TREE_TARGET_GLOBALS (*iter
) = NULL
;
/* Determine the "ultimate origin" of a block.  The block may be an inlined
   instance of an inlined instance of a block which is local to an inline
   function, so we have to trace all of the way back through the origin chain
   to find out what sort of node actually served as the original seed for the
   given block.  */

tree
block_ultimate_origin (const_tree block)
{
  tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);

  /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
     we're trying to output the abstract instance of this function.  */
  if (BLOCK_ABSTRACT (block) && immediate_origin == block)
    return NULL_TREE;

  if (immediate_origin == NULL_TREE)
    return NULL_TREE;
  else
    {
      tree ret_val;
      tree lookahead = immediate_origin;

      do
        {
          ret_val = lookahead;
          lookahead = (TREE_CODE (ret_val) == BLOCK
                       ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
        }
      while (lookahead != NULL && lookahead != ret_val);

      /* The block's abstract origin chain may not be the *ultimate* origin of
         the block.  It could lead to a DECL that has an abstract origin set.
         If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
         will give us if it has one).  Note that DECL's abstract origins are
         supposed to be the most distant ancestor (or so decl_ultimate_origin
         claims), so we don't need to loop following the DECL origins.  */
      if (DECL_P (ret_val))
        return DECL_ORIGIN (ret_val);

      return ret_val;
    }
}
/* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
   no instruction.  */

bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
          || POINTER_TYPE_P (inner_type)
          || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}

/* Return true iff conversion in EXP generates no instruction.  Mark
   it inline so that we fully inline into the stripping functions even
   though we have two uses of this function.  */

static inline bool
tree_nop_conversion (const_tree exp)
{
  tree outer_type, inner_type;

  if (!CONVERT_EXPR_P (exp)
      && TREE_CODE (exp) != NON_LVALUE_EXPR)
    return false;
  if (TREE_OPERAND (exp, 0) == error_mark_node)
    return false;

  outer_type = TREE_TYPE (exp);
  inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

  if (!inner_type)
    return false;

  return tree_nop_conversion_p (outer_type, inner_type);
}

/* Return true iff conversion in EXP generates no instruction.  Don't
   consider conversions changing the signedness.  */

static bool
tree_sign_nop_conversion (const_tree exp)
{
  tree outer_type, inner_type;

  if (!tree_nop_conversion (exp))
    return false;

  outer_type = TREE_TYPE (exp);
  inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

  return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
          && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
}

/* Strip conversions from EXP according to tree_nop_conversion and
   return the resulting expression.  */

tree
tree_strip_nop_conversions (tree exp)
{
  while (tree_nop_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}

/* Strip conversions from EXP according to tree_sign_nop_conversion
   and return the resulting expression.  */

tree
tree_strip_sign_nop_conversions (tree exp)
{
  while (tree_sign_nop_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}
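
/* Illustrative sketch, not part of the original source: the difference
   between the two strippers.  For (int)(unsigned int)x with x an int,
   tree_strip_nop_conversions removes both conversions, while the
   sign-preserving variant stops at the first conversion that changes
   signedness and so leaves the expression alone.  */
#if 0
static void
example_strip (tree expr)
{
  tree bare = tree_strip_nop_conversions (expr);
  tree same_sign = tree_strip_sign_nop_conversions (expr);
  gcc_assert (bare != NULL_TREE && same_sign != NULL_TREE);
}
#endif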
/* Avoid any floating point extensions from EXP.  */

tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /* For floating point constant look up the narrowest type that can hold
     it properly and handle it like (type)(narrowest_type)constant.
     This way we can optimize for instance a=a*2.0 where "a" is float
     but 2.0 is double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
          && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
        type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
               > TYPE_PRECISION (double_type_node)
               && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
        type = double_type_node;
      if (type)
        return build_real_truncate (type, orig);
    }

  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  if (!FLOAT_TYPE_P (subt))
    return exp;

  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  return strip_float_extensions (sub);
}
12347 /* Strip out all handled components that produce invariant
12351 strip_invariant_refs (const_tree op
)
12353 while (handled_component_p (op
))
12355 switch (TREE_CODE (op
))
12358 case ARRAY_RANGE_REF
:
12359 if (!is_gimple_constant (TREE_OPERAND (op
, 1))
12360 || TREE_OPERAND (op
, 2) != NULL_TREE
12361 || TREE_OPERAND (op
, 3) != NULL_TREE
)
12365 case COMPONENT_REF
:
12366 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
12372 op
= TREE_OPERAND (op
, 0);
12378 static GTY(()) tree gcc_eh_personality_decl
;
12380 /* Return the GCC personality function decl. */
12383 lhd_gcc_personality (void)
12385 if (!gcc_eh_personality_decl
)
12386 gcc_eh_personality_decl
= build_personality_function ("gcc");
12387 return gcc_eh_personality_decl
;
12390 /* TARGET is a call target of GIMPLE call statement
12391 (obtained by gimple_call_fn). Return true if it is
12392 OBJ_TYPE_REF representing an virtual call of C++ method.
12393 (As opposed to OBJ_TYPE_REF representing objc calls
12394 through a cast where middle-end devirtualization machinery
12398 virtual_method_call_p (const_tree target
)
12400 if (TREE_CODE (target
) != OBJ_TYPE_REF
)
12402 tree t
= TREE_TYPE (target
);
12403 gcc_checking_assert (TREE_CODE (t
) == POINTER_TYPE
);
12405 if (TREE_CODE (t
) == FUNCTION_TYPE
)
12407 gcc_checking_assert (TREE_CODE (t
) == METHOD_TYPE
);
12408 /* If we do not have BINFO associated, it means that type was built
12409 without devirtualization enabled. Do not consider this a virtual
12411 if (!TYPE_BINFO (obj_type_ref_class (target
)))
12416 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12419 obj_type_ref_class (const_tree ref
)
12421 gcc_checking_assert (TREE_CODE (ref
) == OBJ_TYPE_REF
);
12422 ref
= TREE_TYPE (ref
);
12423 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
12424 ref
= TREE_TYPE (ref
);
12425 /* We look for type THIS points to. ObjC also builds
12426 OBJ_TYPE_REF with non-method calls, Their first parameter
12427 ID however also corresponds to class type. */
12428 gcc_checking_assert (TREE_CODE (ref
) == METHOD_TYPE
12429 || TREE_CODE (ref
) == FUNCTION_TYPE
);
12430 ref
= TREE_VALUE (TYPE_ARG_TYPES (ref
));
12431 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
12432 return TREE_TYPE (ref
);
12435 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12438 lookup_binfo_at_offset (tree binfo
, tree type
, HOST_WIDE_INT pos
)
12441 tree base_binfo
, b
;
12443 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
12444 if (pos
== tree_to_shwi (BINFO_OFFSET (base_binfo
))
12445 && types_same_for_odr (TREE_TYPE (base_binfo
), type
))
12447 else if ((b
= lookup_binfo_at_offset (base_binfo
, type
, pos
)) != NULL
)
12452 /* Try to find a base info of BINFO that would have its field decl at offset
12453 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12454 found, return, otherwise return NULL_TREE. */
12457 get_binfo_at_offset (tree binfo
, HOST_WIDE_INT offset
, tree expected_type
)
12459 tree type
= BINFO_TYPE (binfo
);
12463 HOST_WIDE_INT pos
, size
;
12467 if (types_same_for_odr (type
, expected_type
))
12472 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
12474 if (TREE_CODE (fld
) != FIELD_DECL
|| !DECL_ARTIFICIAL (fld
))
12477 pos
= int_bit_position (fld
);
12478 size
= tree_to_uhwi (DECL_SIZE (fld
));
12479 if (pos
<= offset
&& (pos
+ size
) > offset
)
12482 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
12485 /* Offset 0 indicates the primary base, whose vtable contents are
12486 represented in the binfo for the derived class. */
12487 else if (offset
!= 0)
12489 tree found_binfo
= NULL
, base_binfo
;
12490 /* Offsets in BINFO are in bytes relative to the whole structure
12491 while POS is in bits relative to the containing field. */
12492 int binfo_offset
= (tree_to_shwi (BINFO_OFFSET (binfo
)) + pos
12495 for (i
= 0; BINFO_BASE_ITERATE (binfo
, i
, base_binfo
); i
++)
12496 if (tree_to_shwi (BINFO_OFFSET (base_binfo
)) == binfo_offset
12497 && types_same_for_odr (TREE_TYPE (base_binfo
), TREE_TYPE (fld
)))
12499 found_binfo
= base_binfo
;
12503 binfo
= found_binfo
;
12505 binfo
= lookup_binfo_at_offset (binfo
, TREE_TYPE (fld
),
12509 type
= TREE_TYPE (fld
);
/* Returns true if X is a typedef decl.  */

bool
is_typedef_decl (const_tree x)
{
  return (x && TREE_CODE (x) == TYPE_DECL
          && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
}

/* Returns true iff TYPE is a type variant created for a typedef.  */

bool
typedef_variant_p (const_tree type)
{
  return is_typedef_decl (TYPE_NAME (type));
}
12531 /* Warn about a use of an identifier which was marked deprecated. */
12533 warn_deprecated_use (tree node
, tree attr
)
12537 if (node
== 0 || !warn_deprecated_decl
)
12543 attr
= DECL_ATTRIBUTES (node
);
12544 else if (TYPE_P (node
))
12546 tree decl
= TYPE_STUB_DECL (node
);
12548 attr
= lookup_attribute ("deprecated",
12549 TYPE_ATTRIBUTES (TREE_TYPE (decl
)));
12554 attr
= lookup_attribute ("deprecated", attr
);
12557 msg
= TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
)));
12565 w
= warning (OPT_Wdeprecated_declarations
,
12566 "%qD is deprecated: %s", node
, msg
);
12568 w
= warning (OPT_Wdeprecated_declarations
,
12569 "%qD is deprecated", node
);
12571 inform (DECL_SOURCE_LOCATION (node
), "declared here");
12573 else if (TYPE_P (node
))
12575 tree what
= NULL_TREE
;
12576 tree decl
= TYPE_STUB_DECL (node
);
12578 if (TYPE_NAME (node
))
12580 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
12581 what
= TYPE_NAME (node
);
12582 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
12583 && DECL_NAME (TYPE_NAME (node
)))
12584 what
= DECL_NAME (TYPE_NAME (node
));
12592 w
= warning (OPT_Wdeprecated_declarations
,
12593 "%qE is deprecated: %s", what
, msg
);
12595 w
= warning (OPT_Wdeprecated_declarations
,
12596 "%qE is deprecated", what
);
12601 w
= warning (OPT_Wdeprecated_declarations
,
12602 "type is deprecated: %s", msg
);
12604 w
= warning (OPT_Wdeprecated_declarations
,
12605 "type is deprecated");
12608 inform (DECL_SOURCE_LOCATION (decl
), "declared here");
12615 warning (OPT_Wdeprecated_declarations
, "%qE is deprecated: %s",
12618 warning (OPT_Wdeprecated_declarations
, "%qE is deprecated", what
);
12623 warning (OPT_Wdeprecated_declarations
, "type is deprecated: %s",
12626 warning (OPT_Wdeprecated_declarations
, "type is deprecated");
12632 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12633 somewhere in it. */
12636 contains_bitfld_component_ref_p (const_tree ref
)
12638 while (handled_component_p (ref
))
12640 if (TREE_CODE (ref
) == COMPONENT_REF
12641 && DECL_BIT_FIELD (TREE_OPERAND (ref
, 1)))
12643 ref
= TREE_OPERAND (ref
, 0);
12649 /* Try to determine whether a TRY_CATCH expression can fall through.
12650 This is a subroutine of block_may_fallthru. */
12653 try_catch_may_fallthru (const_tree stmt
)
12655 tree_stmt_iterator i
;
12657 /* If the TRY block can fall through, the whole TRY_CATCH can
12659 if (block_may_fallthru (TREE_OPERAND (stmt
, 0)))
12662 i
= tsi_start (TREE_OPERAND (stmt
, 1));
12663 switch (TREE_CODE (tsi_stmt (i
)))
12666 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12667 catch expression and a body. The whole TRY_CATCH may fall
12668 through iff any of the catch bodies falls through. */
12669 for (; !tsi_end_p (i
); tsi_next (&i
))
12671 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i
))))
12676 case EH_FILTER_EXPR
:
12677 /* The exception filter expression only matters if there is an
12678 exception. If the exception does not match EH_FILTER_TYPES,
12679 we will execute EH_FILTER_FAILURE, and we will fall through
12680 if that falls through. If the exception does match
12681 EH_FILTER_TYPES, the stack unwinder will continue up the
12682 stack, so we will not fall through. We don't know whether we
12683 will throw an exception which matches EH_FILTER_TYPES or not,
12684 so we just ignore EH_FILTER_TYPES and assume that we might
12685 throw an exception which doesn't match. */
12686 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i
)));
12689 /* This case represents statements to be executed when an
12690 exception occurs. Those statements are implicitly followed
12691 by a RESX statement to resume execution after the exception.
12692 So in this case the TRY_CATCH never falls through. */
12697 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12698 need not be 100% accurate; simply be conservative and return true if we
12699 don't know. This is used only to avoid stupidly generating extra code.
12700 If we're wrong, we'll just delete the extra code later. */
12703 block_may_fallthru (const_tree block
)
12705 /* This CONST_CAST is okay because expr_last returns its argument
12706 unmodified and we assign it to a const_tree. */
12707 const_tree stmt
= expr_last (CONST_CAST_TREE (block
));
12709 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
12713 /* Easy cases. If the last statement of the block implies
12714 control transfer, then we can't fall through. */
12718 /* If SWITCH_LABELS is set, this is lowered, and represents a
12719 branch to a selected label and hence can not fall through.
12720 Otherwise SWITCH_BODY is set, and the switch can fall
12722 return SWITCH_LABELS (stmt
) == NULL_TREE
;
12725 if (block_may_fallthru (COND_EXPR_THEN (stmt
)))
12727 return block_may_fallthru (COND_EXPR_ELSE (stmt
));
12730 return block_may_fallthru (BIND_EXPR_BODY (stmt
));
12732 case TRY_CATCH_EXPR
:
12733 return try_catch_may_fallthru (stmt
);
12735 case TRY_FINALLY_EXPR
:
12736 /* The finally clause is always executed after the try clause,
12737 so if it does not fall through, then the try-finally will not
12738 fall through. Otherwise, if the try clause does not fall
12739 through, then when the finally clause falls through it will
12740 resume execution wherever the try clause was going. So the
12741 whole try-finally will only fall through if both the try
12742 clause and the finally clause fall through. */
12743 return (block_may_fallthru (TREE_OPERAND (stmt
, 0))
12744 && block_may_fallthru (TREE_OPERAND (stmt
, 1)));
12747 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
12748 stmt
= TREE_OPERAND (stmt
, 1);
12754 /* Functions that do not return do not fall through. */
12755 return (call_expr_flags (stmt
) & ECF_NORETURN
) == 0;
12757 case CLEANUP_POINT_EXPR
:
12758 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12761 return block_may_fallthru (TREE_OPERAND (stmt
, 1));
12767 return lang_hooks
.block_may_fallthru (stmt
);
/* True if we are using EH to handle cleanups.  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}

/* Query whether EH is used for cleanups.  */

bool
using_eh_for_cleanups_p (void)
{
  return using_eh_for_cleanups_flag;
}
/* Wrapper for tree_code_name to ensure that tree code is valid.  */

const char *
get_tree_code_name (enum tree_code code)
{
  const char *invalid = "<invalid tree code>";

  if (code >= MAX_TREE_CODES)
    return invalid;

  return tree_code_name[code];
}
/* Drops the TREE_OVERFLOW flag from T.  */

tree
drop_tree_overflow (tree t)
{
  gcc_checking_assert (TREE_OVERFLOW (t));

  /* For tree codes with a sharing machinery re-build the result.  */
  if (TREE_CODE (t) == INTEGER_CST)
    return wide_int_to_tree (TREE_TYPE (t), t);

  /* Otherwise, as all tcc_constants are possibly shared, copy the node
     and drop the flag.  */
  t = copy_node (t);
  TREE_OVERFLOW (t) = 0;
  return t;
}
/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if ((TREE_CODE (t) == MEM_REF
       || TREE_CODE (t) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);

  /* ??? Either the alias oracle or all callers need to properly deal
     with WITH_SIZE_EXPRs before we can look through those.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    return NULL_TREE;

  return t;
}
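
/* Illustrative sketch, not part of the original source: for a reference such
   as array[i].fld the loop above peels the COMPONENT_REF and ARRAY_REF and
   returns the underlying declaration (or NULL_TREE for a WITH_SIZE_EXPR).  */
#if 0
static void
example_report_base (tree ref)
{
  tree base = get_base_address (ref);
  if (base && DECL_P (base))
    fprintf (stderr, "base is a %s\n", get_tree_code_name (TREE_CODE (base)));
}
#endif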
12847 /* Return a tree of sizetype representing the size, in bytes, of the element
12848 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12851 array_ref_element_size (tree exp
)
12853 tree aligned_size
= TREE_OPERAND (exp
, 3);
12854 tree elmt_type
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12855 location_t loc
= EXPR_LOCATION (exp
);
12857 /* If a size was specified in the ARRAY_REF, it's the size measured
12858 in alignment units of the element type. So multiply by that value. */
12861 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12862 sizetype from another type of the same width and signedness. */
12863 if (TREE_TYPE (aligned_size
) != sizetype
)
12864 aligned_size
= fold_convert_loc (loc
, sizetype
, aligned_size
);
12865 return size_binop_loc (loc
, MULT_EXPR
, aligned_size
,
12866 size_int (TYPE_ALIGN_UNIT (elmt_type
)));
12869 /* Otherwise, take the size from that of the element type. Substitute
12870 any PLACEHOLDER_EXPR that we have. */
12872 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type
), exp
);
12875 /* Return a tree representing the lower bound of the array mentioned in
12876 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12879 array_ref_low_bound (tree exp
)
12881 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12883 /* If a lower bound is specified in EXP, use it. */
12884 if (TREE_OPERAND (exp
, 2))
12885 return TREE_OPERAND (exp
, 2);
12887 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12888 substituting for a PLACEHOLDER_EXPR as needed. */
12889 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
12890 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type
), exp
);
12892 /* Otherwise, return a zero of the appropriate type. */
12893 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp
, 1)), 0);
12896 /* Return a tree representing the upper bound of the array mentioned in
12897 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12900 array_ref_up_bound (tree exp
)
12902 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp
, 0)));
12904 /* If there is a domain type and it has an upper bound, use it, substituting
12905 for a PLACEHOLDER_EXPR as needed. */
12906 if (domain_type
&& TYPE_MAX_VALUE (domain_type
))
12907 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type
), exp
);
12909 /* Otherwise fail. */
12913 /* Returns true if REF is an array reference to an array at the end of
12914 a structure. If this is the case, the array may be allocated larger
12915 than its upper bound implies. */
12918 array_at_struct_end_p (tree ref
)
12920 if (TREE_CODE (ref
) != ARRAY_REF
12921 && TREE_CODE (ref
) != ARRAY_RANGE_REF
)
12924 while (handled_component_p (ref
))
12926 /* If the reference chain contains a component reference to a
12927 non-union type and there follows another field the reference
12928 is not at the end of a structure. */
12929 if (TREE_CODE (ref
) == COMPONENT_REF
12930 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 0))) == RECORD_TYPE
)
12932 tree nextf
= DECL_CHAIN (TREE_OPERAND (ref
, 1));
12933 while (nextf
&& TREE_CODE (nextf
) != FIELD_DECL
)
12934 nextf
= DECL_CHAIN (nextf
);
12939 ref
= TREE_OPERAND (ref
, 0);
12942 /* If the reference is based on a declared entity, the size of the array
12943 is constrained by its given domain. */
12950 /* Return a tree representing the offset, in bytes, of the field referenced
12951 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12954 component_ref_field_offset (tree exp
)
12956 tree aligned_offset
= TREE_OPERAND (exp
, 2);
12957 tree field
= TREE_OPERAND (exp
, 1);
12958 location_t loc
= EXPR_LOCATION (exp
);
12960 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12961 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12963 if (aligned_offset
)
12965 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12966 sizetype from another type of the same width and signedness. */
12967 if (TREE_TYPE (aligned_offset
) != sizetype
)
12968 aligned_offset
= fold_convert_loc (loc
, sizetype
, aligned_offset
);
12969 return size_binop_loc (loc
, MULT_EXPR
, aligned_offset
,
12970 size_int (DECL_OFFSET_ALIGN (field
)
12974 /* Otherwise, take the offset from that of the field. Substitute
12975 any PLACEHOLDER_EXPR that we have. */
12977 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field
), exp
);
/* Return the machine mode of T.  For vectors, returns the mode of the
   inner type.  The main use case is to feed the result to HONOR_NANS,
   avoiding the BLKmode that a direct TYPE_MODE (T) might return.  */

machine_mode
element_mode (const_tree t)
{
  if (!TYPE_P (t))
    t = TREE_TYPE (t);
  if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    t = TREE_TYPE (t);
  return TYPE_MODE (t);
}
12995 /* Veirfy that basic properties of T match TV and thus T can be a variant of
12996 TV. TV should be the more specified variant (i.e. the main variant). */
12999 verify_type_variant (const_tree t
, tree tv
)
13001 /* Type variant can differ by:
13003 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13004 ENCODE_QUAL_ADDR_SPACE.
13005 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13006 in this case some values may not be set in the variant types
13007 (see TYPE_COMPLETE_P checks).
13008 - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
13009 - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
13010 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13011 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13012 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13013 this is necessary to make it possible to merge types form different TUs
13014 - arrays, pointers and references may have TREE_TYPE that is a variant
13015 of TREE_TYPE of their main variants.
13016 - aggregates may have new TYPE_FIELDS list that list variants of
13017 the main variant TYPE_FIELDS.
13018 - vector types may differ by TYPE_VECTOR_OPAQUE
13019 - TYPE_METHODS is always NULL for vairant types and maintained for
13023 /* Convenience macro for matching individual fields. */
13024 #define verify_variant_match(flag) \
13026 if (flag (tv) != flag (t)) \
13028 error ("type variant differs by " #flag "."); \
13034 /* tree_base checks. */
13036 verify_variant_match (TREE_CODE
);
13037 /* FIXME: Ada builds non-artificial variants of artificial types. */
13038 if (TYPE_ARTIFICIAL (tv
) && 0)
13039 verify_variant_match (TYPE_ARTIFICIAL
);
13040 if (POINTER_TYPE_P (tv
))
13041 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL
);
13042 /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build. */
13043 verify_variant_match (TYPE_UNSIGNED
);
13044 verify_variant_match (TYPE_ALIGN_OK
);
13045 verify_variant_match (TYPE_PACKED
);
13046 if (TREE_CODE (t
) == REFERENCE_TYPE
)
13047 verify_variant_match (TYPE_REF_IS_RVALUE
);
13048 if (AGGREGATE_TYPE_P (t
))
13049 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER
);
13051 verify_variant_match (TYPE_SATURATING
);
13052 /* FIXME: This check trigger during libstdc++ build. */
13053 if (RECORD_OR_UNION_TYPE_P (t
) && COMPLETE_TYPE_P (t
) && 0)
13054 verify_variant_match (TYPE_FINAL_P
);
13056 /* tree_type_common checks. */
13058 if (COMPLETE_TYPE_P (t
))
13060 verify_variant_match (TYPE_SIZE
);
13061 verify_variant_match (TYPE_MODE
);
13062 if (TYPE_SIZE_UNIT (t
) != TYPE_SIZE_UNIT (tv
)
13063 /* FIXME: ideally we should compare pointer equality, but java FE
13064 produce variants where size is INTEGER_CST of different type (int
13065 wrt size_type) during libjava biuld. */
13066 && !operand_equal_p (TYPE_SIZE_UNIT (t
), TYPE_SIZE_UNIT (tv
), 0))
13068 error ("type variant has different TYPE_SIZE_UNIT");
13070 error ("type variant's TYPE_SIZE_UNIT");
13071 debug_tree (TYPE_SIZE_UNIT (tv
));
13072 error ("type's TYPE_SIZE_UNIT");
13073 debug_tree (TYPE_SIZE_UNIT (t
));
13077 verify_variant_match (TYPE_PRECISION
);
13078 verify_variant_match (TYPE_NEEDS_CONSTRUCTING
);
13079 if (RECORD_OR_UNION_TYPE_P (t
))
13080 verify_variant_match (TYPE_TRANSPARENT_AGGR
);
13081 else if (TREE_CODE (t
) == ARRAY_TYPE
)
13082 verify_variant_match (TYPE_NONALIASED_COMPONENT
);
13083 /* During LTO we merge variant lists from diferent translation units
13084 that may differ BY TYPE_CONTEXT that in turn may point
13085 to TRANSLATION_UNIT_DECL.
13086 Ada also builds variants of types with different TYPE_CONTEXT. */
13087 if ((!in_lto_p
|| !TYPE_FILE_SCOPE_P (t
)) && 0)
13088 verify_variant_match (TYPE_CONTEXT
);
13089 verify_variant_match (TYPE_STRING_FLAG
);
13090 if (TYPE_ALIAS_SET_KNOWN_P (t
))
13092 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
13097 /* tree_type_non_common checks. */
13099 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13100 and dangle the pointer from time to time. */
13101 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_VFIELD (t
) != TYPE_VFIELD (tv
)
13102 && (in_lto_p
|| !TYPE_VFIELD (tv
)
13103 || TREE_CODE (TYPE_VFIELD (tv
)) != TREE_LIST
))
13105 error ("type variant has different TYPE_VFIELD");
13109 if ((TREE_CODE (t
) == ENUMERAL_TYPE
&& COMPLETE_TYPE_P (t
))
13110 || TREE_CODE (t
) == INTEGER_TYPE
13111 || TREE_CODE (t
) == BOOLEAN_TYPE
13112 || TREE_CODE (t
) == REAL_TYPE
13113 || TREE_CODE (t
) == FIXED_POINT_TYPE
)
13115 verify_variant_match (TYPE_MAX_VALUE
);
13116 verify_variant_match (TYPE_MIN_VALUE
);
13118 if (TREE_CODE (t
) == METHOD_TYPE
)
13119 verify_variant_match (TYPE_METHOD_BASETYPE
);
13120 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_METHODS (t
))
13122 error ("type variant has TYPE_METHODS");
13126 if (TREE_CODE (t
) == OFFSET_TYPE
)
13127 verify_variant_match (TYPE_OFFSET_BASETYPE
);
13128 if (TREE_CODE (t
) == ARRAY_TYPE
)
13129 verify_variant_match (TYPE_ARRAY_MAX_SIZE
);
13130 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13131 or even type's main variant. This is needed to make bootstrap pass
13132 and the bug seems new in GCC 5.
13133 C++ FE should be updated to make this consistent and we should check
13134 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13135 is a match with main variant.
13137 Also disable the check for Java for now because of parser hack that builds
13138 first an dummy BINFO and then sometimes replace it by real BINFO in some
13140 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_BINFO (t
) && TYPE_BINFO (tv
)
13141 && TYPE_BINFO (t
) != TYPE_BINFO (tv
)
13142 /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
13143 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
13144 at LTO time only. */
13145 && (in_lto_p
&& odr_type_p (t
)))
13147 error ("type variant has different TYPE_BINFO");
13149 error ("type variant's TYPE_BINFO");
13150 debug_tree (TYPE_BINFO (tv
));
13151 error ("type's TYPE_BINFO");
13152 debug_tree (TYPE_BINFO (t
));

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE)
    verify_variant_match (TYPE_VALUES);
  else if (TREE_CODE (t) == ARRAY_TYPE)
    verify_variant_match (TYPE_DOMAIN);
  /* Permit incomplete variants of a complete type.  While FEs may complete
     all variants, this does not happen for C++ templates in all cases.  */
  else if (RECORD_OR_UNION_TYPE_P (t)
           && COMPLETE_TYPE_P (t)
           && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
    {
      tree f1, f2;

      /* Fortran builds qualified variants as new records with items of
         qualified type.  Verify that they look the same.  */
      for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
           f1 && f2;
           f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
        if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
            || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
                != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
                /* FIXME: gfc_nonrestricted_type builds all types as variants
                   with the exception of pointer types.  It deeply copies the
                   type, which means that we may end up with a variant type
                   referring to a non-variant pointer.  We may change it to
                   produce types as variants, too, like
                   objc_get_protocol_qualified_type does.  */
                && !POINTER_TYPE_P (TREE_TYPE (f1)))
            || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
            || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
          break;
      if (f1 || f2)
        {
          error ("type variant has different TYPE_FIELDS");
          debug_tree (tv);
          error ("first mismatch is field");
          debug_tree (f1);
          error ("and field");
          debug_tree (f2);
          return false;
        }
    }
  else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
    verify_variant_match (TYPE_ARG_TYPES);
  /* For C++ the qualified variant of an array type is really an array type
     of the qualified TREE_TYPE.
     objc builds variants of pointers where the pointed-to type is a variant,
     too, in objc_get_protocol_qualified_type.  */
  if (TREE_TYPE (t) != TREE_TYPE (tv)
      && ((TREE_CODE (t) != ARRAY_TYPE
           && !POINTER_TYPE_P (t))
          || TYPE_MAIN_VARIANT (TREE_TYPE (t))
             != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
    {
      error ("type variant has different TREE_TYPE");
      debug_tree (tv);
      error ("type variant's TREE_TYPE");
      debug_tree (TREE_TYPE (tv));
      error ("type's TREE_TYPE");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  if (type_with_alias_set_p (t)
      && !gimple_canonical_types_compatible_p (t, tv, false))
    {
      error ("type is not compatible with its variant");
      debug_tree (tv);
      error ("type variant's TREE_TYPE");
      debug_tree (TREE_TYPE (tv));
      error ("type's TREE_TYPE");
      debug_tree (TREE_TYPE (t));
      return false;
    }
  return true;
#undef verify_variant_match
}
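
/* As an illustrative sketch (not part of the checker itself): a front end
   that builds a qualified variant, e.g.

     tree ci = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);

   gets a type whose TYPE_MAIN_VARIANT is integer_type_node, and
   verify_type_variant then insists that CI agrees with that main variant on
   fields such as TYPE_PRECISION, TYPE_SIZE_UNIT and
   TYPE_MIN_VALUE/TYPE_MAX_VALUE while still being free to differ in
   qualifiers.  */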

/* The TYPE_CANONICAL merging machinery.  It should closely resemble
   the middle-end types_compatible_p function.  It needs to avoid
   claiming types are different for types that should be treated
   the same with respect to TBAA.  Canonical types are also used
   for IL consistency checks via the useless_type_conversion_p
   predicate, which does not handle all type kinds itself but falls
   back to pointer comparison of TYPE_CANONICAL for aggregates,
   for example.  */

/* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
   type calculation, because we need to allow interoperability between signed
   and unsigned variants.  */

bool
type_with_interoperable_signedness (const_tree type)
{
  /* The Fortran standard requires C_SIGNED_CHAR to be interoperable with
     both signed char and unsigned char.  Similarly the Fortran FE builds
     C_SIZE_T as a signed type, while C defines it unsigned.  */

  return (tree_code_for_canonical_type_merging (TREE_CODE (type))
          == INTEGER_TYPE
          && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
              || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node)));
}
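
/* For instance (an illustrative sketch, not a check performed here):
   "signed char" and "unsigned char" have the same precision, so both
   satisfy this predicate and their difference in TYPE_UNSIGNED is ignored
   by the canonical type machinery:

     gcc_checking_assert
       (type_with_interoperable_signedness (signed_char_type_node)
        && type_with_interoperable_signedness (unsigned_char_type_node));  */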

/* Return true iff T1 and T2 are structurally identical as far as
   TBAA is concerned.
   This function is used both by lto.c canonical type merging and by the
   verifier.  If TRUST_TYPE_CANONICAL we do not look into the structure of
   types that have TYPE_CANONICAL defined and assume them equivalent.  This
   is useful only for LTO, because only in that case does TYPE_CANONICAL
   equivalence correspond to the one defined by
   gimple_canonical_types_compatible_p.  */

bool
gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
                                     bool trust_type_canonical)
{
  /* Type variants should be the same as the main variant.  When not doing
     sanity checking to verify this fact, go to main variants and save some
     work.  */
  if (trust_type_canonical)
    {
      t1 = TYPE_MAIN_VARIANT (t1);
      t2 = TYPE_MAIN_VARIANT (t2);
    }

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* We consider complete types always compatible with incomplete types.
     This does not make sense for canonical type calculation and thus we
     need to ensure that we are never called on it.

     FIXME: For more correctness the function probably should have three modes
	1) mode assuming that types are complete, matching their structure
	2) mode allowing incomplete types but producing equivalence classes
	   and thus ignoring all info from complete types
	3) mode allowing incomplete types to match complete but checking
	   compatibility between complete types.

     1 and 2 can be used for canonical type calculation.  3 is the real
     definition of type compatibility that can be used e.g. for warnings
     during declaration merging.  */

  gcc_assert (!trust_type_canonical
              || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));

  /* If the types have been previously registered and found equal
     they still are.  */
  if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
      && trust_type_canonical)
    {
      /* Do not use TYPE_CANONICAL of pointer types.  For LTO streamed types
         they are always NULL, but they are set to non-NULL for types
         constructed by build_pointer_type and variants.  In this case the
         TYPE_CANONICAL is more fine grained than the equivalence we test
         (where all pointers are considered equal).  Be sure not to return
         false negatives.  */
      gcc_checking_assert (canonical_type_used_p (t1)
                           && canonical_type_used_p (t2));
      return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
    }

  /* Can't be the same type if the types don't have the same code.  */
  enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
  if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different modes.  */
  if (TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
        return false;

      /* In some cases the signed and unsigned types are required to be
         interoperable.  */
      if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
          && !type_with_interoperable_signedness (t1))
        return false;

      /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
         interoperable with "signed char".  Unless all frontends are revisited
         to agree on these types, we must ignore the flag completely.  */

      /* The Fortran standard defines a C_PTR type that is compatible with
         every C pointer.  For this reason we need to glob all pointers into
         one.  Still, pointers in different address spaces are not
         compatible.  */
      if (POINTER_TYPE_P (t1))
        {
          if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
              != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
            return false;
        }

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
          || TREE_CODE (t1) == COMPLEX_TYPE)
        return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
                                                    TREE_TYPE (t2),
                                                    trust_type_canonical);

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
         the number of elements is the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                                trust_type_canonical)
          || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
          || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
          || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
        return false;
      else
        {
          tree i1 = TYPE_DOMAIN (t1);
          tree i2 = TYPE_DOMAIN (t2);

          /* For an incomplete external array, the type domain can be
             NULL_TREE.  Check this condition also.  */
          if (i1 == NULL_TREE && i2 == NULL_TREE)
            return true;
          else if (i1 == NULL_TREE || i2 == NULL_TREE)
            return false;
          else
            {
              tree min1 = TYPE_MIN_VALUE (i1);
              tree min2 = TYPE_MIN_VALUE (i2);
              tree max1 = TYPE_MAX_VALUE (i1);
              tree max2 = TYPE_MAX_VALUE (i2);

              /* The minimum/maximum values have to be the same.  */
              if ((min1 == min2
                   || (min1 && min2
                       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
                            && TREE_CODE (min2) == PLACEHOLDER_EXPR)
                           || operand_equal_p (min1, min2, 0))))
                  && (max1 == max2
                      || (max1 && max2
                          && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
                               && TREE_CODE (max2) == PLACEHOLDER_EXPR)
                              || operand_equal_p (max1, max2, 0)))))
                return true;
              else
                return false;
            }
        }

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and argument types
         are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
                                                trust_type_canonical))
        return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
        return true;
      else
        {
          tree parms1, parms2;

          for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
               parms1 && parms2;
               parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
            {
              if (!gimple_canonical_types_compatible_p
                     (TREE_VALUE (parms1), TREE_VALUE (parms2),
                      trust_type_canonical))
                return false;
            }

          if (parms1 || parms2)
            return false;

          return true;
        }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f1, f2;

        /* Don't try to compare variants of an incomplete type, before
           TYPE_FIELDS has been copied around.  */
        if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
          return true;

        if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
          return false;

        /* For aggregate types, all the fields must be the same.  */
        for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
             f1 || f2;
             f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
          {
            /* Skip non-fields.  */
            while (f1 && TREE_CODE (f1) != FIELD_DECL)
              f1 = TREE_CHAIN (f1);
            while (f2 && TREE_CODE (f2) != FIELD_DECL)
              f2 = TREE_CHAIN (f2);
            if (!f1 || !f2)
              break;
            /* The fields must have the same name, offset and type.  */
            if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
                || !gimple_compare_field_offset (f1, f2)
                || !gimple_canonical_types_compatible_p
                      (TREE_TYPE (f1), TREE_TYPE (f2),
                       trust_type_canonical))
              return false;
          }

        /* If one aggregate has more fields than the other, they
           are not the same.  */
        if (f1 || f2)
          return false;

        return true;
      }

    default:
      /* Consider all types with language specific trees in them mutually
         compatible.  This is executed only from verify_type and false
         positives can be tolerated.  */
      gcc_assert (!in_lto_p);
      return true;
    }
}
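
/* Usage sketch (illustrative): the type verifier below compares a type
   against its variant or its TYPE_CANONICAL structurally, without trusting
   the precomputed canonicals:

     if (!gimple_canonical_types_compatible_p (t, TYPE_CANONICAL (t), false))
       error ("TYPE_CANONICAL is not compatible");

   whereas LTO canonical-type merging passes trust_type_canonical=true so
   that already-merged components short-circuit to a pointer comparison of
   their TYPE_CANONICAL.  */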

/* Verify type T.  */

void
verify_type (const_tree t)
{
  bool error_found = false;
  tree mv = TYPE_MAIN_VARIANT (t);
  if (!mv)
    {
      error ("Main variant is not defined");
      error_found = true;
    }
  else if (mv != TYPE_MAIN_VARIANT (mv))
    {
      error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
      debug_tree (mv);
      error_found = true;
    }
  else if (t != mv && !verify_type_variant (t, mv))
    error_found = true;

  tree ct = TYPE_CANONICAL (t);
  if (!ct)
    ;
  else if (TYPE_CANONICAL (ct) != ct)
    {
      error ("TYPE_CANONICAL has different TYPE_CANONICAL");
      debug_tree (ct);
      error_found = true;
    }
  /* Method and function types cannot be used to address memory and thus
     TYPE_CANONICAL really matters only for determining useless conversions.

     FIXME: The C++ FE produces declarations of builtin functions that are
     not compatible with main variants.  */
  else if (TREE_CODE (t) == FUNCTION_TYPE)
    ;
  else if (t != ct
           /* FIXME: gimple_canonical_types_compatible_p cannot compare types
              with variably sized arrays because their sizes are possibly
              gimplified to different variables.  */
           && !variably_modified_type_p (ct, NULL)
           && !gimple_canonical_types_compatible_p (t, ct, false))
    {
      error ("TYPE_CANONICAL is not compatible");
      debug_tree (ct);
      error_found = true;
    }

  if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
      && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
    {
      error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
      debug_tree (ct);
      error_found = true;
    }

  if (TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
    {
      error ("TYPE_CANONICAL of main variant is not main variant");
      debug_tree (ct);
      debug_tree (TYPE_MAIN_VARIANT (ct));
      error_found = true;
    }

  /* Check various uses of TYPE_MINVAL.  */
  if (RECORD_OR_UNION_TYPE_P (t))
    {
      /* FIXME: The C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
         and dangles the pointer from time to time.  */
      if (TYPE_VFIELD (t)
          && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
          && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
        {
          error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
          debug_tree (TYPE_VFIELD (t));
          error_found = true;
        }
    }
  else if (TREE_CODE (t) == POINTER_TYPE)
    {
      if (TYPE_NEXT_PTR_TO (t)
          && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
        {
          error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
          debug_tree (TYPE_NEXT_PTR_TO (t));
          error_found = true;
        }
    }
  else if (TREE_CODE (t) == REFERENCE_TYPE)
    {
      if (TYPE_NEXT_REF_TO (t)
          && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
        {
          error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
          debug_tree (TYPE_NEXT_REF_TO (t));
          error_found = true;
        }
    }
  else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
           || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      /* FIXME: The following check should pass:
           useless_type_conversion_p (const_cast <tree> (t),
                                      TREE_TYPE (TYPE_MIN_VALUE (t)))
         but does not for C sizetypes in LTO.  */
    }
  /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE.  */
  else if (TYPE_MINVAL (t)
           && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
               || in_lto_p))
    {
      error ("TYPE_MINVAL non-NULL");
      debug_tree (TYPE_MINVAL (t));
      error_found = true;
    }

  /* Check various uses of TYPE_MAXVAL.  */
  if (RECORD_OR_UNION_TYPE_P (t))
    {
      if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
          && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
          && TYPE_METHODS (t) != error_mark_node)
        {
          error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
          debug_tree (TYPE_METHODS (t));
          error_found = true;
        }
    }
  else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
    {
      if (TYPE_METHOD_BASETYPE (t)
          && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
          && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
        {
          error ("TYPE_METHOD_BASETYPE is not record nor union");
          debug_tree (TYPE_METHOD_BASETYPE (t));
          error_found = true;
        }
    }
  else if (TREE_CODE (t) == OFFSET_TYPE)
    {
      if (TYPE_OFFSET_BASETYPE (t)
          && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
          && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
        {
          error ("TYPE_OFFSET_BASETYPE is not record nor union");
          debug_tree (TYPE_OFFSET_BASETYPE (t));
          error_found = true;
        }
    }
  else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
           || TREE_CODE (t) == FIXED_POINT_TYPE)
    {
      /* FIXME: The following check should pass:
           useless_type_conversion_p (const_cast <tree> (t),
                                      TREE_TYPE (TYPE_MAX_VALUE (t)))
         but does not for C sizetypes in LTO.  */
    }
  else if (TREE_CODE (t) == ARRAY_TYPE)
    {
      if (TYPE_ARRAY_MAX_SIZE (t)
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
        {
          error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
          debug_tree (TYPE_ARRAY_MAX_SIZE (t));
          error_found = true;
        }
    }
  else if (TYPE_MAXVAL (t))
    {
      error ("TYPE_MAXVAL non-NULL");
      debug_tree (TYPE_MAXVAL (t));
      error_found = true;
    }

  /* Check various uses of TYPE_BINFO.  */
  if (RECORD_OR_UNION_TYPE_P (t))
    {
      if (!TYPE_BINFO (t))
        ;
      else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
        {
          error ("TYPE_BINFO is not TREE_BINFO");
          debug_tree (TYPE_BINFO (t));
          error_found = true;
        }
      /* FIXME: Java builds invalid empty binfos that do not have
         TREE_TYPE set.  */
      else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
        {
          error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
          debug_tree (TREE_TYPE (TYPE_BINFO (t)));
          error_found = true;
        }
    }
  else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
    {
      error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
      debug_tree (TYPE_LANG_SLOT_1 (t));
      error_found = true;
    }

  /* Check various uses of TYPE_VALUES_RAW.  */
  if (TREE_CODE (t) == ENUMERAL_TYPE)
    for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
      {
        tree value = TREE_VALUE (l);
        tree name = TREE_PURPOSE (l);

        /* The C FE produces INTEGER_CST of INTEGER_TYPE, while the C++ FE
           uses CONST_DECL of ENUMERAL_TYPE.  */
        if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
          {
            error ("Enum value is not CONST_DECL or INTEGER_CST");
            debug_tree (value);
            debug_tree (name);
            error_found = true;
          }
        if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
            && !useless_type_conversion_p (const_cast <tree> (t),
                                           TREE_TYPE (value)))
          {
            error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
            debug_tree (value);
            debug_tree (name);
            error_found = true;
          }
        if (TREE_CODE (name) != IDENTIFIER_NODE)
          {
            error ("Enum value name is not IDENTIFIER_NODE");
            debug_tree (value);
            debug_tree (name);
            error_found = true;
          }
      }
  else if (TREE_CODE (t) == ARRAY_TYPE)
    {
      if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
        {
          error ("Array TYPE_DOMAIN is not integer type");
          debug_tree (TYPE_DOMAIN (t));
          error_found = true;
        }
    }
  else if (RECORD_OR_UNION_TYPE_P (t))
    {
      if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
        {
          error ("TYPE_FIELDS defined in incomplete type");
          error_found = true;
        }
      for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
        {
          /* TODO: verify properties of decls.  */
          if (TREE_CODE (fld) == FIELD_DECL)
            ;
          else if (TREE_CODE (fld) == TYPE_DECL)
            ;
          else if (TREE_CODE (fld) == CONST_DECL)
            ;
          else if (TREE_CODE (fld) == VAR_DECL)
            ;
          else if (TREE_CODE (fld) == TEMPLATE_DECL)
            ;
          else if (TREE_CODE (fld) == USING_DECL)
            ;
          else
            {
              error ("Wrong tree in TYPE_FIELDS list");
              debug_tree (fld);
              error_found = true;
            }
        }
    }
  else if (TREE_CODE (t) == INTEGER_TYPE
           || TREE_CODE (t) == BOOLEAN_TYPE
           || TREE_CODE (t) == OFFSET_TYPE
           || TREE_CODE (t) == REFERENCE_TYPE
           || TREE_CODE (t) == NULLPTR_TYPE
           || TREE_CODE (t) == POINTER_TYPE)
    {
      if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
        {
          error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
                 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
          error_found = true;
        }
      else if (TYPE_CACHED_VALUES_P (t)
               && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
        {
          error ("TYPE_CACHED_VALUES is not TREE_VEC");
          debug_tree (TYPE_CACHED_VALUES (t));
          error_found = true;
        }
      /* Verify just enough of the cache to ensure that no one copied it to a
         new type.  All copying should go through copy_node, which clears
         it.  */
      else if (TYPE_CACHED_VALUES_P (t))
        {
          int i;
          for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
            if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
                && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
              {
                error ("wrong TYPE_CACHED_VALUES entry");
                debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
                error_found = true;
                break;
              }
        }
    }
  else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
    for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
      {
        /* The C++ FE uses TREE_PURPOSE to store initial values.  */
        if (TREE_PURPOSE (l) && in_lto_p)
          {
            error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
            debug_tree (l);
            error_found = true;
          }
        if (!TYPE_P (TREE_VALUE (l)))
          {
            error ("Wrong entry in TYPE_ARG_TYPES list");
            debug_tree (l);
            error_found = true;
          }
      }
  else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
    {
      error ("TYPE_VALUES_RAW field is non-NULL");
      debug_tree (TYPE_VALUES_RAW (t));
      error_found = true;
    }

  if (TREE_CODE (t) != INTEGER_TYPE
      && TREE_CODE (t) != BOOLEAN_TYPE
      && TREE_CODE (t) != OFFSET_TYPE
      && TREE_CODE (t) != REFERENCE_TYPE
      && TREE_CODE (t) != NULLPTR_TYPE
      && TREE_CODE (t) != POINTER_TYPE
      && TYPE_CACHED_VALUES_P (t))
    {
      error ("TYPE_CACHED_VALUES_P is set while it should not be");
      error_found = true;
    }

  if (TYPE_STRING_FLAG (t)
      && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
    {
      error ("TYPE_STRING_FLAG is set on wrong type code");
      error_found = true;
    }
  else if (TYPE_STRING_FLAG (t))
    {
      const_tree b = t;
      if (TREE_CODE (b) == ARRAY_TYPE)
        b = TREE_TYPE (t);
      /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type,
         which is 32 bits.  */
      if (TREE_CODE (b) != INTEGER_TYPE)
        {
          error ("TYPE_STRING_FLAG is set on type that does not look like "
                 "char nor array of chars");
          error_found = true;
        }
    }

  /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
     the TYPE_MAIN_VARIANT and it would be odd to add methods only to
     variants of a type.  */
  if (TREE_CODE (t) == METHOD_TYPE
      && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
    {
      error ("TYPE_METHOD_BASETYPE is not main variant");
      error_found = true;
    }

  if (error_found)
    {
      debug_tree (const_cast <tree> (t));
      internal_error ("verify_type failed");
    }
}

/* Return true if ARG is marked with the nonnull attribute in the
   current function signature.  */

bool
nonnull_arg_p (const_tree arg)
{
  tree t, attrs, fntype;
  unsigned HOST_WIDE_INT arg_num;

  gcc_assert (TREE_CODE (arg) == PARM_DECL
              && (POINTER_TYPE_P (TREE_TYPE (arg))
                  || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));

  /* The static chain decl is always non-null.  */
  if (arg == cfun->static_chain_decl)
    return true;

  /* The THIS argument of a method is always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
      && arg == DECL_ARGUMENTS (cfun->decl)
      && flag_delete_null_pointer_checks)
    return true;

  /* Values passed by reference are always non-NULL.  */
  if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
      && flag_delete_null_pointer_checks)
    return true;

  fntype = TREE_TYPE (cfun->decl);
  for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
    {
      attrs = lookup_attribute ("nonnull", attrs);

      /* If "nonnull" wasn't specified, we know nothing about the argument.  */
      if (attrs == NULL_TREE)
        return false;

      /* If "nonnull" applies to all the arguments, then ARG is non-null.  */
      if (TREE_VALUE (attrs) == NULL_TREE)
        return true;

      /* Get the position number for ARG in the function signature.  */
      for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
           t;
           t = DECL_CHAIN (t), arg_num++)
        {
          if (t == arg)
            break;
        }

      gcc_assert (t == arg);

      /* Now see if ARG_NUM is mentioned in the nonnull list.  */
      for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
        {
          if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
            return true;
        }
    }

  return false;
}
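
/* For example (illustrative, not GCC source itself): when compiling the
   body of a function declared as

     extern void copy_buf (void *dst, const void *src)
       __attribute__ ((nonnull (1, 2)));

   nonnull_arg_p returns true for the PARM_DECLs of DST and SRC, because
   positions 1 and 2 appear in the attribute's argument list (and it would
   return true for every pointer parameter had the attribute been spelled
   without an argument list).  */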

/* Given location LOC, strip away any packed range information
   or ad-hoc information.  */

location_t
get_pure_location (location_t loc)
{
  if (IS_ADHOC_LOC (loc))
    loc
      = line_table->location_adhoc_data_map.data[loc & MAX_SOURCE_LOCATION].locus;

  if (loc >= LINEMAPS_MACRO_LOWEST_LOCATION (line_table))
    return loc;

  if (loc < RESERVED_LOCATION_COUNT)
    return loc;

  const line_map *map = linemap_lookup (line_table, loc);
  const line_map_ordinary *ordmap = linemap_check_ordinary (map);

  return loc & ~((1 << ordmap->m_range_bits) - 1);
}
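
/* A small worked example (illustrative): assuming an ordinary map with
   m_range_bits == 5, the low 5 bits of a location encode its range offset,
   so locations 0x1234 through 0x123f all collapse to the pure location
   0x1220 (0x1234 & ~0x1f == 0x1220).  */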

/* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
   that LOC carries.  */

location_t
set_block (location_t loc, tree block)
{
  location_t pure_loc = get_pure_location (loc);
  source_range src_range = get_range_from_loc (line_table, loc);
  return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
}

location_t
set_source_range (tree expr, location_t start, location_t finish)
{
  source_range src_range;
  src_range.m_start = start;
  src_range.m_finish = finish;
  return set_source_range (expr, src_range);
}

location_t
set_source_range (tree expr, source_range src_range)
{
  if (!EXPR_P (expr))
    return UNKNOWN_LOCATION;

  location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
  location_t adhoc = COMBINE_LOCATION_DATA (line_table,
                                            pure_loc,
                                            src_range,
                                            NULL);
  SET_EXPR_LOCATION (expr, adhoc);
  return adhoc;
}

location_t
make_location (location_t caret, location_t start, location_t finish)
{
  location_t pure_loc = get_pure_location (caret);
  source_range src_range;
  src_range.m_start = start;
  src_range.m_finish = finish;
  location_t combined_loc = COMBINE_LOCATION_DATA (line_table,
                                                   pure_loc,
                                                   src_range,
                                                   NULL);
  return combined_loc;
}
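
/* Usage sketch (illustrative): a front end can give an expression a rich
   location whose caret is at the operator and whose range spans both
   operands, e.g.

     location_t combined = make_location (op_loc, lhs_loc, rhs_loc);
     SET_EXPR_LOCATION (expr, combined);

   where op_loc, lhs_loc and rhs_loc stand for hypothetical token locations
   already obtained from the lexer.  */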

/* Return the name of combined function FN, for debugging purposes.  */

const char *
combined_fn_name (combined_fn fn)
{
  if (builtin_fn_p (fn))
    {
      tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
      return IDENTIFIER_POINTER (DECL_NAME (fndecl));
    }
  else
    return internal_fn_name (as_internal_fn (fn));
}
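
/* Usage sketch (illustrative): dump code can name a call's target uniformly
   whether it is a built-in or an internal function, e.g.

     fprintf (dump_file, "folding call to %s\n",
              combined_fn_name (gimple_call_combined_fn (stmt)));

   where gimple_call_combined_fn is assumed to be the gimple.h accessor that
   maps a call statement to its combined_fn.  */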

#include "gt-tree.h"